3.1.0 release.

This commit is contained in:
harfang3dadmin 2021-12-15 20:23:12 +01:00
parent eeff062611
commit ecd4dad0d6
194 changed files with 9517 additions and 10695 deletions

View File

@ -293,7 +293,7 @@ if(HG_BINDING_DEFINES_LIST)
set(HG_BINDING_DEFINES "--defines" ${HG_BINDING_DEFINES_LIST})
endif()
if( HG_BUILD_CPP_SDK OR HG_BUILD_ASSIMP_CONVERTER OR HG_BUILD_FBX_CONVERTER OR HG_BUILD_GLTF_EXPORTER OR HG_BUILD_GLTF_IMPORTER OR HG_BUILD_ASSETC )
if(HG_BUILD_HG_LUA OR HG_BUILD_CPP_SDK OR HG_BUILD_ASSIMP_CONVERTER OR HG_BUILD_FBX_CONVERTER OR HG_BUILD_GLTF_EXPORTER OR HG_BUILD_GLTF_IMPORTER OR HG_BUILD_ASSETC)
add_subdirectory(extern)
add_subdirectory(binding)
endif()

13
LICENSE
View File

@ -1112,13 +1112,12 @@ Article 8 - COMMERCIAL USE of the SOFTWARE
8.1 COMMERCIALISATION DECLARATION
USERS who wish to use the SOFTWARE for COMMERCIAL PURPOSES should, prior to
placing a FINAL PRODUCT on the market, complete a COMMERCIALISATION DECLARATION
for NWNC, which can be accessed at the following address:
www.harfang3d.com/declaration.
USERS who wish to use the SOFTWARE for COMMERCIAL PURPOSES should, prior to
placing a FINAL PRODUCT on the market, complete a COMMERCIALISATION DECLARATION
for NWNC by sending an email to contact@harfang3d.com.
Companies must pay the appropriate rates for the COMMERCIALISATION DECLARATION,
which can be viewed at the following address: www.harfang3d.com/declaration.
which can be consulted by sending an email to contact@harfang3d.com.
The USER will be charged for any COMMERCIAL USE of the SOFTWARE in compliance
with NWNCs rates which entered into force on the same day as the
@ -1139,8 +1138,8 @@ FINAL PRODUCT lawfully with regard to NWNC.
The USER may affix a certification number to the FINAL PRODUCT.
The certification number will be freely available to the FINAL USER at the
following address www.harfang3d.com/certificate-check.
The certification number will be freely available to the FINAL USER by sending
an email to contact@harfang3d.com.
The USER consequently agrees to cite the CERTIFICATE OF COMPLIANCE and specify
how the FINAL USER may access it on the FINAL PRODUCT or in the General

View File

@ -977,6 +977,8 @@ def bind_scene(gen):
gen.bind_method(rigid_body, 'SetRestitution', 'void', ['float restitution'])
gen.bind_method(rigid_body, 'GetFriction', 'float', [])
gen.bind_method(rigid_body, 'SetFriction', 'void', ['float friction'])
gen.bind_method(rigid_body, 'GetRollingFriction', 'float', [])
gen.bind_method(rigid_body, 'SetRollingFriction', 'void', ['float rolling_friction'])
gen.end_class(rigid_body)
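A hedged Python sketch of how the new rolling-friction accessors bound above might be used from the generated API; the `scene.CreateRigidBody()` call is an assumption, only the two accessors come from this diff:
```python
rigid_body = scene.CreateRigidBody()     # assumed component-creation call
rigid_body.SetRollingFriction(0.05)      # new setter bound above
print(rigid_body.GetRollingFriction())   # new getter bound above
```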
@ -990,6 +992,8 @@ def bind_scene(gen):
gen.bind_method(collision, 'GetType', 'hg::CollisionType', [])
gen.bind_method(collision, 'SetType', 'void', ['hg::CollisionType type'])
gen.bind_method(collision, 'GetLocalTransform', 'hg::Mat4', [])
gen.bind_method(collision, 'SetLocalTransform', 'void', ['hg::Mat4 m'])
gen.bind_method(collision, 'GetMass', 'float', [])
gen.bind_method(collision, 'SetMass', 'void', ['float mass'])
gen.bind_method(collision, 'GetRadius', 'float', [])
@ -1307,8 +1311,8 @@ def bind_scene(gen):
protos = [('hg::Node', ['hg::Scene &scene', 'const hg::Mat4 &mtx', 'const hg::ModelRef &model', 'const std::vector<hg::Material> &materials'], [])]
gen.bind_function_overloads('hg::CreateObject', expand_std_vector_proto(gen, protos))
gen.bind_function('hg::CreateInstanceFromFile', 'hg::Node', ['hg::Scene &scene', 'const hg::Mat4 &mtx', 'const std::string &name', 'hg::PipelineResources &resources', 'const hg::PipelineInfo &pipeline', '?uint32_t flags'], {'constants_group': {'flags': 'LoadSaveSceneFlags'}})
gen.bind_function('hg::CreateInstanceFromAssets', 'hg::Node', ['hg::Scene &scene', 'const hg::Mat4 &mtx', 'const std::string &name', 'hg::PipelineResources &resources', 'const hg::PipelineInfo &pipeline', '?uint32_t flags'], {'constants_group': {'flags': 'LoadSaveSceneFlags'}})
gen.bind_function('hg::CreateInstanceFromFile', 'hg::Node', ['hg::Scene &scene', 'const hg::Mat4 &mtx', 'const std::string &name', 'hg::PipelineResources &resources', 'const hg::PipelineInfo &pipeline', 'bool &success', '?uint32_t flags'], {'constants_group': {'flags': 'LoadSaveSceneFlags'}, 'arg_out': ['success']})
gen.bind_function('hg::CreateInstanceFromAssets', 'hg::Node', ['hg::Scene &scene', 'const hg::Mat4 &mtx', 'const std::string &name', 'hg::PipelineResources &resources', 'const hg::PipelineInfo &pipeline', 'bool &success', '?uint32_t flags'], {'constants_group': {'flags': 'LoadSaveSceneFlags'}, 'arg_out': ['success']})
gen.bind_function('hg::CreateScript', 'hg::Node', ['hg::Scene &scene', '?const std::string &path'])
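With `success` declared as an output argument, the generated Python binding presumably returns it alongside the node. A hedged sketch ('car.scn' is a placeholder path, `GetForwardPipelineInfo` is assumed to be the pipeline info accessor):
```python
scene = hg.Scene()
res = hg.PipelineResources()
node, success = hg.CreateInstanceFromFile(scene, hg.TranslationMat4(hg.Vec3(0, 0, 0)),
                                          "car.scn", res, hg.GetForwardPipelineInfo())
if not success:
    print("failed to instantiate 'car.scn'")
```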
@ -1497,6 +1501,7 @@ static std::vector<hg::ForwardPipelineLight> _GetSceneForwardPipelineLights(cons
gen.bind_function('hg::CreateForwardPipelineAAAFromFile', 'hg::ForwardPipelineAAA', ['const char *path', 'const hg::ForwardPipelineAAAConfig &config', '?bgfx::BackbufferRatio::Enum ssgi_ratio', '?bgfx::BackbufferRatio::Enum ssr_ratio'])
gen.bind_function('hg::CreateForwardPipelineAAAFromAssets', 'hg::ForwardPipelineAAA', ['const char *path', 'const hg::ForwardPipelineAAAConfig &config', '?bgfx::BackbufferRatio::Enum ssgi_ratio', '?bgfx::BackbufferRatio::Enum ssr_ratio'])
gen.bind_function('hg::DestroyForwardPipelineAAA', 'void', ['hg::ForwardPipelineAAA &pipeline'])
gen.bind_function('hg::IsValid', 'bool', ['const hg::ForwardPipelineAAA &pipeline'])
gen.bind_function('hg::UpdateForwardPipelineAAA', 'void', ['hg::ForwardPipeline &pipeline', 'const hg::Rect<int> &rect', 'const hg::Mat4 &view', 'const hg::Mat44 &proj',
'const hg::Mat4 &prv_view', 'const hg::Mat44 &prv_proj', 'const hg::tVec2<float> &jitter', 'bgfx::BackbufferRatio::Enum ssgi_ratio', 'bgfx::BackbufferRatio::Enum ssr_ratio', 'float temporal_aa_weight', 'float motion_blur_strength',
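The new `IsValid` overload makes it possible to check an AAA pipeline after creation; a minimal sketch, assuming "core" is the asset folder holding the pipeline shaders and that loading failure yields an invalid pipeline:
```python
pipeline_aaa = hg.CreateForwardPipelineAAAFromAssets("core", hg.ForwardPipelineAAAConfig())
if not hg.IsValid(pipeline_aaa):
    print("failed to create the AAA pipeline")
```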
@ -1544,7 +1549,7 @@ def bind_bullet3_physics(gen):
# gen.bind_method(newton, 'CollectCollisionEvents', 'void', ['const hg::Scene &scene', 'hg::NodeNodeContacts &node_node_contacts'])
gen.bind_method(bullet, 'SyncKinematicBodiesFromScene', 'void', ['const hg::Scene &scene'])
gen.bind_method(bullet, 'SyncBodiesFromScene', 'void', ['const hg::Scene &scene'])
gen.bind_method(bullet, 'GarbageCollect', 'size_t', ['const hg::Scene &scene'])
gen.bind_method(bullet, 'GarbageCollectResources', 'size_t', [])
@ -1555,6 +1560,11 @@ def bind_bullet3_physics(gen):
#
gen.bind_method(bullet, 'NodeWake', 'void', ['const hg::Node &node'])
gen.bind_method(bullet, 'NodeSetDeactivation', 'void', ['const hg::Node &node', 'bool enable'])
gen.bind_method(bullet, 'NodeGetDeactivation', 'bool', ['const hg::Node &node'])
gen.bind_method(bullet, 'NodeResetWorld', 'void', ['const hg::Node &node', 'const hg::Mat4 &world'])
gen.bind_method(bullet, 'NodeAddForce', 'void', ['const hg::Node &node', 'const hg::Vec3 &F', '?const hg::Vec3 &world_pos'])
gen.bind_method(bullet, 'NodeAddImpulse', 'void', ['const hg::Node &node', 'const hg::Vec3 &dt_velocity', '?const hg::Vec3 &world_pos'])
gen.bind_method(bullet, 'NodeGetPointVelocity', 'hg::Vec3', ['const hg::Node &node', 'const hg::Vec3 &world_pos'])
@ -1564,6 +1574,11 @@ def bind_bullet3_physics(gen):
gen.bind_method(bullet, 'NodeGetAngularVelocity', 'hg::Vec3', ['const hg::Node &node'])
gen.bind_method(bullet, 'NodeSetAngularVelocity', 'void', ['const hg::Node &node', 'const hg::Vec3 &W'])
gen.bind_method(bullet, 'NodeGetLinearLockAxes', 'void', ['const hg::Node &node', 'bool &X', 'bool &Y', 'bool &Z'], {'arg_out': ['X', 'Y', 'Z']})
gen.bind_method(bullet, 'NodeSetLinearLockAxes', 'void', ['const hg::Node &node', 'bool X', 'bool Y', 'bool Z'])
gen.bind_method(bullet, 'NodeGetAngularLockAxes', 'void', ['const hg::Node &node', 'bool &X', 'bool &Y', 'bool &Z'], {'arg_out': ['X', 'Y', 'Z']})
gen.bind_method(bullet, 'NodeSetAngularLockAxes', 'void', ['const hg::Node &node', 'bool X', 'bool Y', 'bool Z'])
#
node_contacts = gen.begin_class('hg::NodeContacts')
gen.end_class(node_contacts)
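Since the lock-axis getters added above declare `X`, `Y`, `Z` as output arguments, the generated Python API presumably returns them as a tuple. A hedged sketch, assuming `node` is an existing scene node carrying a rigid body:
```python
physics = hg.SceneBullet3Physics()
physics.NodeSetLinearLockAxes(node, True, False, True)  # lock X and Z translation
x, y, z = physics.NodeGetLinearLockAxes(node)           # out arguments returned as values
physics.NodeAddForce(node, hg.Vec3(0, 100, 0))          # world-space force, optional application point omitted
```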
@ -1594,6 +1609,7 @@ static std::vector<hg::Contact> __GetNodeContacts(const hg::NodeContacts &ctcs,
#
gen.bind_method(bullet, 'RaycastFirstHit', 'hg::RaycastOut', ['const hg::Scene &scene', 'const hg::Vec3 &p0', 'const hg::Vec3 &p1'])
gen.bind_method(bullet, 'RaycastAllHits', 'std::vector<hg::RaycastOut>', ['const hg::Scene &scene', 'const hg::Vec3 &p0', 'const hg::Vec3 &p1'])
#
gen.bind_method(bullet, 'RenderCollision', 'void', ['bgfx::ViewId view_id', 'const bgfx::VertexLayout &vtx_layout', 'bgfx::ProgramHandle prg', 'hg::RenderState render_state', 'uint32_t depth'])
@ -2114,6 +2130,8 @@ static void _SetViewTransform(bgfx::ViewId view_id, const hg::Mat4 &view, const
('TF_SamplerMinAnisotropic', 'BGFX_SAMPLER_MIN_ANISOTROPIC'),
('TF_SamplerMagPoint', 'BGFX_SAMPLER_MAG_POINT'),
('TF_SamplerMagAnisotropic', 'BGFX_SAMPLER_MAG_ANISOTROPIC'),
('TF_BlitDestination', 'BGFX_TEXTURE_BLIT_DST'),
('TF_ReadBack', 'BGFX_TEXTURE_READ_BACK'),
], 'TextureFlags')
gen.bind_function('hg::LoadTextureFlagsFromFile', 'uint64_t', ['const std::string &path'], {'rval_constants_group': 'TextureFlags'})
@ -2394,16 +2412,27 @@ static bgfx::TextureInfo _PipelineResources_GetTextureInfo(hg::PipelineResources
gen.bind_function('hg::CreateMissingMaterialProgramValuesFromAssets', 'void', ['hg::Material &mat', 'const hg::PipelineResources &resources'])
#
pipeline_frame_buffer = gen.begin_class('hg::PipelineFrameBuffer')
gen.bind_members(pipeline_frame_buffer, ['bgfx::FrameBufferHandle handle', 'hg::TextureRef color', 'hg::TextureRef depth'])
gen.end_class(pipeline_frame_buffer)
frame_buffer = gen.begin_class('hg::FrameBuffer')
gen.bind_member(frame_buffer, 'bgfx::FrameBufferHandle handle')
gen.end_class(frame_buffer)
gen.bind_function_overloads('hg::CreateFrameBuffer', [
('hg::PipelineFrameBuffer', ['bgfx::TextureFormat::Enum color_format', 'bgfx::TextureFormat::Enum depth_format', 'int aa', 'hg::PipelineResources &res', 'const char *name'], []),
('hg::PipelineFrameBuffer', ['int width', 'int height', 'bgfx::TextureFormat::Enum color_format', 'bgfx::TextureFormat::Enum depth_format', 'int aa', 'hg::PipelineResources &res', 'const char *name'], [])
('hg::FrameBuffer', ['const hg::Texture &color', 'const hg::Texture &depth', 'const char *name'], []),
('hg::FrameBuffer', ['bgfx::TextureFormat::Enum color_format', 'bgfx::TextureFormat::Enum depth_format', 'int aa', 'const char *name'], []),
('hg::FrameBuffer', ['int width', 'int height', 'bgfx::TextureFormat::Enum color_format', 'bgfx::TextureFormat::Enum depth_format', 'int aa', 'const char *name'], [])
])
gen.bind_function('hg::GetColorTexture', 'hg::Texture', ['hg::FrameBuffer &frameBuffer'])
gen.bind_function('hg::GetDepthTexture', 'hg::Texture', ['hg::FrameBuffer &frameBuffer'])
gen.bind_function('hg::DestroyFrameBuffer', 'void', ['hg::PipelineResources &res', 'hg::PipelineFrameBuffer &frameBuffer'])
gen.insert_binding_code('''
static void _FrameBuffer_GetTextures(hg::FrameBuffer &framebuffer, hg::Texture &color, hg::Texture &depth) {
color = hg::GetColorTexture(framebuffer);
depth = hg::GetDepthTexture(framebuffer);
}
''')
gen.bind_function('GetTextures', 'void', ['hg::FrameBuffer &framebuffer', 'hg::Texture &color', 'hg::Texture &depth'], {'route': route_lambda('_FrameBuffer_GetTextures'), 'arg_out': ['color', 'depth']})
gen.bind_function('hg::DestroyFrameBuffer', 'void', ['hg::FrameBuffer &frameBuffer'])
#
vertices = gen.begin_class('hg::Vertices')
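A hedged sketch of the reworked frame buffer API above as it would appear in Python; the `TF_RGBA8`/`TF_D24` texture format constants are assumptions based on the usual `TF_*` naming:
```python
fb = hg.CreateFrameBuffer(512, 512, hg.TF_RGBA8, hg.TF_D24, 4, "offscreen")
color, depth = hg.GetTextures(fb)  # arg_out: both attachments returned at once
# ... render to fb, then release it
hg.DestroyFrameBuffer(fb)
```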
@ -2786,6 +2815,11 @@ def bind_color(gen):
gen.bind_function('hg::ColorI', 'hg::Color', ['int r', 'int g', 'int b', '?int a'])
gen.bind_function('hg::ToHLS', 'hg::Color', ['const hg::Color &color'])
gen.bind_function('hg::FromHLS', 'hg::Color', ['const hg::Color &color'])
gen.bind_function('hg::SetSaturation', 'hg::Color', ['const hg::Color &color', 'float saturation'])
bind_std_vector(gen, color)
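The color conversion signatures above translate directly to Python; a short sketch:
```python
hls = hg.ToHLS(hg.Color(0.8, 0.2, 0.2, 1))   # RGBA -> hue/luminance/saturation
back = hg.FromHLS(hls)                        # and back to RGBA
gray = hg.SetSaturation(hg.Color.Red, 0.0)    # fully desaturated copy
```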

View File

@ -1,33 +1,38 @@
# Generates the Harfang API XML description used to generate the documentation.
add_custom_command(
OUTPUT
${CMAKE_CURRENT_BINARY_DIR}/harfang/api.xml
COMMAND
${Python3_EXECUTABLE} bind.py ${CMAKE_CURRENT_SOURCE_DIR}/../binding/bind_harfang.py --xml --out ${CMAKE_CURRENT_BINARY_DIR}/harfang ${HG_BINDING_DEFINES}
MAIN_DEPENDENCY
${CMAKE_SOURCE_DIR}/binding/bind_harfang.py
WORKING_DIRECTORY
${HG_FABGEN_PATH}
COMMENT
"Generating Harfang API description file")
# online docs
add_custom_target(online_docs ALL
${Python3_EXECUTABLE} doc_to_html.py --project_name Harfang --doc_path doc --api_path ${CMAKE_CURRENT_BINARY_DIR}/harfang/api.xml --out_path ${CMAKE_INSTALL_PREFIX}/online_docs --version ${HG_VERSION} --online
WORKING_DIRECTORY
${CMAKE_CURRENT_SOURCE_DIR}
DEPENDS
${CMAKE_CURRENT_BINARY_DIR}/harfang/api.xml)
install(DIRECTORY img DESTINATION online_docs COMPONENT online_docs)
set_target_properties(online_docs PROPERTIES FOLDER "harfang/doc")
add_custom_command(
OUTPUT
${CMAKE_CURRENT_BINARY_DIR}/harfang/api.xml
COMMAND
${Python3_EXECUTABLE} bind.py ${CMAKE_SOURCE_DIR}/binding/bind_harfang.py --xml --out ${CMAKE_CURRENT_BINARY_DIR}/harfang ${HG_BINDING_DEFINES}
MAIN_DEPENDENCY
${CMAKE_SOURCE_DIR}/binding/bind_harfang.py
WORKING_DIRECTORY
${HG_FABGEN_PATH}
COMMENT
"Generating Harfang API description file")
add_custom_target(gen_api_xml ALL DEPENDS ${CMAKE_CURRENT_BINARY_DIR}/harfang/api.xml)
# add_custom_command(
# OUTPUT
# ${CMAKE_CURRENT_BINARY_DIR}/harfang/api.xml
# COMMAND
# ${Python3_EXECUTABLE} bind.py ${CMAKE_CURRENT_SOURCE_DIR}/../binding/bind_harfang.py --xml --out ${CMAKE_CURRENT_BINARY_DIR}/harfang ${HG_BINDING_DEFINES}
# MAIN_DEPENDENCY
# ${CMAKE_SOURCE_DIR}/binding/bind_harfang.py
# WORKING_DIRECTORY
# ${HG_FABGEN_PATH}
# COMMENT
# "Generating Harfang API description file")
# offline docs
configure_file(doc/index.html.in ${CMAKE_INSTALL_PREFIX}/offline_docs/index.html @ONLY IMMEDIATE)
add_custom_target(offline_docs ALL
${Python3_EXECUTABLE} doc_to_html.py --project_name Harfang --doc_path doc --api_path ${CMAKE_CURRENT_BINARY_DIR}/harfang/api.xml --out_path ${CMAKE_INSTALL_PREFIX}/offline_docs --version ${HG_VERSION}
WORKING_DIRECTORY
${CMAKE_CURRENT_SOURCE_DIR}
DEPENDS
${CMAKE_CURRENT_BINARY_DIR}/harfang/api.xml)
install(DIRECTORY img DESTINATION offline_docs/content COMPONENT offline_docs)
set_target_properties(offline_docs PROPERTIES FOLDER "harfang/doc")
#configure_file(doc/index.html.in ${CMAKE_INSTALL_PREFIX}/offline_docs/index.html @ONLY IMMEDIATE)
#add_custom_target(offline_docs ALL
# ${Python3_EXECUTABLE} doc_to_html.py --project_name Harfang --doc_path doc --api_path ${CMAKE_CURRENT_BINARY_DIR}/harfang/api.xml --out_path ${CMAKE_INSTALL_PREFIX}/offline_docs --version ${HG_VERSION}
# WORKING_DIRECTORY
# ${CMAKE_CURRENT_SOURCE_DIR}
# DEPENDS
# ${CMAKE_CURRENT_BINARY_DIR}/harfang/api.xml)
#install(DIRECTORY img DESTINATION offline_docs/content COMPONENT offline_docs)
#set_target_properties(offline_docs PROPERTIES FOLDER "harfang/doc")

View File

@ -1,3 +1,3 @@
Add this component to a [Node] to implement the camera aspect.
Add this component to a [Node] to implement the camera aspect.
Create a camera component with [Scene_CreateCamera], use [CreateCamera] to create a complete camera node.

View File

@ -1,3 +1,3 @@
Duplicate a node and its child hierarchy. Resources will be loaded from the assets system.
Duplicate a node and its child hierarchy. Resources will be loaded from the assets system.
See [man.Assets].

View File

@ -1,3 +1,3 @@
Duplicate a node and its child hierarchy. Resources will be loaded from the local filesystem.
Duplicate a node and its child hierarchy. Resources will be loaded from the local filesystem.
See [man.Assets].

View File

@ -1,3 +1,3 @@
Duplicate a node. Resources will be loaded from the assets system.
Duplicate a node. Resources will be loaded from the assets system.
See [man.Assets].

View File

@ -1,3 +1,3 @@
Duplicate a node. Resources will be loaded from the local filesystem.
Duplicate a node. Resources will be loaded from the local filesystem.
See [man.Assets].

View File

@ -1,4 +1,4 @@
Advance the rendering backend to the next frame, execute all queued rendering commands.
This function returns the backend's current frame.
Advance the rendering backend to the next frame, execute all queued rendering commands.
This function returns the backend's current frame.
The frame counter is used by asynchronous functions such as [CaptureTexture]. You must wait for the frame counter to reach or exceed the value returned by an asynchronous function before accessing its result.
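A hedged illustration of the wait pattern described above; `capture_frame` stands for the value returned by an asynchronous call such as [CaptureTexture] (its exact signature is not shown here):
```python
# keep advancing the backend until the asynchronous result is ready
while hg.Frame() < capture_frame:
    pass
```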

1
doc/doc/FromHLS.md Normal file
View File

@ -0,0 +1 @@
Convert input hue/luminance/saturation color to RGBA, alpha channel is left unmodified.

View File

@ -1,2 +1,2 @@
A view frustum, perspective or orthographic, holding the necessary information to perform culling queries.
A view frustum, perspective or orthographic, holding the necessary information to perform culling queries.
It can be used to test whether a volume is inside or outside the frustum it represents.

View File

@ -1 +1,3 @@
Return the total elapsed time since the object creation or the last call to [ResetClock].
Return the current clock since the last call to [TickClock] or [ResetClock].
See [time_to_sec_f] to convert the returned time to seconds.
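For example, a loop typically advances the clock once per iteration and converts the accumulated clock for display:
```python
hg.ResetClock()
for _ in range(3):
    dt = hg.TickClock()                        # advance the clock, frame delta in ticks
    elapsed = hg.time_to_sec_f(hg.GetClock())  # total clock in seconds since the reset
```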

View File

@ -0,0 +1 @@
Retrieves the color texture attachment.

View File

@ -0,0 +1 @@
Retrieves the depth texture attachment.

1
doc/doc/GetTextures.md Normal file
View File

@ -0,0 +1 @@
Returns the color and depth texture attachments.

View File

@ -1,3 +1,3 @@
Create and append to a full screen menu-bar.
Create and append to a full screen menu-bar.
Note: Only call [ImGuiEndMainMenuBar] if this returns `true`.
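A hedged sketch of the begin/end pairing rule described above (menu entry names are placeholders):
```python
if hg.ImGuiBeginMainMenuBar():
    if hg.ImGuiBeginMenu("File"):
        hg.ImGuiEndMenu()        # only because ImGuiBeginMenu returned True
    hg.ImGuiEndMainMenuBar()     # only because ImGuiBeginMainMenuBar returned True
```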

View File

@ -1,3 +1,3 @@
Create a sub-menu entry.
Create a sub-menu entry.
Note: Only call [ImGuiEndMenu] if this returns `true`.

View File

@ -1,3 +1,3 @@
Start appending to the menu-bar of the current window (requires the `WindowFlags_MenuBar` flag).
Start appending to the menu-bar of the current window (requires the `WindowFlags_MenuBar` flag).
Note: Only call [ImGuiEndMenuBar] if this returns `true`.

View File

@ -1,3 +1,3 @@
Return `true` if the popup is open; you can then start outputting to it.
Return `true` if the popup is open; you can then start outputting to it.
Note: Only call [ImGuiEndPopup] if this returns `true`.

View File

@ -1,3 +1,3 @@
Returns the width of the item given pushed settings and the current cursor position.
Returns the width of the item given pushed settings and the current cursor position.
Note: This is not necessarily the width of last item.

View File

@ -1 +1,5 @@
Checkbox widget returning the check state.
Display a checkbox widget. Returns an interaction flag (user interacted with the widget) and the current widget state (checked or not after user interaction).
```python
was_clicked, my_value = hg.ImGuiCheckBox('My value', my_value)
```

View File

@ -1,5 +1,5 @@
Begin a column layout section.
To move to the next column use [ImGuiNextColumn]. To end a column layout section pass `1` to this function.
Begin a column layout section.
To move to the next column use [ImGuiNextColumn]. To end a column layout section pass `1` to this function.
**Note:** Current implementation supports a maximum of 64 columns.
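A minimal sketch of the column workflow described above; the column count is passed to [ImGuiColumns] and the section is closed by passing `1`:
```python
hg.ImGuiColumns(2)          # begin a two-column section
hg.ImGuiText("left cell")
hg.ImGuiNextColumn()
hg.ImGuiText("right cell")
hg.ImGuiColumns(1)          # back to a single column, ends the section
```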

View File

@ -1,3 +1,3 @@
Was the last item active.
Was the last item active.
e.g. a button being held or a text field being edited; items that do not interact will always return `false`.

View File

@ -1,9 +1,9 @@
Mark a named popup as open.
Popup windows are closed when the user:
* Clicks outside of their client rect,
* Activates a pressable item,
* [ImGuiCloseCurrentPopup] is called within a [ImGuiBeginPopup]/[ImGuiEndPopup] block.
Mark a named popup as open.
Popup windows are closed when the user:
* Clicks outside of their client rect,
* Activates a pressable item,
* [ImGuiCloseCurrentPopup] is called within a [ImGuiBeginPopup]/[ImGuiEndPopup] block.
Popup identifiers are relative to the current ID stack so [ImGuiOpenPopup] and [ImGuiBeginPopup] need to be at the same level of the ID stack.
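A hedged sketch showing [ImGuiOpenPopup] and [ImGuiBeginPopup] used at the same level of the ID stack (widget labels are placeholders):
```python
if hg.ImGuiButton("Edit..."):
    hg.ImGuiOpenPopup("edit_popup")
if hg.ImGuiBeginPopup("edit_popup"):
    if hg.ImGuiSelectable("Close"):
        hg.ImGuiCloseCurrentPopup()
    hg.ImGuiEndPopup()   # only because ImGuiBeginPopup returned True
```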

View File

@ -1,11 +1,11 @@
Selectable item.
The following `width` values are possible:
* `= 0.0`: Use remaining width.
* `> 0.0`: Specific width.
The following `height` values are possible:
* `= 0.0`: Use label height.
Selectable item.
The following `width` values are possible:
* `= 0.0`: Use remaining width.
* `> 0.0`: Specific width.
The following `height` values are possible:
* `= 0.0`: Use label height.
* `> 0.0`: Specific height.

View File

@ -1,3 +1,3 @@
Wrapped static text.
Wrapped static text.
Note that this won't work on an auto-resizing window if there's no other widgets to extend the window width, you may need to set a size using [ImGuiSetNextWindowSize].
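A short sketch of the workaround suggested above, assuming the optional condition argument of [ImGuiSetNextWindowSize] can be omitted:
```python
hg.ImGuiSetNextWindowSize(hg.Vec2(320, 0))   # give the auto-resizing window a width
if hg.ImGuiBegin("About"):
    hg.ImGuiTextWrapped("A long paragraph that should wrap instead of stretching the window.")
hg.ImGuiEnd()
```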

View File

@ -1,6 +1,6 @@
Initialize the Input system. Must be invoked before any call to [WindowSystemInit] to work properly.
```python
hg.InputInit()
hg.WindowSystemInit()
Initialize the Input system. Must be invoked before any call to [WindowSystemInit] to work properly.
```python
hg.InputInit()
hg.WindowSystemInit()
```

View File

@ -1,8 +1,8 @@
Create a projection frustum. This object can then be used to perform culling using [TestVisibility].
```python
# Compute a perspective matrix
proj = hg.ComputePerspectiveProjectionMatrix(0.1, 1000, hg.FovToZoomFactor(math.pi/4), 1280/720)
# Make a frustum from this projection matrix
frustum = hg.MakeFrustum(proj)
Create a projection frustum. This object can then be used to perform culling using [TestVisibility].
```python
# Compute a perspective matrix
proj = hg.ComputePerspectiveProjectionMatrix(0.1, 1000, hg.FovToZoomFactor(math.pi/4), 1280/720)
# Make a frustum from this projection matrix
frustum = hg.MakeFrustum(proj)
```

View File

@ -1,2 +1,2 @@
Set the [Camera] component of a node.
Set the [Camera] component of a node.
See [Scene_CreateCamera].

View File

@ -1,3 +1,3 @@
Set the [Light] component of a node.
Set the [Light] component of a node.
See [Scene_CreateLight], [Scene_CreatePointLight], [Scene_CreateSpotLight] or [Scene_CreateLinearLight].

View File

@ -1,3 +1,3 @@
Set the [LuaScript] component of a node.
Set the [LuaScript] component of a node.
See [Scene_CreateLuaScript].

View File

@ -1,3 +1,3 @@
Set the [Object] component of a node.
Set the [Object] component of a node.
See [Scene_CreateObject].

View File

@ -1,3 +1,3 @@
Set the [Transform] component of a node.
Set the [Transform] component of a node.
See [Scene_CreateTransform].

View File

@ -1,3 +1,3 @@
Open a file in binary mode.
Open a file in binary mode.
See [OpenText], [OpenWrite], [OpenWriteText]

View File

@ -1,3 +1,3 @@
Open a file as text. Return a handle to the opened file.
Open a file as text. Return a handle to the opened file.
See [Open], [OpenWrite], [OpenWriteText]

View File

@ -1,3 +1,3 @@
Compute the left and right eye view states from an OpenVR state.
Compute the left and right eye view states from an OpenVR state.
See [OpenVRGetState].

View File

@ -1,3 +1,3 @@
Submit the left and right eye textures to the OpenVR compositor.
Submit the left and right eye textures to the OpenVR compositor.
See [OpenVRCreateEyeFrameBuffer].

View File

@ -1,3 +1,3 @@
Open a file as binary in write mode.
Open a file as binary in write mode.
See [Open], [OpenText], [OpenWriteText]

View File

@ -1,3 +1,3 @@
Open a file as text in write mode.
Open a file as text in write mode.
See [Open], [OpenText], [OpenWrite]

View File

@ -1,6 +1,6 @@
Contains the result of a physics raycast.
* `P`: Position of the raycast hit
* `N`: Normal of the raycast hit
* `Node`: Node hit by the raycast
Contains the result of a physics raycast.
* `P`: Position of the raycast hit
* `N`: Normal of the raycast hit
* `Node`: Node hit by the raycast
* `t`: Parametric value of the intersection, the ratio of the distance to the hit over the length of the raycast
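A hedged sketch of consuming a raycast result, assuming the member names listed above are exposed as-is and that `scene` already holds the physics setup:
```python
physics = hg.SceneBullet3Physics()
hit = physics.RaycastFirstHit(scene, hg.Vec3(0, 10, 0), hg.Vec3(0, -10, 0))
print(hit.P, hit.N, hit.t)   # hit position, normal and parametric distance along the ray
```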

View File

@ -1,22 +1,22 @@
Get a node by its absolute path in the node hierarchy.
A node path is constructed as follows:
- Nodes are referred to by their name.
- To address the child of a node, use the `/` delimiter between its parent name and the child name.
- To address a node inside an instance component, use the `:` delimiter.
- There is no limit on the number of delimiters you can use.
Examples:
Get the node named `child` parented to the `root` node.
```python
child = scene.GetNodeEx('root/child')
```
Get the node named `dummy` instantiated by the `root` node.
```python
dummy = my_scene.GetNodeEx('root:dummy')
```
Get a node by its absolute path in the node hierarchy.
A node path is constructed as follows:
- Nodes are referred to by their name.
- To address the child of a node, use the `/` delimiter between its parent name and the child name.
- To address a node inside an instance component, use the `:` delimiter.
- There is no limit on the number of delimiters you can use.
Examples:
Get the node named `child` parented to the `root` node.
```python
child = scene.GetNodeEx('root/child')
```
Get the node named `dummy` instantiated by the `root` node.
```python
dummy = my_scene.GetNodeEx('root:dummy')
```

3
doc/doc/SetSaturation.md Normal file
View File

@ -0,0 +1,3 @@
Return a copy of the input RGBA color with its saturation set to the specified value, alpha channel is left unmodified.
See [ToHLS] and [FromHLS].

View File

@ -1,3 +1,3 @@
Record the elapsed time since the last call to this function.
Advance the engine clock and return the elapsed time since the last call to this function. See [GetClock] to retrieve the current clock.
See [GetClockDt].

1
doc/doc/ToHLS.md Normal file
View File

@ -0,0 +1 @@
Convert input RGBA color to hue/luminance/saturation, alpha channel is left unmodified.

View File

@ -1,3 +1,3 @@
Submit an empty primitive to the view.
Submit an empty primitive to the view.
See [Frame].

View File

@ -1,3 +0,0 @@
.title Function/member index
%AllFunctionIndex%

View File

@ -1,76 +0,0 @@
.title An application using the render system
This page describes a complete Harfang application in Python displaying a triangle using the [RenderSystem].
.img("man.AnApplicationUsingTheRenderSystem.png")
The complete source for this application can be found in the [man.Tutorials].
## Program overview
To display a triangle using the render system we will need to:
1. Create the renderer and a render system wrapping it,
* Display the triangle in a loop until the end of execution condition is met.
Most steps of this program are explained in detail in the [man.AnApplicationUsingTheRenderer] page.
## Creating the render system
This application uses [Renderer] and wraps it with [RenderSystem].
```python
# create the renderer
renderer = hg.CreateRenderer()
renderer.Open()
# open a new window
win = hg.NewWindow(480, 240)
# create a new output surface for the newly opened window
surface = renderer.NewOutputSurface(win)
renderer.SetOutputSurface(surface)
# initialize the render system, which is used to draw through the renderer
render_system = hg.RenderSystem()
render_system.Initialize(renderer)
```
The render system is ready to work.
## The application render loop
### A word on vertex transformation
Since we will be displaying the triangle using the render system we have less control over the shader that is going to be used. The render system core resources include shaders to render all the common combinations of vertex attributes.
However, unlike the shader we used in the equivalent renderer program, _all render system shaders make use of the renderer ModelViewProjection matrix_. So we first need to initialize it.
For the purpose of this program a simple 2D projection system will do. The following call will set a projection matrix that maps vertex coordinates to pixels with (0;0) in the lower-left corner of the viewport, +X going right and +Y going up.
```python
renderer.Set2DMatrices()
```
### Drawing the triangle
The application loops until the default renderer window is closed and starts by clearing the render target to a solid green color.
```python
while hg.IsWindowOpen(win):
    renderer.Clear(hg.Color.Green)
```
Next, we tell the render system to draw the triangle using the helper function it provides for this task.
```python
vertices = [hg.Vector3(0, 0, 0), hg.Vector3(0, 240, 0), hg.Vector3(480, 240, 0)]
render_system.DrawTriangleAuto(1, vertices, color)
```
Finally, the loop ends by showing the draw result and updating the renderer output window.
```python
hg.Frame()
hg.UpdateWindow(win)
```

View File

@ -1,159 +0,0 @@
.title An application using the renderer
This page describes a complete Harfang application in Python displaying a triangle using the [Renderer].
The complete source for this application can be found in the [man.Tutorials].
## Program overview
To display a triangle using the renderer we will need to:
1. Create the renderer,
* Create a window and an output surface,
* Describe the geometry we intend to draw,
* Provide the geometry to the renderer along with a shader to draw it.
* Display the triangle in a loop until the end of execution condition is met.
## Creating the renderer
We first need to create an object of [Renderer] type. By default this application uses the OpenGL implementation of the renderer interface, but any other available implementation can be used in its place.
```python
renderer = hg.CreateRenderer()
renderer.Open()
```
The [Renderer] object is first created then its [Renderer_Open] member function is called. At this point no window is created.
## Creating the window
We create a new [Window] using the [NewWindow] function.
```python
win = hg.NewWindow(640, 480)
```
We create a [Surface] for the newly created window and set it as the [Renderer] new output surface.
```python
surface = renderer.NewOutputSurface(win)
renderer.SetOutputSurface(surface)
```
## Describing the geometry to draw
The [Renderer] API works at the lowest level of abstraction and uses [GpuBuffer] and [VertexLayout] together with a [Shader] to build and draw primitives.
### Index buffer
A triangle is built from 3 vertices which are connected in sequential order (vertex 0 to vertex 1 to vertex 2). So we need a vertex buffer of 3 vertices and an index buffer of 3 indexes containing the following values: 0, 1 and 2.
The index values need to be packed into a memory buffer before they can be sent to the GPU. The [BinaryBlob] class can be used to this effect.
```python
data = hg.BinaryData()
data.WriteUInt16s([0, 1, 2]) # we use 16 bit packing of the index values by writing shorts
```
The index buffer is then very easily constructed from the binary blob.
```python
idx = renderer.NewBuffer()
renderer.CreateBuffer(idx, data, hg.GpuBufferIndex)
```
### Vertex buffer
A vertex can be made of any number of attributes (position, color, UV, etc...) which are then fed into the shader used to draw the primitives. To specify the layout of a vertex buffer we use the [VertexLayout] class.
The triangle we will display is the simplest one possible, displaying a single color provided as a shader parameter, so its vertices only need a position attribute. The position of each vertex will be stored using 3 float values.
```python
vtx_layout = hg.VertexLayout()
vtx_layout.AddAttribute(hg.VertexPosition, 3, hg.VertexFloat)
```
We then prepare the vertex buffer content using another binary blob.
```python
data = hg.BinaryData()
data.WriteFloats([-0.5, -0.5, 0.5, -0.5, 0.5, 0.5, 0.5, 0.5, 0.5])
```
We can now create the vertex buffer.
```python
vtx = renderer.NewBuffer()
renderer.CreateBuffer(vtx, data, hg.GpuBufferVertex)
```
The buffers are complete and ready to render.
## Create a shader to render the geometry
The shader we use to display the triangle takes the vertex position as-is and outputs the same color for each pixel. The output color is an input parameter named `u_color` which we will change programmatically.
```glsl
in { vec4 u_color; }
variant {
vertex {
out { vec4 v_color; }
source %{
v_color = u_color;
%out.position% = vec4(vPosition, 1.0);
%}
}
pixel {
in { vec4 v_color; }
source %{
%out.color% = v_color;
%}
}
}
```
For more information on the shader structure, refer to the [man.Shader] page.
To load the shader from a file we first mount a file driver (cf. [man.Assets]).
```python
hg.MountFileDriver(hg.StdFileDriver())
shader_path = os.path.join(os.getcwd(), "../_data/shader_2d_color.isl")
shader = renderer.LoadShader(shader_path)
```
**Note:** A default value for `u_color` can be specified in the shader declaration so that we do not have to set it programmatically.
## The application render loop
Everything is now ready so we enter the main application loop in which we render the triangle. The application loops until the default renderer window is closed and starts by clearing the render target to a solid red color.
```python
while hg.IsWindowOpen(win):
    renderer.Clear(hg.Color.Red)
```
Next, the shader and its `u_color` input value are set and the index/vertex buffers are drawn to the render target.
```python
renderer.SetShader(shader)
renderer.SetShaderFloat4("u_color", 0, 1, 0, 1)
hg.DrawBuffers(renderer, 3, idx, vtx, vtx_layout)
```
**Note:** The [DrawBuffers] call specifies the drawing of 3 indexes and uses the default value for the index type and primitive type. Those are 16 bit indexes and triangle primitives.
Finally, the loop ends by committing the draw call, showing the draw result and updating the renderer output window.
```python
renderer.DrawFrame()
renderer.ShowFrame()
hg.UpdateWindow(win)
hg.EndFrame()
```

View File

@ -1 +0,0 @@
.title Architecture

View File

@ -1,5 +0,0 @@
.title Overview
Assemble is a scene editor for the Harfang library written in C++ using the DearImGui library.
It can be used to prepare assets for your Harfang programs or as a stand-alone content creation tool with publishing capabilities.

View File

@ -1,46 +1,46 @@
.title Compiling to Assets
Compiling project resources into assets is done using the `assetc` command-line tool.
Upon invocation, it will scan the input folder and compile all resources in a supported format to the output folder. Files in an unsupported format are copied unmodified to the output folder.
*It is very important that you treat the compiled output folder as entirely disposable* and *only ever* perform modifications in the input folder. Output assets will be different for each platform you compile to.
Again, *do not ever work in the assets folder*.
If you are unclear on the resources/assets distinction see [man.Assets].
## Drag & drop
The easiest way is to drag and drop the resources folder on the assetc executable:
![assetc drag & drop](../img/assetc.gif)
## Command-Line
If you need more control over the compilation options, the command line gets the following parameters:
```
assetc <input> [output PATH] [-daemon] [-platform PLATFORM] [-api API] [-defines DEFINES] [-job COUNT]
[-toolchain PATH] [-progress] [-log_to_std_out] [-debug] [-quiet] [-verbose]
```
Option | Shortcut | Description
-------|----------|------------
`-input` | | Input project resources folder.
`-output` | | Output compiled assets folder. If unspecified, the input folder path suffixed with `_compiled` is used.
`-daemon` | `-d` | Run the compiler in daemon mode. The compiler will constantly monitor the input folder and compile its content as it is modified.
`-platform` | `-p` | Platform to target.
`-api` | | Graphics API to target. Some platforms (eg. PC) might support multiple graphics API (DX11, DX12, GL, ...).
`-defines` | `-D` | Semicolon-separated defines to pass to the shader compiler (eg. FLAG;VALUE=2).
`-job` | `-j` | Maximum number of parallel jobs (0 = automatic).
`-toolchain` | `-t` | Path to the toolchain folder.
`-progress` | | Output progress to the standard output.
`-log_to_std_out` | `-l` | Log errors to the standard output.
`-debug` | | Compile in debug mode (eg. output debug information in shaders).
`-quiet` | `-q` | Disable all build information but errors.
`-verbose` | `-v` | Output additional information about the compilation process.
*Note:* When run in daemon mode `assetc` will not exit after its initial run and will keep watching the input folder. When a resource is modified it will automatically be compiled to the output folder.
See [man.GLTF] and [man.FBX] to convert common 3d formats to Harfang resources.
.title Compiling to Assets
Compiling project resources into assets is done using the `assetc` command-line tool.
Upon invocation, it will scan the input folder and compile all resources in a supported format to the output folder. Files in an unsupported format are copied unmodified to the output folder.
*It is very important that you treat the compiled output folder as entirely disposable* and *only ever* perform modifications in the input folder. Output assets will be different for each platform you compile to.
Again, *do not ever work in the assets folder*.
If you are unclear on the resources/assets distinction see [man.Assets].
## Drag & drop
The easiest way is to drag and drop the resources folder on the assetc executable:
![assetc drag & drop](../img/assetc.gif)
## Command-Line
If you need more control over the compilation options, the command line gets the following parameters:
```
assetc <input> [output PATH] [-daemon] [-platform PLATFORM] [-api API] [-defines DEFINES] [-job COUNT]
[-toolchain PATH] [-progress] [-log_to_std_out] [-debug] [-quiet] [-verbose]
```
Option | Shortcut | Description
-------|----------|------------
`-input` | | Input project resources folder.
`-output` | | Output compiled assets folder. If unspecified, the input folder path suffixed with `_compiled` is used.
`-daemon` | `-d` | Run the compiler in daemon mode. The compiler will constantly monitor the input folder and compile its content as it is modified.
`-platform` | `-p` | Platform to target.
`-api` | | Graphics API to target. Some platforms (eg. PC) might support multiple graphics API (DX11, DX12, GL, ...).
`-defines` | `-D` | Semicolon-separated defines to pass to the shader compiler (eg. FLAG;VALUE=2).
`-job` | `-j` | Maximum number of parallel jobs (0 = automatic).
`-toolchain` | `-t` | Path to the toolchain folder.
`-progress` | | Output progress to the standard output.
`-log_to_std_out` | `-l` | Log errors to the standard output.
`-debug` | | Compile in debug mode (eg. output debug information in shaders).
`-quiet` | `-q` | Disable all build information but errors.
`-verbose` | `-v` | Output additional information about the compilation process.
*Note:* When run in daemon mode `assetc` will not exit after its initial run and will keep watching the input folder. When a resource is modified it will automatically be compiled to the output folder.
See [man.GLTF] and [man.FBX] to convert common 3d formats to Harfang resources.
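For instance, a one-shot compilation with progress output might look like this (folder names are hypothetical, only documented options are used):
```
assetc my_project_resources my_project_assets -progress
```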

View File

@ -1,14 +1,14 @@
.title Resources & Assets
By convention, we call production files: **resources** (eg. *the project resources*).
By convention, production files are called: **resources** (eg. *the project resources*).
Files issued from the compilation of production files for a specific target are called: **assets** (eg. *the project assets for iOS*).
Files issued from the compilation of production files for a specific target are called: **assets** (eg. *the project assets for Windows PC*).
[TOC]
## Resource Formats
During development, resources are stored in *production formats*; these formats are meant for efficient editing. Before they can be loaded at runtime, resources must be compiled into their *runtime formats* as assets, which are specific to and optimized for the target platform.
During development, resources are stored in *production formats*; these formats are meant for efficient editing. Before they can be loaded at runtime, resources must be compiled into their *runtime formats* as assets, which are specific to the target platform.
To compile a project's resources use:

View File

@ -16,6 +16,20 @@ Harfang 2.0.0 for CPython 3.2+ on windows-x64 (build ba08463ee9e6c0c93960230fb88
See http://harfang3d.com/license for licensing terms
```
### Troubleshooting
> _`pip install` fails with a message saying `harfang is not a supported wheel on this platform`._
>
> Make sure that your pip install is up to date. Outdated pip versions have been known to cause such problems.
> _The dynamic library fails to load when importing the `harfang` module in Python._
>
> Make sure your system has the required runtime dependencies installed. It should have OpenAL and on Windows the Visual C++ 2017 redistributable installed.
> _`ImportError: DLL load failed: %1 is not a valid Win32 application.` error when importing the `harfang` module._
>
> This error usually happens when installing the incorrect version of Harfang for your Python version. For example when installing the 64 bit version of Harfang on a 32 bit install of the Python interpreter.
## First Program
Let's write a simple test program, create a new file named `test.py` and paste the following code into it.

View File

@ -1,3 +0,0 @@
.title API Classes
%ClassIndex%

View File

@ -1,5 +0,0 @@
.title Components
## Node components
[Transform], [Object], [Light], [Camera], [LuaScript], [RigidBody] and [Collision].

View File

@ -1,5 +0,0 @@
.title API Constants
%GlobalConstantsIndex%
%GlobalConstantsDocumentation%

View File

@ -1,3 +0,0 @@
.title Core runtime resources
In order to properly work the Harfang library needs to access a number of core resources.

View File

@ -1,41 +0,0 @@
.title DearImGui
Harfang embeds the [dear imGui](https://github.com/ocornut/imgui) library.
Dear imgui is an immediate GUI library designed to quickly build debugging/profiling user interface.
It is available at all times through the ImGui* functions and only requires a [Renderer] instance to work. Only a single instance of the library is available and its output is displayed right before executing a [Renderer_ShowFrame] call.
Accessing ImGui from multiple threads requires synchronization using the [ImGuiLock] and [ImGuiUnlock] functions.
### Minimal sample code showing a window
```python
import harfang as hg
hg.LoadPlugins()
renderer = hg.CreateRenderer()
renderer.Open()
win = hg.NewWindow(640, 480)
surface = renderer.NewOutputSurface(win)
renderer.SetOutputSurface(surface)
hg.ImGuiSetOutputSurface(surface)
while True:
    hg.ImGuiBegin("window")
    hg.ImGuiEnd()
    renderer.Clear(hg.Color.Red)
    renderer.ShowFrame()
    hg.UpdateWindow(win)
    hg.EndFrame()
renderer.DestroyOutputSurface(surface)
hg.DestroyWindow(win)
renderer.Close()
```

View File

@ -1,13 +0,0 @@
.title Debugging
## Debugging general issues
The first thing to check when your program fails is the engine log output. The log output sends all engine debugging messages to the console.
The log system defaults to only displaying warning and error level messages. Enabling debug and standard level messages by calling `hg.SetLogLevel(hg.LogAll)` (see [SetLogLevel]) will simplify identifying why something is going wrong.
Complex error messages might include detailed information on the error. By default, details are filtered out by the system and must be enabled using `hg.SetLogIsDetailed(True)` (see [SetLogIsDetailed]).
## Debugging scene issues
A scene is too complex an object to debug through the log system. The engine debugger includes a scene debugger which is the perfect tool for delving into the data structures of a scene. Refer to the [man.EngineDebugger] manual page for how to use it.

View File

@ -1,76 +0,0 @@
.title Drawing graphic primitives
This page describes a complete Harfang application in Python displaying lines, triangles and polygons using low-level graphics functionalities.
.img("man.AnApplicationUsingTheRenderSystem.png")
The complete source for this application can be found in the [man.Tutorials].
## Program overview
To display a triangle using the render system we will need to:
1. Create the renderer and a render system wrapping it,
* Display the triangle in a loop until the end of execution condition is met.
Most steps of this program are explained in detail in the [man.AnApplicationUsingTheRenderer] page.
## Creating the render system
This application uses [Renderer] and wraps it with [RenderSystem].
```python
# create the renderer
renderer = hg.CreateRenderer()
renderer.Open()
# open a new window
win = hg.NewWindow(480, 240)
# create a new output surface for the newly opened window
surface = renderer.NewOutputSurface(win)
renderer.SetOutputSurface(surface)
# initialize the render system, which is used to draw through the renderer
render_system = hg.RenderSystem()
render_system.Initialize(renderer)
```
The render system is ready to work.
## The application render loop
### A word on vertex transformation
Since we will be displaying the triangle using the render system we have less control over the shader that is going to be used. The render system core resources include shaders to render all the common combinations of vertex attributes.
However, unlike the shader we used in the equivalent renderer program, _all render system shaders make use of the renderer ModelViewProjection matrix_. So we first need to initialize it.
For the purpose of this program a simple 2D projection system will do. The following call will set a projection matrix that maps vertex coordinates to pixels with (0;0) in the lower-left corner of the viewport, +X going right and +Y going up.
```python
renderer.Set2DMatrices()
```
### Drawing the triangle
The application loops until the default renderer window is closed and starts by clearing the render target to a solid green color.
```python
while hg.IsWindowOpen(win):
    renderer.Clear(hg.Color.Green)
```
Next, we tell the render system to draw the triangle using the helper function it provides for this task.
```python
vertices = [hg.Vector3(0, 0, 0), hg.Vector3(0, 240, 0), hg.Vector3(480, 240, 0)]
render_system.DrawTriangleAuto(1, vertices, color)
```
Finally, the loop ends by showing the draw result and updating the renderer output window.
```python
hg.Frame()
hg.UpdateWindow(win)
```

View File

@ -1,24 +0,0 @@
.title Engine debugger
Harfang integrates a debugger written in [man.Dearimgui]. The debugger can be used to inspect, debug and profile many systems of the engine at runtime.
Use the [SetEnableDebugger] function to enable and disable the debugger. The debugger interface will overlay itself over your program output before each call to [Renderer_ShowFrame].
## Engine systems
The debugger monitors the following engine systems.
* **Renderer:** Statistics for the current [Renderer].
* **Render system:** Statistics for the current [RenderSystem].
* **Texture cache:** Display the content of the engine texture cache.
* **Geometry cache:** Display the content of the engine geometry cache.
* **Material cache:** Display the content of the engine material cache.
* **Log window:** Display the engine log output.
## Scene debugger
.img("man.scene_debugger.jpg")
The debugger tracks all scene creation and deletion and keeps a list of available scenes in the *Scene debugger* menu. You can select a specific scene to monitor or select the *automatic* option from the *Scene debugger* menu to track the last displayed scene.
The scene debugger can display the full scene tree, inspect and modify [Node] and their [man.Component].

View File

@ -1,5 +0,0 @@
.title API Enumerations
%GlobalEnumIndex%
%GlobalEnumDocumentation%

View File

@ -1,7 +0,0 @@
.title Examples
## Note on code examples
Most examples presented in this manual are written in Python.
The API exposed to both languages being identical, adapting the Python examples to Lua usually requires little more than grammar changes.

View File

@ -1,4 +0,0 @@
.title Extending the editor
* [man.ExtendingTheProjectExplorer]
* [man.ExtendingTheScenePlugin]

View File

@ -1,14 +0,0 @@
.title Extending the project explorer
## Writing a project explorer plugin
A project explorer plugin must be declared as a class extending the `plugin.IProjectExplorerPlugin` interface.
```python
class Plugin(IProjectExplorerPlugin):
""" A new project explorer plugin """
```
The following methods must be implemented by the plugin:
* `process_drop_event(dropped_urls, target_url)`: Process a drop event over the project explorer. Return `plugin.InterruptPluginChain` to stop execution at your plugin.
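A hedged sketch of a complete plugin implementing this single method; the file-type filter is purely illustrative:
```python
class Plugin(IProjectExplorerPlugin):
    """Example plugin reacting to scene file drops (illustrative only)."""
    def process_drop_event(self, dropped_urls, target_url):
        if all(url.endswith(".scn") for url in dropped_urls):
            print("scenes dropped on", target_url)
            return InterruptPluginChain  # stop the plugin chain here
```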

View File

@ -1,24 +0,0 @@
.title Extending the scene plugin
## Writing a scene tool plugin
A scene tool plugin must be declared as a class extending the `plugin.ISceneToolPlugin` interface.
```python
class Plugin(ISceneToolPlugin):
""" New Scene plugin """
```
The following methods must be implemented by the plugin:
* `on_selection_changed(selection)`: Called by the scene plugin whenever the selection is changed. The complete new selection is passed to the plugin.
* `on_node_selection_changed(node_selection)`: Same as above but a list of the scene nodes in the new selection is passed to the plugin.
* `on_frame_complete()`: This function is called when the current frame is complete. The plugin is given a chance to draw additional content at this point.
**Note:** This call is done from the rendering thread. The synchronous renderer object [^1] can be used from this location. The renderer matrix stack is automatically saved and restored around this call.
* `on_mouse_event(event, mouse, dt_frame)`: Called whenever a mouse event happens over the viewport.
[^1]: Available through the `engine.renderer` symbol.
## Mouse events
TODO

View File

@ -1,51 +0,0 @@
.title Feature List
### General
* Cross-platform
* Lightweight code base
* Small memory footprint
### Interoperability
* Command line FBX converter (extensive support including geometry skinning)
* Native support for many image file formats (PSD, JPG, PNG, TGA, ...)
* Native support for many sound file formats (OGG, WAV, AIFF, XM, S3M, ...)
### Framework
* Flexible file system abstraction (local, archive, network components with chaining)
* HID abstraction to access machine devices (DirectInput, XInput, ...)
* Data format abstraction (XML/JSON/Binary back-ends)
* 2D Vector graphics engine based on the Anti Grain Geometry library
### Multi-threading
* Task-based multi-threading
* Asynchronous interfaces to control key API objects from any thread or language
### Audio
* Audio API abstraction layer (OpenAL back-end)
* Any supported audio format can be streamed or loaded as a sound
* 3D audio support
### Rendering
* GPU-accelerated
* Graphic API abstraction layer (OpenGL 3.3/ES 2.0 & DirectX 11 back-ends)
* Shader-based rendering
* Draw TTF text to screen
### Scene
* Complete scene management
* Component/system architecture
* GPU skinning
* Light component with shadow mapping
* Post-processing (motion blur, depth of field, ...)
* Bullet/PhysX 3 physics system
* Recast/Detour navigation system
* Create new component using Lua scripts
* Multiple Lua scripts can run in parallel
* Each scene system executes tasks in parallel using a lock-free stepping algorithm

View File

@ -1,5 +0,0 @@
.title API Functions
%GlobalFunctionIndex%
%GlobalFunctionDocumentation%

View File

@ -1,152 +0,0 @@
.title Generated textured cube
The [Geometry] class contains the functions needed to generate meshes.
## Generated textured cube
To generate a textured cube using [Geometry] API, you need the following functions:
### Vertices
* [Geometry_AllocateVertex] : Set the number of vertices.
* [Geometry_SetVertex] : Set vertex coordinates.
### Polygons
* [Geometry_AllocatePolygon] : Set number of polygons.
* [Geometry_SetPolygon] : Set the number of vertices and the polygon material.
* [Geometry_AllocatePolygonBinding] : Allocate memory to store the geometry polygon binding table.
* [Geometry_SetPolygonBinding] : Set the polygon binding table.
### Normals
* [Geometry_AllocateVertexNormal] : Set the number of normals.
* [Geometry_SetVertexNormal] : Set normal coordinates.
### UVs
* [Geometry_AllocateUVChannels] : Set number of UVs.
* [Geometry_SetUV] : Set UV coordinates.
### Materials
* [Geometry_AllocateMaterialTable] : Set number of materials.
* [Geometry_SetMaterial] : Set material definition file path.
## Code example
### Vertices and polygons
```python
cube = hg.Geometry()
# Create vertex
s = hg.Vector3(1,1,1) # dimensions
cube.AllocateVertex(8)
cube.SetVertex(0, hg.Vector3(-s.x, -s.y, -s.z))
cube.SetVertex(1, hg.Vector3(-s.x, -s.y, s.z))
cube.SetVertex(2, hg.Vector3(-s.x, s.y, -s.z))
cube.SetVertex(3, hg.Vector3(-s.x, s.y, s.z))
cube.SetVertex(4, hg.Vector3(s.x, -s.y, -s.z))
cube.SetVertex(5, hg.Vector3(s.x, -s.y, s.z))
cube.SetVertex(6, hg.Vector3(s.x, s.y, -s.z))
cube.SetVertex(7, hg.Vector3(s.x, s.y, s.z))
# Create polygons
cube.AllocatePolygon(6)
cube.SetPolygon(0, 4, 0)
cube.SetPolygon(1, 4, 1)
cube.SetPolygon(2, 4, 2)
cube.SetPolygon(3, 4, 3)
cube.SetPolygon(4, 4, 4)
cube.SetPolygon(5, 4, 5)
# Polygons bindings
cube.AllocatePolygonBinding()
cube.SetPolygonBinding(0, hg.IntList([0, 2, 6, 4]))
cube.SetPolygonBinding(1, hg.IntList([4, 6, 7, 5]))
cube.SetPolygonBinding(2, hg.IntList([5, 7, 3, 1]))
cube.SetPolygonBinding(3, hg.IntList([1, 3, 2, 0]))
cube.SetPolygonBinding(4, hg.IntList([2, 3, 7, 6]))
cube.SetPolygonBinding(5, hg.IntList([4, 5, 1, 0]))
```
### Normals
Each vertex of each polygon has its own normal: 6 polygons × 4 vertices = 24 normals.
```python
# Normals
cube.AllocateVertexNormal(24)
cube.SetVertexNormal(0, hg.Vector3(0, 0, -1))
cube.SetVertexNormal(1, hg.Vector3(0, 0, -1))
cube.SetVertexNormal(2, hg.Vector3(0, 0, -1))
cube.SetVertexNormal(3, hg.Vector3(0, 0, -1))
cube.SetVertexNormal(4, hg.Vector3(1, 0, 0))
cube.SetVertexNormal(5, hg.Vector3(1, 0, 0))
cube.SetVertexNormal(6, hg.Vector3(1, 0, 0))
cube.SetVertexNormal(7, hg.Vector3(1, 0, 0))
cube.SetVertexNormal(8, hg.Vector3(0, 0, 1))
cube.SetVertexNormal(9, hg.Vector3(0, 0, 1))
cube.SetVertexNormal(10, hg.Vector3(0, 0, 1))
cube.SetVertexNormal(11, hg.Vector3(0, 0, 1))
cube.SetVertexNormal(12, hg.Vector3(-1, 0, 0))
cube.SetVertexNormal(13, hg.Vector3(-1, 0, 0))
cube.SetVertexNormal(14, hg.Vector3(-1, 0, 0))
cube.SetVertexNormal(15, hg.Vector3(-1, 0, 0))
cube.SetVertexNormal(16, hg.Vector3(0, 1, 0))
cube.SetVertexNormal(17, hg.Vector3(0, 1, 0))
cube.SetVertexNormal(18, hg.Vector3(0, 1, 0))
cube.SetVertexNormal(19, hg.Vector3(0, 1, 0))
cube.SetVertexNormal(20, hg.Vector3(0, -1, 0))
cube.SetVertexNormal(21, hg.Vector3(0, -1, 0))
cube.SetVertexNormal(22, hg.Vector3(0, -1, 0))
cube.SetVertexNormal(23, hg.Vector3(0, -1, 0))
```
### UVs and materials
```python
# Create UVs
cube.AllocateUVChannels(1, 24)
cube.SetUV(0, 0, hg.Vector2List([hg.Vector2(0, 0), hg.Vector2(0, 1), hg.Vector2(1, 1), hg.Vector2(1, 0)]))
cube.SetUV(0, 1, hg.Vector2List([hg.Vector2(0, 0), hg.Vector2(0, 1), hg.Vector2(1, 1), hg.Vector2(1, 0)]))
cube.SetUV(0, 2, hg.Vector2List([hg.Vector2(0, 0), hg.Vector2(0, 1), hg.Vector2(1, 1), hg.Vector2(1, 0)]))
cube.SetUV(0, 3, hg.Vector2List([hg.Vector2(0, 0), hg.Vector2(0, 1), hg.Vector2(1, 1), hg.Vector2(1, 0)]))
cube.SetUV(0, 4, hg.Vector2List([hg.Vector2(0, 0), hg.Vector2(0, 1), hg.Vector2(1, 1), hg.Vector2(1, 0)]))
cube.SetUV(0, 5, hg.Vector2List([hg.Vector2(0, 0), hg.Vector2(0, 1), hg.Vector2(1, 1), hg.Vector2(1, 0)]))
# Create materials
cube.AllocateMaterialTable(6)
cube.SetMaterial(0, "assets/materials/face1.mat")
cube.SetMaterial(1, "assets/materials/face2.mat")
cube.SetMaterial(2, "assets/materials/face3.mat")
cube.SetMaterial(3, "assets/materials/face4.mat")
cube.SetMaterial(4, "assets/materials/face5.mat")
cube.SetMaterial(5, "assets/materials/face6.mat")
```
### Validate Geometry and create Node
Once the geometry is fully defined, you can validate the structure. If validation succeeds, you can create the [Object] and [Node].
```python
if cube.Validate():
    geo = plus.GetRenderSystem().CreateGeometry(cube, False)
    obj = hg.Object()
    obj.SetGeometry(geo)
    node = hg.Node()
    node.SetName("generated_cube")
    transform = hg.Transform(hg.Vector3(0, 3, 0))
    node.AddComponent(transform)
    node.AddComponent(obj)
    scene.AddNode(node)
    return node
```

View File

@ -1,73 +0,0 @@
.title Installation
## Quick Install
* **Using PIP in a command line:** `pip install harfang`
* **Or download the wheel from :** [Downloads](https://www.harfang3d.com/downloads)
If anything goes wrong, please look at the [Troubleshooting](#Troubleshooting) section.<br/>
Further details on the installation are available below.
## Prerequisites
The following dependencies must be installed on your system for any Harfang project to work properly.
* Functional OpenGL 3.3 hardware and drivers
### Windows
* OpenAL redistributable (`oalinst.exe`)
* Visual C++ 2017 redistributable (`vcredist.exe`)
### Linux
* OpenAL (`sudo apt-get install libopenal1`)
## Installation
Harfang is available for several programming languages as an extension or as a standalone executable. The following sections describe the installation procedure for each variant.
### Python
* Download the `.whl` package for your OS and Python version. ([Downloads](https://www.harfang3d.com/downloads))
#### Windows
1. Open a command prompt as Administrator (`Win+X` then `Command Prompt (Admin)`).
1. Switch to the download directory and execute `pip install <your_harfang_version>.whl --user`.
#### OSX
1. Open a terminal window (in `Applications/Utilities/Terminal.app`).
1. Switch to the download directory and execute `pip install <your_harfang_version>.whl --user`.
#### Linux
1. Open a terminal window.
1. Switch to the download directory and execute `pip install <your_harfang_version>.whl --user`.
**Note:** You might need to explicitly use `pip3` to install the module if your system has both Python 2 and 3 installed.
#### Confirm your installation ####
Confirm your installation by starting your Python 3 interpreter and executing the following statement: `import harfang as hg`. If you receive no error message, the installation was successful.
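For example, from the interpreter prompt:
```python
# no ImportError here means the wheel and its runtime dependencies are correctly installed
import harfang as hg
```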
### Lua
Deploy the binary extension to your Lua interpreter or use the provided interpreter.
## <a name="Troubleshooting"></a>Troubleshooting
### Python
#### 1. `pip install` fails with a message saying `harfang is not a supported wheel on this platform`.
Make sure that your pip install is up to date. Outdated pip versions have been known to cause such problems.
#### 2. The dynamic library fails to load when importing the `harfang` module in Python.
Make sure your system has the required runtime dependencies installed: OpenAL, and on Windows the Visual C++ 2017 redistributable.
#### 3. `ImportError: DLL load failed: %1 is not a valid Win32 application.` error when importing the `harfang` module.
This error usually happens when installing the incorrect version of Harfang for your Python version, for example the 64-bit version of Harfang on a 32-bit install of the Python interpreter.

View File

@ -1,19 +1,19 @@
.title Overview
Harfang is a high-level software library to create applications that display 2D/3D visuals and play sound/music.
It provides a unified API to write programs using different programming languages and is available for Windows, OSX and Linux (Debian/Ubuntu).
## Supported Programming Languages
For installation instructions for each supported language please refer to the corresponding manual page:
* [man.CPython]
* [man.Lua]
## Getting Help
* For generic programming issues:
- [Stack Overflow](http://stackoverflow.com/)
* For game development related issues:
- [gamedev.net](http://www.gamedev.net)
.title Overview
Harfang is a high-level software library to create applications that display 2D/3D visuals and play sound/music.
It provides a unified API to write programs using different programming languages and is available for Windows and Linux (for more details, please refer to the [man.Requirements]).
## Supported Programming Languages
For installation instructions for each supported language please refer to the corresponding manual page:
* [man.CPython]
* [man.Lua]
## Getting Help
* For generic programming issues:
- [Stack Overflow](http://stackoverflow.com/)
* For game development related issues:
- [gamedev.net](http://www.gamedev.net)

View File

@ -23,19 +23,19 @@ The rigid body inertia tensor is computed from its collision shape properties.
## Simulating Physics
Create a physics backend such as [SceneNewtonPhysics] and call [SceneNewtonPhysics_SceneCreatePhysicsFromAssets] to create the physics states corresponding to the scene declaration.
Create a physics backend such as [SceneBullet3Physics] and call [SceneBullet3Physics_SceneCreatePhysicsFromAssets] to create the physics states corresponding to the scene declaration.
*Note:* [SceneNewtonPhysics_SceneCreatePhysicsFromAssets] means that if setting up the physics states requires access to an external resource, such as a mesh, it should be loaded from the assets system. If you are working from the filesystem, use [SceneNewtonPhysics_SceneCreatePhysicsFromFile].
*Note:* [SceneBullet3Physics_SceneCreatePhysicsFromAssets] means that if setting up the physics states requires access to an external resource, such as a mesh, it should be loaded from the assets system. If you are working from the filesystem, use [SceneBullet3Physics_SceneCreatePhysicsFromFile].
### Running the Simulation
This involves 3 steps on each update:
1. Synchronize physics state with the scene declaration using [SceneNewtonPhysics_SceneCreatePhysicsFromAssets]. Alternatively, you can use a more fine-grained approach using [SceneNewtonPhysics_NodeCreatePhysicsFromAssets] to improve performance.
2. Step the simulation using [SceneNewtonPhysics_StepSimulation].
3. Synchronize the updated physics transformations to the scene using [SceneNewtonPhysics_SyncDynamicBodiesToScene].
1. Synchronize physics state with the scene declaration using [SceneBullet3Physics_SceneCreatePhysicsFromAssets]. Alternatively, you can use a more fine-grained approach using [SceneBullet3Physics_NodeCreatePhysicsFromAssets] to improve performance.
2. Step the simulation using [SceneBullet3Physics_StepSimulation].
3. Synchronize the updated physics transformations to the scene using [SceneBullet3Physics_SyncDynamicBodiesToScene].
*Note:* If you are using kinematic bodies you will also need to synchronize them from their node transformation on each update using [SceneNewtonPhysics_SyncKinematicBodiesFromScene].
*Note:* If you are using kinematic bodies you will also need to synchronize them from their node transformation on each update using [SceneBullet3Physics_SyncKinematicBodiesFromScene].
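A minimal per-update sketch of these three steps, assuming the [SceneBullet3Physics_*] entries above map to Python methods of the same name, and that `scene` and `dt` come from your application's main loop:
```python
import harfang as hg

physics = hg.SceneBullet3Physics()

# on each update:
physics.SceneCreatePhysicsFromAssets(scene)  # 1. sync the physics state with the scene declaration
physics.StepSimulation(dt)                   # 2. step the simulation by dt
physics.SyncDynamicBodiesToScene(scene)      # 3. write the updated physics transformations back to the scene
```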
### The Easy Way
@ -45,7 +45,7 @@ When using a script system, this function will also dispatch collision events to
## Keeping the System Synchronized
Call the physics system garbage collect method (eg. [SceneNewtonPhysics_GarbageCollect]) on each update to ensure that destroyed nodes or components are properly removed. If you know that no node or component was destroyed during a particular update, not calling the garbage collector will save on performance.
Call the physics system garbage collect method (eg. [SceneBullet3Physics_GarbageCollect]) on each update to ensure that destroyed nodes or components are properly removed. If you know that no node or component was destroyed during a particular update, not calling the garbage collector will save on performance.
## Reading Physics Transformation

View File

@ -1,140 +0,0 @@
.title Post processing
Post processing occurs after the main scene rendering in order to add more realistic effects (motion blur, ambient occlusion, ...).
.img("post_process_pipeline.png")
## Post process
* [BloomPostProcess]
* [ChromaticDispersionPostProcess]
* [HSLPostProcess]
* [MotionBlurPostProcess]
* [RadialBlurPostProcess]
* [SAOPostProcess]
* [SharpenPostProcess]
### Usage (Python)
All post-processes share the same init/remove procedure:
```python
import harfang as hg
...
camera = scene.GetCurrentCamera()
post_process = hg.BloomPostProcess()
camera.AddComponent(post_process)
...
camera.RemoveComponent(post_process)
...
```
_In this example we use "BloomPostProcess", but it could be "ChromaticDispersionPostProcess", "HSLPostProcess", and so on..._
## Post process Stack
You can add as many post-process components as you like to the Camera.
They are executed in their order of appearance in the node's stack: this is the **Post Process stack**.
The order of execution of the post-processes is important:
.img("post_process_stack.png")
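Building on the init example above, a minimal sketch of a two-stage stack (`camera` is the node returned by `scene.GetCurrentCamera()`):
```python
# components are executed in the order they are added to the camera
sao = hg.SAOPostProcess()
bloom = hg.BloomPostProcess()

camera.AddComponent(sao)    # executed first
camera.AddComponent(bloom)  # executed second
```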
---
## BloomPostProcess
Adds a glow around brightly lit areas. This effect enhances the impression of brightness.
_Bloom post-process:_
.img("bloom_01.png")
_No post-process:_
.img("no_post_process.png")
---
## ChromaticDispersionPostProcess
This filter works by independently offsetting the red, green and blue components of the input image.
_Chromatic dispersion post-process:_
.img("chromatic_dispersion_01.png")
_No post-process:_
.img("no_post_process.png")
---
## HSLPostProcess
Post-process component implementing a Hue/Saturation/Brightness filter.
* **Hue:** adds a circular shift to pixel colors.
* **Saturation:** sets the color strength (0: picture in grayscale).
* **Brightness:** sets the brightness level (0 sets the screen to black). This can be used for fade-in/fade-out effects.
_HSL post-process:_
.img("HSL_01.png")
_No post-process:_
.img("no_post_process_camaro.png")
---
## MotionBlurPostProcess
This effect reproduces the well-known cinematographic effect that blurs moving parts.
The faster the part, the blurrier it is. Motion-blur increases the realism of the rendering by softening the movements.
_Motion blur post-process:_
.img("motionblur_01.png")
_No post-process:_
.img("no_post_process_camaro.png")
---
## RadialBlurPostProcess
This effect blurs the pixels from a point of the screen. The further the pixels are from the point, the blurrier they are.
Unlike motion-blur, the radial-blur is independent of motion.
_Radial blur post-process:_
.img("radial_blur_01.png")
_No post-process:_
.img("no_post_process_camaro.png")
---
## SAOPostProcess
S.A.O. stands for Screen-space Ambient Occlusion.
SAO is a fast, real-time ambient occlusion technique. It computes per-pixel occlusion using the frame Z-buffer.
It is an approximation of true ambient occlusion, but much faster.
SAO is independent of scene complexity, as it works only on pixel data (color buffer & Z-buffer).
_SAO post-process:_
.img("SAO_01.png")
_No post-process:_
.img("no_post_process_camaro.png")
---
## SharpenPostProcess
This effect reinforces contrasted edges using a convolution matrix.
_Sharpen post-process:_
.img("Sharpen_01.png")
_No post-process:_
.img("no_post_process.png")

View File

@ -1,12 +1,22 @@
.title Requirements
## System Requirements
* **GPU:** Graphic card with OpenGL 3.3 support.
* **CPU** and **memory** requirements are mostly dependent on your project characteristics.
A faster computer for development is recommended.
### Supported desktop OS
* Windows 7+, OSX 10.8+, Ubuntu 14.04+
.title Requirements
## System Requirements
* **GPU:** Graphics card with OpenGL 3.3, Direct3D 11 or OpenGL ES 3.1 support.
* **CPU** and **memory** requirements are mostly dependent on your project characteristics.
A faster computer for development is recommended.
### Supported desktop OS
* Windows 10+ (Intel)
* Ubuntu 20.04 LTS+ (Intel)
* AArch64 Linux (ARM)
### Window systems
* X11
* Wayland
### VR Support
* Via SteamVR (Windows only)

View File

@ -1,48 +1,48 @@
.title Working with Scene
A scene is a 3d world populated with [Node].
Nodes are container objects taking meaning through the use of components.
## Node & Components
Calling [Scene_CreateNode] returns an empty node with no component attached. In this state, it serves little to no purpose as it will not be drawn or implement any concrete behavior.
### Object
In order to be drawn, a node must provide two essential pieces of information:
- *A transformation:* This is done using [Node_SetTransform] to assign it a [Transform] component.
- *A visual representation:* This is done using [Node_SetObject] to assign it an [Object] component.
The object component accepts a [ModelRef] to a [Model] and holds a local list of [Material] used to draw the model. Each object component may hold a different material definition for the same model.
The same components can be assigned to multiple nodes.
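A hedged sketch of the above; the method names mirror the manual entries referenced in this section, while `scene.CreateTransform`/`scene.CreateObject` as component-creation helpers and the `model_ref`/`material` variables (already loaded resources) are assumptions:
```python
node = scene.CreateNode("drawable")
node.SetTransform(scene.CreateTransform())                 # [Node_SetTransform]
node.SetObject(scene.CreateObject(model_ref, [material]))  # [Node_SetObject]
```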
### Camera
Assign the [Camera] component to nodes to turn them into observers of the scene.
For more information on how this integrates with drawing a scene, see [man.DrawingScene].
### Light
Assign the [Light] component to nodes to turn them into light sources.
### Instance
Scenes can be instantiated in one another. This is useful to create multiple complex parts with their own animations or scripts from which you compose a larger world.
To instantiate a scene use [Node_SetInstance] to assign an [Instance] component to a node. To perform explicit instantiation use [Node_SetupInstanceFromAssets] or [Node_SetupInstanceFromFile].
*Note:* Instances are automatically setup when loading a scene.
After instantiation, the instance content is held in the host scene. [Node_GetInstanceSceneView] can be used to access it in isolation from the host content via the returned [SceneView] object.
## Managing Scene Resources
Most scene resources are returned by value as generational references (see [man.Ownership]) wrapped into helper classes such as [Node], [Camera], [Light] or [Object].
The scene has strong ownership of the resources it manages.
Nodes are explicitly destroyed using [Scene_DestroyNode] and components are implicitly destroyed using [Scene_GarbageCollect].
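As a short sketch, assuming [Scene_DestroyNode] and [Scene_GarbageCollect] map to Python methods of the same name:
```python
scene.DestroyNode(node)  # explicitly mark the node for destruction
scene.GarbageCollect()   # implicitly destroy the components it owned
```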

View File

@ -1,84 +1,84 @@
.title Scripting
Scripts can be used to extend the behavior of nodes and scenes.
[TOC]
## Host vs. Embedded VM
When using Harfang from a scripting language it can be difficult to differentiate between parts of your program running on your main script VM and parts of your program running on one of the supported embedded VMs.
We differentiate between those VMs by using the term *host VM* and *embedded VM*. For example, you may write a program in CPython which declares a scene extended using Lua scripts. In this case, CPython is the *host VM* and Lua is the *embedded VM*.
## Declaring Scripts
Create a [Script] component and assign it to a node or scene using [Node_SetScript] or [Scene_SetScript]. Set the path to the script source using [Script_SetPath].
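A hedged sketch, assuming the manual entries above map to Python methods of the same name, that `scene.CreateScript` is the component-creation helper, that script slots are addressed by index, and that `scripts/behavior.lua` is a hypothetical path:
```python
script = scene.CreateScript()           # assumed helper returning a [Script] component
script.SetPath("scripts/behavior.lua")  # [Script_SetPath]; hypothetical source path
node.SetScript(0, script)               # [Node_SetScript]; slot index assumed
```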
## Creating & Evaluating Scripts
Create a backend such as [SceneLuaVM] and call [SceneLuaVM_SceneCreateScriptsFromAssets] to create the script states corresponding to the scene declaration. A [Script] component can be assigned to multiple nodes, in which case they will all share the same execution environment.
You should then use the [SceneUpdateSystems] function to update both the scene and its systems. This function will update all the systems you pass to it and implements a default behavior that dispatches common events to scripts using a set of specific callbacks.
## Script Environment
The following symbols are defined when creating the environment for a script.
Symbol | Description
------ | -----------
G | Table shared by all script components created by the same scene.
hg | Access to the Harfang API.
scene | Scene object this component belongs to.
### Default Events & Callbacks
Node events reported by the default update behavior:
- *OnAttachToNode(Node node, int slot_index)*: A script component was attached to a node slot as a result of calling [Node_SetScript].
- *OnDetachFromNode(Node node, int slot_index)*: A script component was detached from a node slot as a result of calling [Node_RemoveScript].
- *OnDestroy()*: A script component is about to be destroyed and its memory released.
- *OnUpdate(Node node, time_ns dt)*: Called during a scene update for each node a script component is attached to.
- *OnCollision(Node a, Node b)*: Called when two nodes collide.
Scene events reported by the default update behavior:
- *OnDestroy()*: A script component is about to be destroyed and its memory released.
- *OnAttachToScene(scene, slot_idx)*: A script component was attached to a Scene slot as a result of calling [Scene_SetScript].
- *OnDetachFromScene(scene, slot_idx)*: A script component was detached from a Scene slot as a result of calling [Scene_RemoveScript].
- *OnUpdate(Scene scene, time_ns dt)*: Called during a scene update.
- *OnSubmitSceneToForwardPipeline(ViewId base_view_id, Scene scene, Rect rect, ViewState view_state, ForwardPipeline pipeline, PipelineResources, FramebufferHandle fb)*: Called at the end of a scene submission to the forward pipeline.
### Communicating with Scripts
Depending on the host and embedded VM used you may be able to access a script component environment directly (eg. [SceneLuaVM_GetScriptEnv]).
If this is not possible, however, it can be done using the backend's get and set value methods from any host VM. In some cases the transfer is automatic, while in other cases you may need to explicitly marshal the outgoing/incoming values.
Source VM/Target VM | CPython | Lua
--------------|---------|-----
CPython | N/A | `LuaObject.Pack`/`LuaObject.Unpack`
Lua | N/A | -
Sending a value from a host CPython VM to an embedded Lua VM and back.
```python
# from host Python to embedded Lua
lua_vm.SetScriptValue('target_node', lua_vm.MakeLuaObject().Pack(node))
# from embedded Lua to host Python
target_node = lua_vm.GetScriptValue('target_node').Unpack()
```
Sending a value from a host Lua VM to an embedded Lua VM and back.
```lua
-- from host Lua to embedded Lua
lua_vm.SetScriptValue('target_node', node)
-- from embedded Lua to host Lua
node = lua_vm.GetScriptValue('target_node')
```
## Keeping the System Synchronized
Call the script system garbage collect method (eg. [SceneLuaVM_GarbageCollect]) on each update to ensure that destroyed nodes or components are properly removed. If you know that no node or component was destroyed during a particular update, not calling the garbage collector will save on performance.
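A hedged one-liner, assuming [SceneLuaVM_GarbageCollect] maps to a Python method of the same name taking the scene, with `lua_vm` being the [SceneLuaVM] backend created earlier:
```python
lua_vm.GarbageCollect(scene)  # signature assumed; drops script states of destroyed nodes/components
```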

View File

@ -1,99 +1,99 @@
.title Writing a Shader
[TOC]
## Shader Language Overview
Harfang uses [bgfx](https://bkaradzic.github.io/bgfx/index.html) as its rendering system; its cross-platform shader language is based on [GLSL](https://www.khronos.org/registry/OpenGL/specs/gl/GLSLangSpec.1.40.pdf) with a few key differences:
* No `bool/int` uniforms, all uniforms must be `float`.
* Attributes and varyings can be accessed only from the `main()` function.
* Must use `SAMPLER2D/3D/CUBE/etc.` macros instead of `sampler2D/3D/Cube/etc.` tokens.
* Must use `vec2/3/4_splat(<value>)` instead of `vec2/3/4(<value>)`.
* Must use `mtxFromCols/mtxFromRows` when constructing matrices in shaders.
* Must use `mul(x, y)` when multiplying vectors and matrices.
* Must use `varying.def.sc` to define input/output semantics and precision instead of using `attribute/in` and `varying/in/out`.
* `$input/$output` tokens must appear at the beginning of the shader.
## Data Flow
In a shader, vertex attributes such as position or normal are sent to the vertex shader as a stream of **attributes**. The vertex shader outputs are then interpolated across the rendered primitive and passed to the fragment shader as **varyings** to compute the final pixel color.
## Writing a Shader
A shader is composed of 3 files: a definition file and the vertex and fragment source files:
- `example_vs.sc`: Source for the vertex program.
- `example_fs.sc`: Source for the fragment program.
- `example_varying.def`: Shader definition file.
Both the vertex and fragment programs must declare a `main` function.
```glsl
void main() { ... }
```
The shader definition file must list all inputs and outputs of the shader programs and associate them with standard semantics like `POSITION` or `NORMAL` (see [HLSL Semantics](https://docs.microsoft.com/fr-fr/windows/win32/direct3dhlsl/dx-graphics-hlsl-semantics?redirectedfrom=MSDN) for a complete list).
```glsl
vec3 vNormal : NORMAL;
vec3 a_position : POSITION;
vec3 a_normal : NORMAL;
```
In the vertex program use `$input` to declare an attribute and `$output` to declare a varying. `$input/$output` tokens must appear at the beginning of the program.
By convention attributes must be named one of the following: `a_position`, `a_normal`, `a_tangent`, `a_bitangent`, `a_color0`, `a_color1`, `a_color2`, `a_color3`, `a_indices`, `a_weight`, `a_texcoord0`, `a_texcoord1`, `a_texcoord2`, `a_texcoord3`, `a_texcoord4`, `a_texcoord5`, `a_texcoord6`, `a_texcoord7`, `i_data0`, `i_data1`, `i_data2`, `i_data3` or `i_data4`.
The following vertex program declares that it takes two attributes as input and outputs to a single varying.
```glsl
$input a_position, a_normal
$output vNormal
#include <bgfx_shader.sh>
void main() {
vNormal = mul(u_model[0], vec4(a_normal * 2.0 - 1.0, 0.0)).xyz;
gl_Position = mul(u_modelViewProj, vec4(a_position, 1.0));
}
```
**Note:** Attributes and varyings can only be accessed from the shader main function.
Outputs from the vertex program then become inputs to the fragment program. The fragment program outputs its result to standard GLSL variables such as `gl_FragColor` (see [GLSL Language Specifications](https://www.khronos.org/registry/OpenGL/specs/gl/GLSLangSpec.1.40.pdf)).
```glsl
$input vNormal
#include <bgfx_shader.sh>
void main() {
vec3 normal = normalize(vNormal);
gl_FragColor = vec4(normal.x, normal.y, normal.z, 1.0);
}
```
## Passing Constants to a Shader
Constant values can be passed to a shader program by using **uniforms**. This is done using [UniformSetValue] and [UniformSetTexture].
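A hedged Python-side sketch; `MakeUniformSetValue`/`MakeUniformSetTexture` are assumed to be the binding-side constructors for the [UniformSetValue]/[UniformSetTexture] entries above, and `u_color`, `u_diffuse` and `tex` are hypothetical names:
```python
values = [hg.MakeUniformSetValue("u_color", hg.Vec4(1, 0, 0, 1))]
textures = [hg.MakeUniformSetTexture("u_diffuse", tex, 0)]  # texture bound to stage 0
```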
### Predefined Uniforms
The `bgfx_shader.sh` include file defines the following predefined uniforms.
Type | Symbol | Description
---- | ------ | -----------
vec4 | u_viewRect | View rectangle for current view, in pixels. (x, y, width, height)
vec4 | u_viewTexel | Inverse width and height. (1.0 / width, 1.0 / height, undef, undef)
mat4 | u_view | View matrix.
mat4 | u_invView | Inverse view matrix.
mat4 | u_proj | Projection matrix.
mat4 | u_invProj | Inverse projection matrix.
mat4 | u_viewProj | Concatenated view projection matrix.
mat4 | u_invViewProj | Concatenated inverted view projection matrix.
mat4 | u_model[BGFX_CONFIG_MAX_BONES] | Array of model matrices.
mat4 | u_modelView | Concatenated model view matrix, only the first model matrix from the array is used.
mat4 | u_modelViewProj | Concatenated model view projection matrix.
vec4 | u_alphaRef | Alpha reference value for alpha test.

View File

@ -1 +0,0 @@
.title Toolchain

View File

@ -1,15 +0,0 @@
.title Assets folders
.tutorial(goal="Access to the file system through assets folders", level="Intermediate", duration=15, lang=All, group="Filesystem")
#### Python
```python
[import:tutorials/filesystem/1_assets_folder.py]
```
#### Lua
```lua
[import:tutorials/filesystem/1_assets_folder.lua]
```

View File

@ -1,15 +0,0 @@
.title Recursive walk
.tutorial(goal="Walk recursively through the content of a directory", level="Beginner", duration=5, lang=All, group="Filesystem")
#### Python
```python
[import:tutorials/filesystem/3_Recursive_directory_listing.py]
```
#### Lua
```lua
[import:tutorials/filesystem/3_Recursive_directory_listing.lua]
```

View File

@ -1,15 +0,0 @@
.title Basic GUI
.tutorial(goal="Create and display a basic GUI using ImGui", level="Beginner", duration=15, lang=All, group="GUI")
#### Python
```python
[import:tutorials/imgui/1_basic.py]
```
#### Lua
```lua
[import:tutorials/imgui/1_basic.lua]
```

View File

@ -1,15 +0,0 @@
.title Basic loop
.tutorial(goal="Basic immediate rendering loop", level="Beginner", duration=10, lang=All, group="Immediate rendering")
#### Python
```python
[import:tutorials/immediate/1_basic_loop.py]
```
#### Lua
```lua
[import:tutorials/immediate/1_basic_loop.lua]
```

View File

@ -1,15 +0,0 @@
.title Immediate scene
.tutorial(goal="Immediate rendering of a scene without a pipeline", level="Beginner", duration=10, lang=All, group="Immediate rendering")
#### Python
```python
[import:tutorials/immediate/3_scene_no_pipeline.py]
```
#### Lua
```lua
[import:tutorials/immediate/3_scene_no_pipeline.lua]
```

View File

@ -1,15 +0,0 @@
.title List devices
.tutorial(goal="List the input devices", level="Beginner", duration=10, lang=All, group="Input")
#### Python
```python
[import:tutorials/input/1_list_devices.py]
```
#### Lua
```lua
[import:tutorials/input/1_list_devices.lua]
```

View File

@ -1,15 +0,0 @@
.title Input pad
.tutorial(goal="How to read values from the joypad - Using Joypad object", level="Beginner", duration=10, lang=All, group="Input")
#### Python
```python
[import:tutorials/input/8_read_pad.py]
```
#### Lua
```lua
[import:tutorials/input/8_read_pad.lua]
```

View File

@ -1,15 +0,0 @@
.title Input pad 2
.tutorial(goal="How to read values from the gamepad - Using hg.ReadGamepad", level="Beginner", duration=10, lang=All, group="Input")
#### Python
```python
[import:tutorials/input/9_read_pad_2.py]
```
#### Lua
```lua
[import:tutorials/input/9_read_pad_2.lua]
```

View File

@ -1,15 +0,0 @@
.title Read keyboard
.tutorial(goal="How to read from the keyboard", level="Beginner", duration=10, lang=All, group="Input")
#### Python
```python
[import:tutorials/input/2_read_keyboard.py]
```
#### Lua
```lua
[import:tutorials/input/2_read_keyboard.lua]
```

View File

@ -1,15 +0,0 @@
.title Read keyboard 2
.tutorial(goal="How to read from the keyboard - Using hg.ReadKeyboard('default') function", level="Beginner", duration=10, lang=All, group="Input")
#### Python
```python
[import:tutorials/input/3_read_keyboard_2.py]
```
#### Lua
```lua
[import:tutorials/input/3_read_keyboard_2.lua]
```

View File

@ -1,15 +0,0 @@
.title Read keyboard 3
.tutorial(goal="How to read from the keyboard - Using hg.ReadKeyboard('raw') function", level="Beginner", duration=10, lang=All, group="Input")
#### Python
```python
[import:tutorials/input/4_read_keyboard_3.py]
```
#### Lua
```lua
[import:tutorials/input/4_read_keyboard_3.lua]
```

View File

@ -1,15 +0,0 @@
.title Read mouse
.tutorial(goal="How to read values from the mouse - Using Mouse object", level="Beginner", duration=10, lang=All, group="Input")
#### Python
```python
[import:tutorials/input/5_read_mouse.py]
```
#### Lua
```lua
[import:tutorials/input/5_read_mouse.lua]
```

View File

@ -1,15 +0,0 @@
.title Read mouse 2
.tutorial(goal="How to read values from the mouse - Using hg.ReadMouse", level="Beginner", duration=10, lang=All, group="Input")
#### Python
```python
[import:tutorials/input/6_read_mouse_2.py]
```
#### Lua
```lua
[import:tutorials/input/6_read_mouse_2.lua]
```

View File

@ -1,15 +0,0 @@
.title Read mouse 3
.tutorial(goal="How to read values from the mouse - Using hg.ReadMouse('raw')", level="Beginner", duration=10, lang=All, group="Input")
#### Python
```python
[import:tutorials/input/7_read_mouse_3.py]
```
#### Lua
```lua
[import:tutorials/input/7_read_mouse_3.lua]
```

View File

@ -1,15 +0,0 @@
.title Physics cubes
.tutorial(goal="Add and remove many physics objects", level="Intermediate", duration=10, lang=All, group="Physics")
#### Python
```python
[import:tutorials/physics/2_physics_cubes.py]
```
#### Lua
```lua
[import:tutorials/physics/2_physics_cubes.lua]
```

View File

@ -1,15 +0,0 @@
.title Impulse
.tutorial(goal="Apply an impulse to a rigid body", level="Intermediate", duration=10, lang=All, group="Physics")
#### Python
```python
[import:tutorials/physics/1_impulse.py]
```
#### Lua
```lua
[import:tutorials/physics/1_impulse.lua]
```

View File

@ -1,15 +0,0 @@
.title Physics overrides matrix
.tutorial(goal="Physics overrides Transform's matrix", level="Intermediate", duration=10, lang=All, group="Physics")
#### Python
```python
[import:tutorials/physics/3_test_physics_overrides_matrix.py]
```
#### Lua
```lua
[import:tutorials/physics/3_test_physics_overrides_matrix.lua]
```

View File

@ -1,15 +0,0 @@
.title Load picture
.tutorial(goal="Load a picture", level="Beginner", duration=5, lang=All, group="Picture")
#### Python
```python
[import:tutorials/picture/1_load_picture.py]
```
#### Lua
```lua
[import:tutorials/picture/1_load_picture.lua]
```

View File

@ -1,15 +0,0 @@
.title Save picture
.tutorial(goal="Save a picture", level="Beginner", duration=5, lang=All, group="Picture")
#### Python
```python
[import:tutorials/picture/2_save_picture.py]
```
#### Lua
```lua
[import:tutorials/picture/2_save_picture.lua]
```

View File

@ -1,15 +0,0 @@
.title Dynamics objects
.tutorial(goal="Render and animate a large number of objects", level="Beginner", duration=15, lang=All, group="Scene")
#### Python
```python
[import:tutorials/scene/2_many_dynamics_objects.py]
```
#### Lua
```lua
[import:tutorials/scene/2_many_dynamics_objects.lua]
```

Some files were not shown because too many files have changed in this diff