3.2.6 release.

This commit is contained in:
astrofra 2023-06-08 22:17:16 +02:00
parent 5303295db0
commit 36d1742af4
16 changed files with 230 additions and 39 deletions

View File

@ -2287,7 +2287,7 @@ static void _SetViewTransform(bgfx::ViewId view_id, const hg::Mat4 &view, const
gen.end_class(pprogram_ref)
gen.bind_variable("const hg::PipelineProgramRef hg::InvalidPipelineProgramRef")
gen.bind_function('CaptureTexture', 'uint32_t', ['const hg::PipelineResources &resources', 'const hg::TextureRef &tex', 'hg::Picture &pic'])
gen.bind_function('CaptureTexture', 'uint32_t', ['bgfx::ViewId &view_id', 'const hg::PipelineResources &resources', 'const hg::TextureRef &tex', 'const hg::Texture &read_back', 'hg::Picture &pic'], {'arg_in_out': ['view_id']})
# Texture with pipeline
gen.bind_function('hg::LoadTextureFromFile', 'hg::TextureRef', ['const char *path', 'uint32_t flags', 'hg::PipelineResources &resources'])

View File

@ -18,6 +18,38 @@ The easiest way is to drag and drop the resources folder on the assetc executabl
## Command-Line
Since HARFANG 3.2.5, `assetc` is included in both the _Python_ and _Lua_ packages.
### Python
In HARFANG Python 3.2.5 and above, `assetc` is packaged in the bdist wheel and can be invoked as a function of the `harfang.bin` module.
In this example, we will compile a folder called `resources` and target the `OpenGL` API:
* From the command line:
```bash
python3 -m harfang.bin assetc resources -api GL
```
* As a Python module:
```python
import harfang.bin
harfang.bin.assetc('resources', '-api', 'GL')
```
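Since each option is forwarded to `assetc` as its own string, the module call is easy to script. Below is a small, illustrative helper that compiles the same folder for several target APIs (only `GL` is taken from the example above; any other API names you pass are assumptions to adapt to your targets):
```python
import harfang.bin

def compile_resources(folder, apis=('GL',)):
    # invoke assetc once per target graphics API; each option is a separate argument
    for api in apis:
        harfang.bin.assetc(folder, '-api', api)

compile_resources('resources')
```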
### Lua
In HARFANG Lua 3.2.5 and above, `assetc` is packaged along with the Lua extension and can be invoked as a function of the `harfang.bin` module.
In this example, we will compile a folder called `resources` and target the `OpenGL` API:
```lua
hg_bin = require "harfang.bin"
hg_bin.assetc('resources', '-api', 'GL')
```
## Usage in detail
If you need more control over the compilation options, the command line accepts the following parameters:
```

View File

@ -157,13 +157,21 @@ Color GetPixelRGBA(const Picture &pic, uint16_t x, uint16_t y) {
if (x >= pic.GetWidth() || y >= pic.GetHeight())
return Color::Zero;
const int size = size_of(pic.GetFormat());
int offset = numeric_cast<int>((x + (size_t)pic.GetWidth() * y) * size);
Color out(Color::Zero);
const uint8_t *data = pic.GetData();
for (int i = 0; i < size;)
out[i++] = data[offset++] / 255.f;
Color out = Color::Zero;
const PictureFormat &fmt = pic.GetFormat();
const int channel_count = GetChannelCount(fmt);
const size_t offset = (x + y * pic.GetWidth()) * size_of(fmt);
if (fmt == PF_RGBA32F) {
const float *in = reinterpret_cast<float *>(pic.GetData() + offset);
for (int i = 0; i < channel_count; i++) {
out[i] = in[i];
}
} else {
const uint8_t *in = pic.GetData() + offset;
for (int i = 0; i < channel_count; i++) {
out[i] = in[i] / 255.f;
}
}
return out;
}
@ -172,12 +180,20 @@ void SetPixelRGBA(Picture &pic, uint16_t x, uint16_t y, const Color &col) {
if ((x >= pic.GetWidth()) || (y >= pic.GetHeight()))
return;
const int size = size_of(pic.GetFormat());
int offset = numeric_cast<int>((x + (size_t)pic.GetWidth() * y) * size);
uint8_t *data = pic.GetData();
for (int i = 0; i < size;)
data[offset++] = uint8_t(Clamp(col[i++], 0.f, 1.f) * 255.f);
const PictureFormat &fmt = pic.GetFormat();
const int channel_count = GetChannelCount(fmt);
const size_t offset = (x + y * pic.GetWidth()) * size_of(fmt);
if (fmt == PF_RGBA32F) {
float *out = reinterpret_cast<float *>(pic.GetData() + offset);
for (int i = 0; i < channel_count; i++) {
out[i] = col[i];
}
} else {
uint8_t *out = pic.GetData() + offset;
for (int i = 0; i < channel_count; i++) {
out[i] = uint8_t(Clamp(col[i], 0.f, 1.f) * 255.f);
}
}
}
//

View File

@ -1609,9 +1609,8 @@ bgfx::VertexLayout VertexLayoutPosFloatNormUInt8TexCoord0UInt8() {
bgfx::VertexLayout vtx_layout;
vtx_layout.begin()
.add(bgfx::Attrib::Position, 3, bgfx::AttribType::Float)
.add(bgfx::Attrib::Color0, 4, bgfx::AttribType::Uint8, true, false)
.add(bgfx::Attrib::Normal, 3, bgfx::AttribType::Uint8, true, true)
.add(bgfx::Attrib::TexCoord0, 2, bgfx::AttribType::Uint8, true, false)
.skip(2)
.end();
return vtx_layout;
}
@ -2317,9 +2316,20 @@ TextureRef LoadTextureFromAssets(const char *name, uint64_t flags, PipelineResou
}
//
uint32_t CaptureTexture(const PipelineResources &resources, const TextureRef &t, Picture &pic) {
uint32_t CaptureTexture(bgfx::ViewId &view_id, const PipelineResources &resources, const TextureRef &t, const Texture &readback, Picture &pic) {
const auto ref = resources.textures.Get(t);
return bgfx::readTexture(ref.handle, pic.GetData());
constexpr uint64_t expected_flags = BGFX_TEXTURE_READ_BACK | BGFX_TEXTURE_BLIT_DST;
if (((readback.flags & expected_flags) != expected_flags) || (readback.flags & BGFX_TEXTURE_RT_MASK)) {
hg::error("invalid texture flags.");
return BGFX_INVALID_HANDLE;
}
view_id++;
bgfx::touch(view_id);
bgfx::blit(view_id, readback.handle, 0, 0, ref.handle);
return bgfx::readTexture(readback.handle, pic.GetData());
}
//

View File

@ -529,7 +529,7 @@ TextureRef LoadTextureFromAssets(const char *path, uint64_t flags, PipelineResou
/// A Picture object can be accessed by the CPU.
/// This function is asynchronous and its result will not be available until the returned frame counter is equal or greater to the frame counter returned by
/// Frame.
uint32_t CaptureTexture(const PipelineResources &resources, const TextureRef &t, Picture &pic);
uint32_t CaptureTexture(bgfx::ViewId &view_id, const PipelineResources &resources, const TextureRef &t, const Texture &readback, Picture &pic);
MaterialRef LoadMaterialRef(const Reader &ir, const Handle &h, const char *path, const Reader &deps_ir, const ReadProvider &deps_ip,
PipelineResources &resources, const PipelineInfo &pipeline, bool queue_texture_loads, bool do_not_load_resources, bool silent = false);

View File

@ -1259,11 +1259,7 @@ ScenePlayAnimRef Scene::GetOnInstantiatePlayAnimRef(ComponentRef ref) {
Instance Scene::GetNodeInstance(NodeRef ref) const {
const auto cref = GetNodeInstanceRef(ref);
if (cref != InvalidComponentRef)
return {scene_ref, cref};
warn("Invalid node instance");
return {};
return (cref != InvalidComponentRef) ? Instance{scene_ref, cref} : Instance{};
}
ComponentRef Scene::GetNodeInstanceRef(NodeRef ref) const {
@ -2529,6 +2525,8 @@ float GetAnimableNodePropertyFloat(const Scene &scene, NodeRef ref, const std::s
return node.GetLight().GetDiffuseIntensity();
else if (name == "Light.SpecularIntensity")
return node.GetLight().GetSpecularIntensity();
else if (name == "Camera.Fov")
return node.GetCamera().GetFov();
}
return 1.f;
}
@ -2539,6 +2537,8 @@ void SetAnimableNodePropertyFloat(Scene &scene, NodeRef ref, const std::string &
node.GetLight().SetDiffuseIntensity(v);
else if (name == "Light.SpecularIntensity")
node.GetLight().SetSpecularIntensity(v);
else if (name == "Camera.Fov")
node.GetCamera().SetFov(v);
}
}

View File

@ -45,7 +45,7 @@ inline void from_json(const json &j, gen_ref &v) {
v.idx = j;
}
NLOHMANN_JSON_SERIALIZE_ENUM(RigidBodyType, {{RBT_Dynamic, "dynamic"}, {RBT_Kinematic, "kinematic"}});
NLOHMANN_JSON_SERIALIZE_ENUM(RigidBodyType, {{RBT_Dynamic, "dynamic"}, {RBT_Kinematic, "kinematic"}, {RBT_Static, "static"}});
NLOHMANN_JSON_SERIALIZE_ENUM(
CollisionType, {{CT_Sphere, "sphere"}, {CT_Cube, "cube"}, {CT_Cone, "cone"}, {CT_Capsule, "capsule"}, {CT_Cylinder, "cylinder"}, {CT_Mesh, "mesh"}});

View File

@ -141,9 +141,7 @@ inline bool operator==(const Color &a, const Color &b) {
return Equal(a.r, b.r) && Equal(a.g, b.g) && Equal(a.b, b.b) && Equal(a.a, b.a);
}
inline bool operator!=(const Color &a, const Color &b) {
return NotEqual(a.r, b.r) && NotEqual(a.g, b.g) && NotEqual(a.b, b.b) && NotEqual(a.a, b.a);
}
inline bool operator!=(const Color &a, const Color &b) { return NotEqual(a.r, b.r) || NotEqual(a.g, b.g) || NotEqual(a.b, b.b) || NotEqual(a.a, b.a); }
Color operator+(const Color &a, const Color &b);
Color operator+(const Color &a, const float v);

View File

@ -224,9 +224,23 @@ bool ClassifySegment(const MinMax &mm, const Vec3 &p1, const Vec3 &p2, Vec3 &itr
}
MinMax operator*(const Mat4 &m, const MinMax &mm) {
const Vec3 p0 = m * mm.mn;
const Vec3 p1 = m * mm.mx;
return MinMax(Min(p0, p1), Max(p0, p1));
MinMax out(GetT(m), GetT(m));
// find extreme points by considering product of min and max with each component of M
for (uint32_t j = 0; j < 3; ++j) {
for (uint32_t i = 0; i < 3; ++i) {
float a = m.m[j][i] * mm.mn[i], b = m.m[j][i] * mm.mx[i];
if (a < b) {
out.mn[j] += a;
out.mx[j] += b;
} else {
out.mn[j] += b;
out.mx[j] += a;
}
}
}
return out;
}
void GetMinMaxVertices(const MinMax &minmax, Vec3 out[8]) {

View File

@ -31,6 +31,53 @@ static void test_LoadSave() {
}
}
static void test_SetGetPixels() {
Picture rgb(2, 2, PF_RGB24);
Picture rgba(2, 2, PF_RGBA32);
Picture rgbaf(2, 2, PF_RGBA32F);
Color pal[4] = {{1.f, 1.f, 1.f, 1.f}, {1.f, 0.f, 0.f, 0.f}, {0.f, 1.f, 0.f, 1.f}, {0.f, 0.f, 1.f, 0.f}};
for (int j = 0; j < 2; j++) {
for (int i = 0; i < 2; i++) {
Color in = pal[i + j * 2];
SetPixelRGBA(rgb, i, j, in);
SetPixelRGBA(rgba, i, j, in);
SetPixelRGBA(rgbaf, i, j, in);
// out-of-bounds writes must be ignored without crashing
SetPixelRGBA(rgb, i + 4, j, in);
SetPixelRGBA(rgba, i + 4, j, in);
SetPixelRGBA(rgbaf, i + 4, j, in);
SetPixelRGBA(rgb, i, j + 4, in);
SetPixelRGBA(rgba, i, j + 4, in);
SetPixelRGBA(rgbaf, i, j + 4, in);
}
}
for (int j = 0; j < 2; j++) {
for (int i = 0; i < 2; i++) {
Color expected = pal[i + j * 2];
TEST_CHECK(GetPixelRGBA(rgb, i, j) == Color(expected.r, expected.g, expected.b, 0.f));
TEST_CHECK(GetPixelRGBA(rgba, i, j) == expected);
TEST_CHECK(GetPixelRGBA(rgbaf, i, j) == expected);
TEST_CHECK(GetPixelRGBA(rgb, i + 10, j) == Color::Zero);
TEST_CHECK(GetPixelRGBA(rgba, i + 10, j) == Color::Zero);
TEST_CHECK(GetPixelRGBA(rgbaf, i + 10, j) == Color::Zero);
TEST_CHECK(GetPixelRGBA(rgb, i, j + 10) == Color::Zero);
TEST_CHECK(GetPixelRGBA(rgba, i, j + 10) == Color::Zero);
TEST_CHECK(GetPixelRGBA(rgbaf, i, j + 10) == Color::Zero);
}
}
}
void test_picture() {
test_LoadSave();
test_SetGetPixels();
}

View File

@ -116,7 +116,9 @@ void test_color() {
Color c0(75.757575f, 1.207f, -44.01f, 0.192f);
Color c1(75.757575f, 1.207f, -44.01f, 0.192f);
Color c2(70.0101f, 4.4444f, 1.0001f, 0.4f);
Color c3(70.0101f, 1.207f, -44.01f, 0.192f);
TEST_CHECK(c0 != c2);
TEST_CHECK(c0 != c3);
TEST_CHECK((c0 != c1) == false);
}
{

View File

@ -128,10 +128,25 @@ void test_minmax() {
MinMax m0(-Vec3::One, Vec3::One);
Mat4 trs = TransformationMat4(Vec3(-0.5f, 0.5f, 0.8f), Deg3(45.f, -30.f, 60.f), Vec3(2.f, 0.8f, 1.f));
MinMax m1 = trs * m0;
Vec3 p0 = trs * m0.mn;
Vec3 p1 = trs * m0.mx;
TEST_CHECK(AlmostEqual(m1.mn, Min(p0, p1), 0.000001f));
TEST_CHECK(AlmostEqual(m1.mx, Max(p0, p1), 0.000001f));
// Brute force computation of the new minmax
Vec3 p[8] = {
trs * m0.mn,
trs * Vec3(m0.mn.x, m0.mn.y, m0.mx.z),
trs * Vec3(m0.mn.x, m0.mx.y, m0.mn.z),
trs * Vec3(m0.mn.x, m0.mx.y, m0.mx.z),
trs * Vec3(m0.mx.x, m0.mn.y, m0.mn.z),
trs * Vec3(m0.mx.x, m0.mn.y, m0.mx.z),
trs * Vec3(m0.mx.x, m0.mx.y, m0.mn.z),
trs * m0.mx
};
Vec3 p0 = p[0], p1 = p[0];
for (int i = 1; i < 8; i++) {
p0 = Min(p0, p[i]);
p1 = Max(p1, p[i]);
}
TEST_CHECK(AlmostEqual(m1.mn, p0, 0.000001f));
TEST_CHECK(AlmostEqual(m1.mx, p1, 0.000001f));
}
{
MinMax m0(-Vec3::One, Vec3::One);

View File

@ -12,7 +12,9 @@ using namespace hg;
static void test_NewWindow() {
Window *win = NewWindow(320, 200);
#ifndef __linux__ // Window init may fail, due to X11 authority issues within the CI. Disabling this test on Linux.
TEST_CHECK(win != nullptr);
#endif
for (int i = 0; i < 100; ++i) {
UpdateWindow(win);
@ -24,8 +26,9 @@ static void test_NewWindow() {
static void test_SetWindowPos() {
Window *win = NewWindow(320, 200);
#ifndef __linux__ // Window init may fail, due to X11 authority issues within the CI. Disabling this test on Linux.
TEST_CHECK(win != nullptr);
#endif
SetWindowPos(win, iVec2(200, 200));
for (int i = 0; i < 5; ++i) {

View File

@ -1 +1 @@
3.2.5
3.2.6

View File

@ -10,7 +10,20 @@ Harfang is a 3D real time visualization framework for the industry, the educatio
See https://www.harfang3d.com/license for licensing terms.
|
| **Quickstart**
| **Quickstart, command line method (Windows/Linux)**
1. Clone the tutorials' repository:
- ``git clone https://github.com/harfang3d/tutorials-hg2.git``
2. Enter the repository:
- ``cd tutorials-hg2``
3. Compile the tutorial resources:
- ``python3 -m harfang.bin assetc resources``
- After the compilation process finishes, you should see a ``resources_compiled`` folder next to the resources folder.
4. Run the tutorials:
- ``D:\tutorials-hg2>python3 draw_lines.py``
|
| **Quickstart, drag & drop method (Windows)**
1. Download the tutorials from https://github.com/harfang3d/tutorials-hg2 and unzip them to your computer (e.g. *d:/tutorials-hg2*).
2. To compile the tutorial resources, download **assetc** for your platform: https://dev.harfang3d.com/releases/
@ -27,7 +40,17 @@ Alternatively you can open the tutorial folder and run the provided debug target
|
| **Screenshots**
The following screenshots were captured on a 2070RTX in 1080P running at 60FPS, GI is performed using screen space raytracing and does not require RTX capable hardware.
The following screenshots were captured on a 2060RTX in 1080P running at 60FPS, GI is performed using screen space raytracing and does not require RTX capable hardware.
|
| The NVIDIA USD Attic using the AAA renderer *(Courtesy of NVIDIA)*
.. image:: https://raw.githubusercontent.com/harfang3d/image-storage/main/portfolio/3.2.6/attic_harfang.png
|
| Marine Melodies *(interactive musical experience)*
.. image:: https://raw.githubusercontent.com/harfang3d/image-storage/main/portfolio/3.2.5/marine-melodies-screenshot_000.png
|
| Cyber City *(CyberPunk City, CyberPunk Girl and Robot R32 by art-equilibrium, ILranch and ZeroArt3d)*

View File

@ -1,3 +1,34 @@
# [3.2.6] - 2023-06-05
This minor release provides several fixes and brings a functioning API to capture the framebuffer and save it as a picture.
### Framework integration and source code maintenance
* Fixed the missing `DisableCursor` on SDL (by @PMP-P).
* Fixed Linux Golang module build script.
### Rendering
* Added the ability to set the pixel center offset for the projection matrix: `SetCameraCenterOffset` and `GetCameraCenterOffset` (by @RobEwbank1).
* Resolved issue #50 (fix `CaptureTexture()` for Python / Lua):
  * **OLD:** `uint32_t CaptureTexture(const PipelineResources &resources, const TextureRef &t, Picture &pic)`
  * **NEW:** `uint32_t CaptureTexture(bgfx::ViewId &view_id, const PipelineResources &resources, const TextureRef &t, const Texture &readback, Picture &pic)`
* Fixed Picture Set/Get RGBA: `GetPixelRGBA` and `SetPixelRGBA`.
* How does the framebuffer capture work? (see the combined sketch after this list)
  * In order to grab the framebuffer, create an empty picture: `picture = hg.Picture(512, 512, hg.PF_RGBA32)`
  * The framebuffer texture is created as in the [`draw to texture`](https://github.com/harfang3d/tutorials-hg2/blob/da92f5dc96099dfc315c90d3ea188fc30f18f28f/scene_draw_to_texture.lua) tutorial
  * Then, create a read-back texture with the `TF_ReadBack` and `TF_BlitDestination` flags: `tex_readback = hg.CreateTexture(512, 512, "readback", hg.TF_ReadBack | hg.TF_BlitDestination, hg.TF_RGBA8)`
  * When submitting the scene, target the framebuffer: `hg.SubmitSceneToPipeline(view_id, scene, hg.IntRect(0, 0, 512, 512), true, pipeline, res, frame_buffer.handle)`
  * The capture is performed asynchronously: `frame_count_capture, view_id = hg.CaptureTexture(view_id, res, tex_color_ref, tex_readback, picture)`
  * Then, only when the current `frame` counter is greater than or equal to `frame_count_capture`, save the picture: `hg.SavePNG(picture, "capture.png")`
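Putting these steps together, a minimal Python sketch of the capture loop could look like this (`scene`, `pipeline`, `res`, `frame_buffer` and `tex_color_ref` are assumed to be set up as in the `draw to texture` tutorial linked above, and window/input handling is omitted):
```python
import harfang as hg

# sketch only: scene, pipeline, res, frame_buffer and tex_color_ref come from the
# 'draw to texture' tutorial setup
picture = hg.Picture(512, 512, hg.PF_RGBA32)
tex_readback = hg.CreateTexture(512, 512, "readback", hg.TF_ReadBack | hg.TF_BlitDestination, hg.TF_RGBA8)

frame_count_capture = None
frame = 0

while frame_count_capture is None or frame < frame_count_capture:
    view_id = 0
    # render the scene into the framebuffer texture
    view_id, pass_ids = hg.SubmitSceneToPipeline(view_id, scene, hg.IntRect(0, 0, 512, 512), True, pipeline, res, frame_buffer.handle)
    if frame_count_capture is None:
        # request the asynchronous readback once; the picture is valid from 'frame_count_capture' on
        frame_count_capture, view_id = hg.CaptureTexture(view_id, res, tex_color_ref, tex_readback, picture)
    frame = hg.Frame()

hg.SavePNG(picture, "capture.png")
```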
### Misc bug fixes
* Fixed the `!=` color operator.
* Brought back the old `minmax` transform to fix #49.
* Removed the "Invalid node instance" warning message.
* Resolved "Tutorials issues on Linux Ubuntu" (fixed `VertexLayoutPosFloatNormUInt8TexCoord0UInt8`).
* Studio-related fixes:
  * `RBT_Static` wasn't saved properly in the scene file.
  * Fixed `GetAnimableNodePropertyFloat` and `SetAnimableNodePropertyFloat` to handle the camera FOV.
# [3.2.5] - 2022-12-09
This minor release provides several fixes and new features in the VR/XR and Physics areas. Platform compatibility was slightly improved as well on OS X and WASM (still experimental):