Lagrange
Loading...
Searching...
No Matches
bind_scene.h
1/*
2 * Copyright 2023 Adobe. All rights reserved.
3 * This file is licensed to you under the Apache License, Version 2.0 (the "License");
4 * you may not use this file except in compliance with the License. You may obtain a copy
5 * of the License at http://www.apache.org/licenses/LICENSE-2.0
6 *
7 * Unless required by applicable law or agreed to in writing, software distributed under
8 * the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR REPRESENTATIONS
9 * OF ANY KIND, either express or implied. See the License for the specific language
10 * governing permissions and limitations under the License.
11 */
12#pragma once
13
14#include <lagrange/AttributeValueType.h>
15#include <lagrange/Logger.h>
16#include <lagrange/python/binding.h>
17#include <lagrange/python/tensor_utils.h>
18#include <lagrange/scene/Scene.h>
19#include <lagrange/scene/internal/scene_string_utils.h>
20#include <lagrange/scene/scene_convert.h>
21#include <lagrange/scene/scene_utils.h>
22#include <lagrange/utils/assert.h>
23
24#include "bind_value.h"
25
26namespace lagrange::python {
27
28namespace nb = nanobind;
29
///
/// Registers the Python bindings for the Lagrange scene module into module `m`:
/// list wrappers for scene element containers, the scene component classes
/// (Extensions, SceneMeshInstance, Node, ImageBuffer, Image, TextureInfo,
/// Material, Texture, Light, Camera, Animation, Skeleton), the Scene class
/// itself, and free functions for node transforms and scene/mesh conversion.
///
/// @param m  The nanobind module that receives all class/function bindings.
///
void bind_scene(nb::module_& m)
{
    using namespace lagrange::scene;
    // The Python bindings are instantiated for a single Scalar/Index
    // combination (double / uint32_t).
    using Scalar = double;
    using Index = uint32_t;
    using SceneType = Scene<Scalar, Index>;

    // Bind the SafeVector containers used by Scene members so that they are
    // exposed to Python as list-like types.
    // NOTE(review): the ElementId list is bound with nb::bind_vector while all
    // other lists use nb::bind_safe_vector — confirm this asymmetry is
    // intentional (ElementId is a plain id type, the others are scene elements).
    nb::bind_vector<SafeVector<ElementId>>(m, "ElementIdList");
    nb::bind_safe_vector<SafeVector<Node>>(m, "NodeList");
    nb::bind_safe_vector<SafeVector<SceneMeshInstance>>(m, "SceneMeshInstanceList");
    nb::bind_safe_vector<SafeVector<SurfaceMesh<Scalar, Index>>>(m, "SurfaceMeshList");
    nb::bind_safe_vector<SafeVector<ImageExperimental>>(m, "ImageList");
    nb::bind_safe_vector<SafeVector<Texture>>(m, "TextureList");
    nb::bind_safe_vector<SafeVector<MaterialExperimental>>(m, "MaterialList");
    nb::bind_safe_vector<SafeVector<Light>>(m, "LightList");
    nb::bind_safe_vector<SafeVector<Camera>>(m, "CameraList");
    nb::bind_safe_vector<SafeVector<Skeleton>>(m, "SkeletonList");
    nb::bind_safe_vector<SafeVector<Animation>>(m, "AnimationList");

    // Generic extension-data container attached to most scene elements.
    nb::class_<lagrange::scene::Extensions>(m, "Extensions")
        .def(
            "__repr__",
            [](const lagrange::scene::Extensions& self) {
                return scene::internal::to_string(self);
            })
        .def_prop_ro("size", &Extensions::size)
        .def_prop_ro("empty", &Extensions::empty)
        .def_rw(
            "data",
            &Extensions::data,
            // reference_internal keeps the Extensions object alive while
            // Python holds the returned dict.
            nb::rv_policy::reference_internal,
            "Raw data stored in this extension as a dict");

    // A mesh instance: an index into scene.meshes plus its material indices.
    nb::class_<SceneMeshInstance>(
        m,
        "SceneMeshInstance",
        "Pairs a mesh with its materials (zero, one, or more)")
        .def(nb::init<>())
        .def(
            "__repr__",
            [](const SceneMeshInstance& self) { return scene::internal::to_string(self); })
        .def_prop_rw(
            "mesh",
            // invalid_element is surfaced to Python as None.
            [](SceneMeshInstance& self) -> std::optional<ElementId> {
                if (self.mesh != invalid_element)
                    return self.mesh;
                else
                    return {};
            },
            [](SceneMeshInstance& self, ElementId mesh) { self.mesh = mesh; },
            "Mesh index. Has to be a valid index in the scene.meshes vector (None if invalid)")
        .def_rw(
            "materials",
            &SceneMeshInstance::materials,
            "Material indices in the scene.materials vector. This is typically a single material "
            "index. When a single mesh uses multiple materials, the AttributeName::material_id "
            "facet attribute should be defined.");

    // Scene-graph node: transform + parent/children links + element lists.
    nb::class_<Node>(m, "Node", "Represents a node in the scene hierarchy")
        .def(nb::init<>())
        .def("__repr__", [](const Node& self) { return scene::internal::to_string(self); })
        .def_rw("name", &Node::name, "Node name. May not be unique and can be empty")
        .def_prop_rw(
            "transform",
            // Getter: zero-copy view over the 4x4 transform. The strides
            // {1, 4} expose the underlying column-major storage; nb::find(node)
            // ties the array's lifetime to the node.
            [](Node& node) {
                return nb::ndarray<nb::numpy, float, nb::f_contig, nb::shape<4, 4>>(
                    node.transform.data(),
                    {4, 4},
                    nb::find(node),
                    {1, 4});
            },
            [](Node& node, nb::ndarray<nb::numpy, const float, nb::shape<4, 4>> t) -> void {
                auto view = t.view<float, nb::ndim<2>>();
                // Explicit 2D indexing because the input ndarray can be either row or column major.
                for (size_t i = 0; i < 4; i++) {
                    for (size_t j = 0; j < 4; j++) {
                        // Write into column-major layout: element (i, j) lives
                        // at offset i + j * 4.
                        node.transform.data()[i + j * 4] = view(i, j);
                    }
                }
            },
            "Transform of the node, relative to its parent")
        .def_prop_rw(
            "parent",
            // invalid_element (no parent, e.g. root) is surfaced as None.
            [](Node& node) -> std::optional<ElementId> {
                if (node.parent != invalid_element)
                    return node.parent;
                else
                    return {};
            },
            [](Node& node, ElementId parent) { node.parent = parent; },
            "Parent index. May be invalid if the node has no parent (e.g. the root)")
        .def_rw("children", &Node::children, "Children indices. May be empty")
        .def_rw("meshes", &Node::meshes, "List of meshes contained in this node")
        .def_rw("cameras", &Node::cameras, "List of cameras contained in this node")
        .def_rw("lights", &Node::lights, "List of lights contained in this node")
        .def_rw("extensions", &Node::extensions);

    // Raw image buffer. The `data` property reinterprets the byte buffer as a
    // (height, width, num_channels) numpy array typed per element_type.
    nb::class_<ImageBufferExperimental> image_buffer(
        m,
        "ImageBuffer",
        "Minimalistic image data structure that stores the raw image data");
    image_buffer.def(nb::init<>())
        .def(
            "__repr__",
            [](const ImageBufferExperimental& self) { return scene::internal::to_string(self); })
        .def_ro("width", &ImageBufferExperimental::width, "Image width")
        .def_ro("height", &ImageBufferExperimental::height, "Image height")
        .def_ro(
            "num_channels",
            &ImageBufferExperimental::num_channels,
            "Number of image channels (must be 1, 3, or 4)")
        .def_prop_rw(
            "data",
            // Getter: one case per supported element type. Each branch creates
            // a zero-copy numpy view over the raw bytes; nb::find(self) +
            // reference_internal keep the buffer alive while Python holds the
            // array.
            [](ImageBufferExperimental& self) {
                size_t shape[3] = {self.height, self.width, self.num_channels};
                switch (self.element_type) {
                case AttributeValueType::e_int8_t:
                    return nb::cast(
                        nb::ndarray<int8_t, nb::numpy, nb::c_contig, nb::device::cpu>(
                            reinterpret_cast<int8_t*>(self.data.data()),
                            3,
                            shape,
                            nb::find(self)),
                        nb::rv_policy::reference_internal);
                case AttributeValueType::e_uint8_t:
                    return nb::cast(
                        nb::ndarray<uint8_t, nb::numpy, nb::c_contig, nb::device::cpu>(
                            reinterpret_cast<uint8_t*>(self.data.data()),
                            3,
                            shape,
                            nb::find(self)),
                        nb::rv_policy::reference_internal);
                case AttributeValueType::e_int16_t:
                    return nb::cast(
                        nb::ndarray<int16_t, nb::numpy, nb::c_contig, nb::device::cpu>(
                            reinterpret_cast<int16_t*>(self.data.data()),
                            3,
                            shape,
                            nb::find(self)),
                        nb::rv_policy::reference_internal);
                case AttributeValueType::e_uint16_t:
                    return nb::cast(
                        nb::ndarray<uint16_t, nb::numpy, nb::c_contig, nb::device::cpu>(
                            reinterpret_cast<uint16_t*>(self.data.data()),
                            3,
                            shape,
                            nb::find(self)),
                        nb::rv_policy::reference_internal);
                case AttributeValueType::e_int32_t:
                    return nb::cast(
                        nb::ndarray<int32_t, nb::numpy, nb::c_contig, nb::device::cpu>(
                            reinterpret_cast<int32_t*>(self.data.data()),
                            3,
                            shape,
                            nb::find(self)),
                        nb::rv_policy::reference_internal);
                case AttributeValueType::e_uint32_t:
                    return nb::cast(
                        nb::ndarray<uint32_t, nb::numpy, nb::c_contig, nb::device::cpu>(
                            reinterpret_cast<uint32_t*>(self.data.data()),
                            3,
                            shape,
                            nb::find(self)),
                        nb::rv_policy::reference_internal);
                case AttributeValueType::e_int64_t:
                    return nb::cast(
                        nb::ndarray<int64_t, nb::numpy, nb::c_contig, nb::device::cpu>(
                            reinterpret_cast<int64_t*>(self.data.data()),
                            3,
                            shape,
                            nb::find(self)),
                        nb::rv_policy::reference_internal);
                case AttributeValueType::e_uint64_t:
                    return nb::cast(
                        nb::ndarray<uint64_t, nb::numpy, nb::c_contig, nb::device::cpu>(
                            reinterpret_cast<uint64_t*>(self.data.data()),
                            3,
                            shape,
                            nb::find(self)),
                        nb::rv_policy::reference_internal);
                case AttributeValueType::e_float:
                    return nb::cast(
                        nb::ndarray<float, nb::numpy, nb::c_contig, nb::device::cpu>(
                            reinterpret_cast<float*>(self.data.data()),
                            3,
                            shape,
                            nb::find(self)),
                        nb::rv_policy::reference_internal);
                case AttributeValueType::e_double:
                    return nb::cast(
                        nb::ndarray<double, nb::numpy, nb::c_contig, nb::device::cpu>(
                            reinterpret_cast<double*>(self.data.data()),
                            3,
                            shape,
                            nb::find(self)),
                        nb::rv_policy::reference_internal);
                default: throw nb::type_error("Unsupported image buffer `dtype`!");
                }
            },
            // Setter: derives width/height/num_channels and element_type from
            // the incoming tensor, then copies its bytes into the owned buffer.
            [](ImageBufferExperimental& self,
               nb::ndarray<nb::numpy, nb::c_contig, nb::device::cpu> tensor) {
                la_runtime_assert(tensor.ndim() == 3);
                // Tensor layout is (height, width, num_channels).
                self.width = tensor.shape(1);
                self.height = tensor.shape(0);
                self.num_channels = tensor.shape(2);
                auto dtype = tensor.dtype();
                if (dtype == nb::dtype<int8_t>()) {
                    self.element_type = AttributeValueType::e_int8_t;
                } else if (dtype == nb::dtype<uint8_t>()) {
                    self.element_type = AttributeValueType::e_uint8_t;
                } else if (dtype == nb::dtype<int16_t>()) {
                    self.element_type = AttributeValueType::e_int16_t;
                } else if (dtype == nb::dtype<uint16_t>()) {
                    self.element_type = AttributeValueType::e_uint16_t;
                } else if (dtype == nb::dtype<int32_t>()) {
                    self.element_type = AttributeValueType::e_int32_t;
                } else if (dtype == nb::dtype<uint32_t>()) {
                    self.element_type = AttributeValueType::e_uint32_t;
                } else if (dtype == nb::dtype<int64_t>()) {
                    self.element_type = AttributeValueType::e_int64_t;
                } else if (dtype == nb::dtype<uint64_t>()) {
                    self.element_type = AttributeValueType::e_uint64_t;
                } else if (dtype == nb::dtype<float>()) {
                    self.element_type = AttributeValueType::e_float;
                } else if (dtype == nb::dtype<double>()) {
                    self.element_type = AttributeValueType::e_double;
                } else {
                    throw nb::type_error("Unsupported input tensor `dtype`!");
                }
                // Deep copy: the buffer owns its bytes after assignment.
                self.data.resize(tensor.nbytes());
                std::copy(
                    reinterpret_cast<uint8_t*>(tensor.data()),
                    reinterpret_cast<uint8_t*>(tensor.data()) + tensor.nbytes(),
                    self.data.data());
            },
            "Raw buffer of size (width * height * num_channels * num_bits_per_element / 8) bytes "
            "containing image data")
        .def_prop_ro(
            "dtype",
            // Maps element_type to the corresponding numpy scalar type object;
            // returns None (with a warning) for unknown types.
            [](ImageBufferExperimental& self) -> std::optional<nb::type_object> {
                auto np = nb::module_::import_("numpy");
                switch (self.element_type) {
                case AttributeValueType::e_int8_t: return np.attr("int8");
                case AttributeValueType::e_int16_t: return np.attr("int16");
                case AttributeValueType::e_int32_t: return np.attr("int32");
                case AttributeValueType::e_int64_t: return np.attr("int64");
                case AttributeValueType::e_uint8_t: return np.attr("uint8");
                case AttributeValueType::e_uint16_t: return np.attr("uint16");
                case AttributeValueType::e_uint32_t: return np.attr("uint32");
                case AttributeValueType::e_uint64_t: return np.attr("uint64");
                case AttributeValueType::e_float: return np.attr("float32");
                case AttributeValueType::e_double: return np.attr("float64");
                default: logger().warn("Image buffer has an unknown dtype."); return std::nullopt;
                }
            },
            "The scalar type of the elements in the buffer");

    // Image: either in-memory data (ImageBuffer) or a reference to a file.
    nb::class_<ImageExperimental> image(
        m,
        "Image",
        "Image structure that can store either image data or reference to an image file");
    image.def(nb::init<>())
        .def(
            "__repr__",
            [](const ImageExperimental& self) { return scene::internal::to_string(self); })
        .def_rw(
            "name",
            &ImageExperimental::name,
            "Image name. Not guaranteed to be unique and can be empty")
        .def_rw("image", &ImageExperimental::image, "Image data")
        .def_prop_rw(
            "uri",
            // Empty path is surfaced as None; otherwise the path string.
            [](const ImageExperimental& self) -> std::optional<std::string> {
                if (self.uri.empty())
                    return {};
                else
                    return self.uri.string();
            },
            [](ImageExperimental& self, std::optional<std::string> uri) {
                if (uri.has_value())
                    self.uri = fs::path(uri.value());
                else
                    self.uri = fs::path();
            },
            "Image file path. This path is relative to the file that contains the scene. It is "
            "only valid if image data should be mapped to an external file")
        .def_rw("extensions", &ImageExperimental::extensions, "Image extensions");

    // (texture index, texcoord set index) pair used by materials.
    nb::class_<TextureInfo>(
        m,
        "TextureInfo",
        "Pair of texture index (which texture to use) and texture coordinate index (which set of "
        "UVs to use)")
        .def(nb::init<>())
        .def("__repr__", [](const TextureInfo& self) { return scene::internal::to_string(self); })
        .def_prop_rw(
            "index",
            // invalid_element round-trips as None in both directions.
            [](const TextureInfo& self) -> std::optional<ElementId> {
                if (self.index != invalid_element)
                    return self.index;
                else
                    return {};
            },
            [](TextureInfo& self, std::optional<ElementId> index) {
                if (index.has_value())
                    self.index = index.value();
                else
                    self.index = invalid_element;
            },
            "Texture index. Index in scene.textures vector. `None` if not set")
        .def_rw(
            "texcoord",
            &TextureInfo::texcoord,
            "Index of UV coordinates. Usually stored in the mesh as `texcoord_x` attribute where x "
            "is this variable. This is typically 0");

    // PBR material fields (gltf-style metallic/roughness model).
    nb::class_<MaterialExperimental> material(
        m,
        "Material",
        "PBR material, based on the gltf specification. This is subject to change, to support more "
        "material models");
    material.def(nb::init<>())
        .def(
            "__repr__",
            [](const MaterialExperimental& self) { return scene::internal::to_string(self); })
        .def_rw(
            "name",
            &MaterialExperimental::name,
            "Material name. May not be unique, and can be empty")
        .def_rw("base_color_value", &MaterialExperimental::base_color_value, "Base color value")
        .def_rw(
            "base_color_texture",
            &MaterialExperimental::base_color_texture,
            "Base color texture")
        .def_rw(
            "alpha_mode",
            &MaterialExperimental::alpha_mode,
            "The alpha mode specifies how to interpret the alpha value of the base color")
        .def_rw("alpha_cutoff", &MaterialExperimental::alpha_cutoff, "Alpha cutoff value")
        .def_rw("emissive_value", &MaterialExperimental::emissive_value, "Emissive color value")
        .def_rw("emissive_texture", &MaterialExperimental::emissive_texture, "Emissive texture")
        .def_rw("metallic_value", &MaterialExperimental::metallic_value, "Metallic value")
        .def_rw("roughness_value", &MaterialExperimental::roughness_value, "Roughness value")
        .def_rw(
            "metallic_roughness_texture",
            &MaterialExperimental::metallic_roughness_texture,
            "Metalness and roughness are packed together in a single texture. Green channel has "
            "roughness, blue channel has metalness")
        .def_rw("normal_texture", &MaterialExperimental::normal_texture, "Normal texture")
        .def_rw(
            "normal_scale",
            &MaterialExperimental::normal_scale,
            "Normal scaling factor. normal = normalize(<sampled tex value> * 2 - 1) * vec3(scale, "
            "scale, 1)")
        .def_rw("occlusion_texture", &MaterialExperimental::occlusion_texture, "Occlusion texture")
        .def_rw(
            "occlusion_strength",
            &MaterialExperimental::occlusion_strength,
            "Occlusion strength. color = lerp(color, color * <sampled tex value>, strength)")
        .def_rw(
            "double_sided",
            &MaterialExperimental::double_sided,
            "Whether the material is double-sided")
        .def_rw("extensions", &MaterialExperimental::extensions, "Material extensions");

    // Nested enum: Material.AlphaMode.
    nb::enum_<MaterialExperimental::AlphaMode>(material, "AlphaMode", "Alpha mode")
        .value(
            "Opaque",
            MaterialExperimental::AlphaMode::Opaque,
            "Alpha is ignored, and rendered output is opaque")
        .value(
            "Mask",
            MaterialExperimental::AlphaMode::Mask,
            "Output is either opaque or transparent depending on the alpha value and the "
            "alpha_cutoff value")
        .value(
            "Blend",
            MaterialExperimental::AlphaMode::Blend,
            "Alpha value is used to composite source and destination");


    // Texture: image reference plus sampling parameters.
    nb::class_<Texture> texture(m, "Texture", "Texture");
    texture.def(nb::init<>())
        .def("__repr__", [](const Texture& self) { return scene::internal::to_string(self); })
        .def_rw("name", &Texture::name, "Texture name")
        .def_prop_rw(
            "image",
            // invalid_element is surfaced as None.
            [](Texture& self) -> std::optional<ElementId> {
                if (self.image != invalid_element)
                    return self.image;
                else
                    return {};
            },
            [](Texture& self, ElementId img) { self.image = img; },
            "Index of image in scene.images vector (None if invalid)")
        .def_rw(
            "mag_filter",
            &Texture::mag_filter,
            "Texture magnification filter, used when texture appears larger on screen than the "
            "source image")
        .def_rw(
            "min_filter",
            &Texture::min_filter,
            "Texture minification filter, used when the texture appears smaller on screen than the "
            "source image")
        .def_rw("wrap_u", &Texture::wrap_u, "Texture wrap mode for U coordinate")
        .def_rw("wrap_v", &Texture::wrap_v, "Texture wrap mode for V coordinate")
        .def_rw("scale", &Texture::scale, "Texture scale")
        .def_rw("offset", &Texture::offset, "Texture offset")
        .def_rw("rotation", &Texture::rotation, "Texture rotation")
        .def_rw("extensions", &Texture::extensions, "Texture extensions");

    // Nested enums: Texture.WrapMode and Texture.TextureFilter.
    nb::enum_<Texture::WrapMode>(texture, "WrapMode", "Texture wrap mode")
        .value("Wrap", Texture::WrapMode::Wrap, "u|v becomes u%1|v%1")
        .value(
            "Clamp",
            Texture::WrapMode::Clamp,
            "Coordinates outside [0, 1] are clamped to the nearest value")
        .value(
            "Decal",
            Texture::WrapMode::Decal,
            "If the texture coordinates for a pixel are outside [0, 1], the texture is not applied")
        .value("Mirror", Texture::WrapMode::Mirror, "Mirror wrap mode");
    nb::enum_<Texture::TextureFilter>(texture, "TextureFilter", "Texture filter mode")
        .value("Undefined", Texture::TextureFilter::Undefined, "Undefined filter")
        .value("Nearest", Texture::TextureFilter::Nearest, "Nearest neighbor filtering")
        .value("Linear", Texture::TextureFilter::Linear, "Linear filtering")
        .value(
            "NearestMipmapNearest",
            Texture::TextureFilter::NearestMipmapNearest,
            "Nearest mipmap nearest filtering")
        .value(
            "LinearMipmapNearest",
            Texture::TextureFilter::LinearMipmapNearest,
            "Linear mipmap nearest filtering")
        .value(
            "NearestMipmapLinear",
            Texture::TextureFilter::NearestMipmapLinear,
            "Nearest mipmap linear filtering")
        .value(
            "LinearMipmapLinear",
            Texture::TextureFilter::LinearMipmapLinear,
            "Linear mipmap linear filtering");

    // Light source and its nested Type enum.
    nb::class_<Light> light(m, "Light", "Light");
    light.def(nb::init<>())
        .def("__repr__", [](const Light& self) { return scene::internal::to_string(self); })
        .def_rw("name", &Light::name, "Light name")
        .def_rw("type", &Light::type, "Light type")
        .def_rw(
            "position",
            &Light::position,
            "Light position. Note that the light is part of the scene graph, and has an associated "
            "transform in its node. This value is relative to the coordinate system defined by the "
            "node")
        .def_rw("direction", &Light::direction, "Light direction")
        .def_rw("up", &Light::up, "Light up vector")
        .def_rw("intensity", &Light::intensity, "Light intensity")
        .def_rw(
            "attenuation_constant",
            &Light::attenuation_constant,
            "Attenuation constant. Intensity of light at a given distance 'd' is: intensity / "
            "(attenuation_constant + attenuation_linear * d + attenuation_quadratic * d * d + "
            "attenuation_cubic * d * d * d)")
        .def_rw("attenuation_linear", &Light::attenuation_linear, "Linear attenuation factor")
        .def_rw(
            "attenuation_quadratic",
            &Light::attenuation_quadratic,
            "Quadratic attenuation factor")
        .def_rw("attenuation_cubic", &Light::attenuation_cubic, "Cubic attenuation factor")
        .def_rw(
            "range",
            &Light::range,
            "Range is defined for point and spot lights. It defines a distance cutoff at which the "
            "light intensity is to be considered zero. When the value is 0, range is assumed to be "
            "infinite")
        .def_rw("color_diffuse", &Light::color_diffuse, "Diffuse color")
        .def_rw("color_specular", &Light::color_specular, "Specular color")
        .def_rw("color_ambient", &Light::color_ambient, "Ambient color")
        .def_rw(
            "angle_inner_cone",
            &Light::angle_inner_cone,
            "Inner angle of a spot light's light cone. 2PI for point lights, undefined for "
            "directional lights")
        .def_rw(
            "angle_outer_cone",
            &Light::angle_outer_cone,
            "Outer angle of a spot light's light cone. 2PI for point lights, undefined for "
            "directional lights")
        .def_rw("size", &Light::size, "Size of area light source")
        .def_rw("extensions", &Light::extensions, "Light extensions");

    nb::enum_<Light::Type>(light, "Type", "Light type")
        .value("Undefined", Light::Type::Undefined, "Undefined light type")
        .value("Directional", Light::Type::Directional, "Directional light")
        .value("Point", Light::Type::Point, "Point light")
        .value("Spot", Light::Type::Spot, "Spot light")
        .value("Ambient", Light::Type::Ambient, "Ambient light")
        .value("Area", Light::Type::Area, "Area light");

    // Camera and its nested projection Type enum.
    nb::class_<Camera> camera(m, "Camera", "Camera");
    camera.def(nb::init<>())
        .def("__repr__", [](const Camera& self) { return scene::internal::to_string(self); })
        .def_rw("name", &Camera::name, "Camera name")
        .def_rw(
            "position",
            &Camera::position,
            "Camera position. Note that the camera is part of the scene graph, and has an "
            "associated transform in its node. This value is relative to the coordinate system "
            "defined by the node")
        .def_rw("up", &Camera::up, "Camera up vector")
        .def_rw("look_at", &Camera::look_at, "Camera look-at point")
        .def_rw(
            "near_plane",
            &Camera::near_plane,
            "Distance of the near clipping plane. This value cannot be 0")
        .def_rw("far_plane", &Camera::far_plane, "Distance of the far clipping plane")
        .def_rw("type", &Camera::type, "Camera type")
        .def_rw(
            "aspect_ratio",
            &Camera::aspect_ratio,
            "Screen aspect ratio. This is the value of width / height of the screen. aspect_ratio "
            "= tan(horizontal_fov / 2) / tan(vertical_fov / 2)")
        .def_rw(
            "horizontal_fov",
            &Camera::horizontal_fov,
            "Horizontal field of view angle, in radians. This is the angle between the left and "
            "right borders of the viewport. It should not be greater than Pi. fov is only defined "
            "when the camera type is perspective, otherwise it should be 0")
        .def_rw(
            "orthographic_width",
            &Camera::orthographic_width,
            "Half width of the orthographic view box. Or horizontal magnification. This is only "
            "defined when the camera type is orthographic, otherwise it should be 0")
        // NOTE(review): exposed as a read-only *property* but named like a
        // method ("get_vertical_fov") — confirm the naming is intentional on
        // the Python side.
        .def_prop_ro(
            "get_vertical_fov",
            &Camera::get_vertical_fov,
            "Get the vertical field of view. Make sure aspect_ratio is set before calling this")
        .def(
            "set_horizontal_fov_from_vertical_fov",
            &Camera::set_horizontal_fov_from_vertical_fov,
            "vfov"_a,
            "Set horizontal fov from vertical fov. Make sure aspect_ratio is set before calling "
            "this")
        .def_rw("extensions", &Camera::extensions, "Camera extensions");

    nb::enum_<Camera::Type>(camera, "Type", "Camera type")
        .value("Perspective", Camera::Type::Perspective, "Perspective projection")
        .value("Orthographic", Camera::Type::Orthographic, "Orthographic projection");

    // Animation: currently only name + extensions are exposed.
    nb::class_<Animation>(m, "Animation", "Animation")
        .def(nb::init<>())
        .def("__repr__", [](const Animation& self) { return scene::internal::to_string(self); })
        .def_rw("name", &Animation::name, "Animation name")
        .def_rw("extensions", &Animation::extensions, "Animation extensions");


    // Skeleton: list of deformed meshes + extensions.
    nb::class_<Skeleton>(m, "Skeleton", "Skeleton")
        .def(nb::init<>())
        .def("__repr__", [](const Skeleton& self) { return scene::internal::to_string(self); })
        .def_rw(
            "meshes",
            &Skeleton::meshes,
            "This skeleton is used to deform those meshes. This will typically contain one value, "
            "but can have zero or multiple meshes. The value is the index in the scene meshes")
        .def_rw("extensions", &Skeleton::extensions, "Skeleton extensions");


    // The Scene itself: element containers + add/add_child helpers.
    nb::class_<SceneType>(m, "Scene", "A 3D scene")
        .def(nb::init<>())
        .def("__repr__", [](const SceneType& self) { return scene::internal::to_string(self); })
        .def_rw("name", &SceneType::name, "Name of the scene")
        .def_rw(
            "nodes",
            &SceneType::nodes,
            "Scene nodes. This is a list of nodes, the hierarchy information is contained by each "
            "node having a list of children as indices to this vector")
        .def_rw(
            "root_nodes",
            &SceneType::root_nodes,
            "Root nodes. This is typically one. Must be at least one")
        .def_rw("meshes", &SceneType::meshes, "Scene meshes")
        .def_rw("images", &SceneType::images, "Images")
        .def_rw("textures", &SceneType::textures, "Textures. They can reference images")
        .def_rw("materials", &SceneType::materials, "Materials. They can reference textures")
        .def_rw("lights", &SceneType::lights, "Lights in the scene")
        .def_rw(
            "cameras",
            &SceneType::cameras,
            "Cameras. The first camera (if any) is the default camera view")
        .def_rw("skeletons", &SceneType::skeletons, "Scene skeletons")
        .def_rw("animations", &SceneType::animations, "Animations (unused for now)")
        .def_rw("extensions", &SceneType::extensions, "Scene extensions")
        .def(
            "add",
            // Accepts any scene element type via std::variant and dispatches
            // to the matching Scene::add overload with std::visit.
            [](SceneType& self,
               std::variant<
                   Node,
                   SceneType::MeshType,
                   ImageExperimental,
                   Texture,
                   MaterialExperimental,
                   Light,
                   Camera,
                   Skeleton,
                   Animation> element) {
                return std::visit(
                    [&](auto&& value) {
                        using T = std::decay_t<decltype(value)>;
                        return self.add(std::forward<T>(value));
                    },
                    element);
            },
            "element"_a,
            R"(Add an element to the scene.

:param element: The element to add to the scene. E.g. node, mesh, image, texture, material, light, camera, skeleton, or animation.

:returns: The id of the added element.)")
        .def(
            "add_child",
            &SceneType::add_child,
            "parent_id"_a,
            "child_id"_a,
            R"(Add a child node to a parent node. The parent-child relationship will be updated for both nodes.

:param parent_id: The parent node id.
:param child_id: The child node id.

:returns: The id of the added child node.)");

    // Free function: accumulate a node's transform up to the root. The result
    // is copied (rv_policy::copy) since the local matrix dies at return.
    m.def(
        "compute_global_node_transform",
        [](const SceneType& scene, size_t node_idx) {
            auto t = utils::compute_global_node_transform<Scalar, Index>(scene, node_idx);
            return nb::ndarray<nb::numpy, float, nb::f_contig, nb::shape<4, 4>>(
                t.data(),
                {4, 4},
                nb::handle(), // owner
                {1, 4});
        },
        nb::rv_policy::copy,
        "scene"_a,
        "node_idx"_a,
        R"(Compute the global transform associated with a node.

:param scene: The input scene.
:param node_idx: The index of the target node.

:returns: The global transform of the target node, which is the combination of transforms from this node all the way to the root.
)");

    // Free function: flatten a scene into a single mesh.
    m.def(
        "scene_to_mesh",
        [](const SceneType& scene,
           bool normalize_normals,
           bool normalize_tangents_bitangents,
           bool preserve_attributes) {
            TransformOptions transform_options;
            transform_options.normalize_normals = normalize_normals;
            transform_options.normalize_tangents_bitangents = normalize_tangents_bitangents;
            return scene::scene_to_mesh(scene, transform_options, preserve_attributes);
        },
        "scene"_a,
        // Defaults mirror the C++ TransformOptions defaults.
        "normalize_normals"_a = TransformOptions{}.normalize_normals,
        "normalize_tangents_bitangents"_a = TransformOptions{}.normalize_tangents_bitangents,
        "preserve_attributes"_a = true,
        R"(Converts a scene into a concatenated mesh with all the transforms applied.

:param scene: Scene to convert.
:param normalize_normals: If enabled, normals are normalized after transformation.
:param normalize_tangents_bitangents: If enabled, tangents and bitangents are normalized after transformation.
:param preserve_attributes: Preserve shared attributes and map them to the output mesh.

:return: Concatenated mesh.)");

    // Free function: wrap a single mesh in a minimal scene.
    m.def(
        "mesh_to_scene",
        [](const SceneType::MeshType& mesh) { return scene::mesh_to_scene(mesh); },
        "mesh"_a,
        R"(Converts a single mesh into a scene with a single identity instance of the input mesh.

:param mesh: Input mesh to convert.

:return: Scene containing the input mesh.)");

    // Free function: wrap a list of meshes in a minimal scene.
    m.def(
        "meshes_to_scene",
        [](std::vector<SceneType::MeshType> meshes) {
            return scene::meshes_to_scene(std::move(meshes));
        },
        "meshes"_a,
        R"(Converts a list of meshes into a scene with a single identity instance of each input mesh.

:param meshes: Input meshes to convert.

:return: Scene containing the input meshes.)");
}
728
729} // namespace lagrange::python
LA_CORE_API spdlog::logger & logger()
Retrieves the current logger.
Definition Logger.cpp:40
@ Scalar
Mesh attribute must have exactly 1 channel.
Definition AttributeFwd.h:56
#define la_runtime_assert(...)
Runtime assertion check.
Definition assert.h:174
bool normalize_normals
If enabled, normals are normalized after transformation.
Definition TransformOptions.h:31
bool normalize_tangents_bitangents
If enabled, tangents and bitangents are normalized after transformation.
Definition TransformOptions.h:34
size_t height
Image height.
Definition Scene.h:95
size_t width
Image width.
Definition Scene.h:92
AttributeValueType element_type
The scalar type of the elements in the buffer.
Definition Scene.h:101
std::vector< unsigned char > data
Raw buffer of size (width * height * num_channels * num_bits_per_element / 8) bytes containing image ...
Definition Scene.h:104
size_t num_channels
Number of image channels (must be 1, 3, or 4).
Definition Scene.h:98
fs::path uri
Image file path.
Definition Scene.h:125
ElementId index
Texture index. Index in scene.textures vector.
Definition Scene.h:137