14#include <lagrange/AttributeValueType.h>
15#include <lagrange/Logger.h>
16#include <lagrange/python/binding.h>
17#include <lagrange/python/tensor_utils.h>
18#include <lagrange/scene/Scene.h>
19#include <lagrange/scene/internal/scene_string_utils.h>
20#include <lagrange/scene/scene_convert.h>
21#include <lagrange/scene/scene_utils.h>
22#include <lagrange/utils/assert.h>
24#include "bind_value.h"
26namespace lagrange::python {
28namespace nb = nanobind;
30void bind_scene(nb::module_& m)
32 using namespace lagrange::scene;
34 using Index = uint32_t;
35 using SceneType = Scene<Scalar, Index>;
37 nb::bind_vector<SafeVector<ElementId>>(m,
"ElementIdList");
38 nb::bind_safe_vector<SafeVector<Node>>(m,
"NodeList");
39 nb::bind_safe_vector<SafeVector<SceneMeshInstance>>(m,
"SceneMeshInstanceList");
40 nb::bind_safe_vector<SafeVector<SurfaceMesh<Scalar, Index>>>(m,
"SurfaceMeshList");
41 nb::bind_safe_vector<SafeVector<ImageExperimental>>(m,
"ImageList");
42 nb::bind_safe_vector<SafeVector<Texture>>(m,
"TextureList");
43 nb::bind_safe_vector<SafeVector<MaterialExperimental>>(m,
"MaterialList");
44 nb::bind_safe_vector<SafeVector<Light>>(m,
"LightList");
45 nb::bind_safe_vector<SafeVector<Camera>>(m,
"CameraList");
46 nb::bind_safe_vector<SafeVector<Skeleton>>(m,
"SkeletonList");
47 nb::bind_safe_vector<SafeVector<Animation>>(m,
"AnimationList");
49 nb::class_<lagrange::scene::Extensions>(m,
"Extensions")
52 [](
const lagrange::scene::Extensions& self) {
53 return scene::internal::to_string(self);
55 .def_prop_ro(
"size", &Extensions::size)
56 .def_prop_ro(
"empty", &Extensions::empty)
60 nb::rv_policy::reference_internal,
61 "Raw data stored in this extension as a dict");
63 nb::class_<SceneMeshInstance>(
66 "Pairs a mesh with its materials (zero, one, or more)")
70 [](
const SceneMeshInstance& self) {
return scene::internal::to_string(self); })
73 [](SceneMeshInstance& self) -> std::optional<ElementId> {
74 if (self.mesh != invalid_element)
79 [](SceneMeshInstance& self, ElementId mesh) { self.mesh = mesh; },
80 "Mesh index. Has to be a valid index in the scene.meshes vector (None if invalid)")
83 &SceneMeshInstance::materials,
84 "Material indices in the scene.materials vector. This is typically a single material "
85 "index. When a single mesh uses multiple materials, the AttributeName::material_id "
86 "facet attribute should be defined.");
88 nb::class_<Node>(m,
"Node",
"Represents a node in the scene hierarchy")
90 .def(
"__repr__", [](
const Node& self) {
return scene::internal::to_string(self); })
91 .def_rw(
"name", &Node::name,
"Node name. May not be unique and can be empty")
95 return nb::ndarray<nb::numpy, float, nb::f_contig, nb::shape<4, 4>>(
96 node.transform.data(),
101 [](Node& node, nb::ndarray<nb::numpy, const float, nb::shape<4, 4>> t) ->
void {
102 auto view = t.view<float, nb::ndim<2>>();
104 for (
size_t i = 0; i < 4; i++) {
105 for (
size_t j = 0; j < 4; j++) {
106 node.transform.data()[i + j * 4] = view(i, j);
110 "Transform of the node, relative to its parent")
113 [](Node& node) -> std::optional<ElementId> {
114 if (node.parent != invalid_element)
119 [](Node& node, ElementId parent) { node.parent = parent; },
120 "Parent index. May be invalid if the node has no parent (e.g. the root)")
121 .def_rw(
"children", &Node::children,
"Children indices. May be empty")
122 .def_rw(
"meshes", &Node::meshes,
"List of meshes contained in this node")
123 .def_rw(
"cameras", &Node::cameras,
"List of cameras contained in this node")
124 .def_rw(
"lights", &Node::lights,
"List of lights contained in this node")
125 .def_rw(
"extensions", &Node::extensions);
127 nb::class_<ImageBufferExperimental> image_buffer(
130 "Minimalistic image data structure that stores the raw image data");
131 image_buffer.def(nb::init<>())
134 [](
const ImageBufferExperimental& self) {
return scene::internal::to_string(self); })
135 .def_ro(
"width", &ImageBufferExperimental::width,
"Image width")
136 .def_ro(
"height", &ImageBufferExperimental::height,
"Image height")
139 &ImageBufferExperimental::num_channels,
140 "Number of image channels (must be 1, 3, or 4)")
143 [](ImageBufferExperimental& self) {
146 case AttributeValueType::e_int8_t:
148 nb::ndarray<int8_t, nb::numpy, nb::c_contig, nb::device::cpu>(
149 reinterpret_cast<int8_t*
>(self.
data.data()),
153 nb::rv_policy::reference_internal);
154 case AttributeValueType::e_uint8_t:
156 nb::ndarray<uint8_t, nb::numpy, nb::c_contig, nb::device::cpu>(
157 reinterpret_cast<uint8_t*
>(self.
data.data()),
161 nb::rv_policy::reference_internal);
162 case AttributeValueType::e_int16_t:
164 nb::ndarray<int16_t, nb::numpy, nb::c_contig, nb::device::cpu>(
165 reinterpret_cast<int16_t*
>(self.
data.data()),
169 nb::rv_policy::reference_internal);
170 case AttributeValueType::e_uint16_t:
172 nb::ndarray<uint16_t, nb::numpy, nb::c_contig, nb::device::cpu>(
173 reinterpret_cast<uint16_t*
>(self.
data.data()),
177 nb::rv_policy::reference_internal);
178 case AttributeValueType::e_int32_t:
180 nb::ndarray<int32_t, nb::numpy, nb::c_contig, nb::device::cpu>(
181 reinterpret_cast<int32_t*
>(self.
data.data()),
185 nb::rv_policy::reference_internal);
186 case AttributeValueType::e_uint32_t:
188 nb::ndarray<uint32_t, nb::numpy, nb::c_contig, nb::device::cpu>(
189 reinterpret_cast<uint32_t*
>(self.
data.data()),
193 nb::rv_policy::reference_internal);
194 case AttributeValueType::e_int64_t:
196 nb::ndarray<int64_t, nb::numpy, nb::c_contig, nb::device::cpu>(
197 reinterpret_cast<int64_t*
>(self.
data.data()),
201 nb::rv_policy::reference_internal);
202 case AttributeValueType::e_uint64_t:
204 nb::ndarray<uint64_t, nb::numpy, nb::c_contig, nb::device::cpu>(
205 reinterpret_cast<uint64_t*
>(self.
data.data()),
209 nb::rv_policy::reference_internal);
210 case AttributeValueType::e_float:
212 nb::ndarray<float, nb::numpy, nb::c_contig, nb::device::cpu>(
213 reinterpret_cast<float*
>(self.
data.data()),
217 nb::rv_policy::reference_internal);
218 case AttributeValueType::e_double:
220 nb::ndarray<double, nb::numpy, nb::c_contig, nb::device::cpu>(
221 reinterpret_cast<double*
>(self.
data.data()),
225 nb::rv_policy::reference_internal);
226 default:
throw nb::type_error(
"Unsupported image buffer `dtype`!");
229 [](ImageBufferExperimental& self,
230 nb::ndarray<nb::numpy, nb::c_contig, nb::device::cpu> tensor) {
232 self.
width = tensor.shape(1);
233 self.
height = tensor.shape(0);
235 auto dtype = tensor.dtype();
236 if (dtype == nb::dtype<int8_t>()) {
238 }
else if (dtype == nb::dtype<uint8_t>()) {
240 }
else if (dtype == nb::dtype<int16_t>()) {
242 }
else if (dtype == nb::dtype<uint16_t>()) {
244 }
else if (dtype == nb::dtype<int32_t>()) {
246 }
else if (dtype == nb::dtype<uint32_t>()) {
248 }
else if (dtype == nb::dtype<int64_t>()) {
250 }
else if (dtype == nb::dtype<uint64_t>()) {
252 }
else if (dtype == nb::dtype<float>()) {
254 }
else if (dtype == nb::dtype<double>()) {
257 throw nb::type_error(
"Unsupported input tensor `dtype`!");
259 self.
data.resize(tensor.nbytes());
261 reinterpret_cast<uint8_t*
>(tensor.data()),
262 reinterpret_cast<uint8_t*
>(tensor.data()) + tensor.nbytes(),
265 "Raw buffer of size (width * height * num_channels * num_bits_per_element / 8) bytes "
266 "containing image data")
269 [](ImageBufferExperimental& self) -> std::optional<nb::type_object> {
270 auto np = nb::module_::import_(
"numpy");
272 case AttributeValueType::e_int8_t:
return np.attr(
"int8");
273 case AttributeValueType::e_int16_t:
return np.attr(
"int16");
274 case AttributeValueType::e_int32_t:
return np.attr(
"int32");
275 case AttributeValueType::e_int64_t:
return np.attr(
"int64");
276 case AttributeValueType::e_uint8_t:
return np.attr(
"uint8");
277 case AttributeValueType::e_uint16_t:
return np.attr(
"uint16");
278 case AttributeValueType::e_uint32_t:
return np.attr(
"uint32");
279 case AttributeValueType::e_uint64_t:
return np.attr(
"uint64");
280 case AttributeValueType::e_float:
return np.attr(
"float32");
281 case AttributeValueType::e_double:
return np.attr(
"float64");
282 default:
logger().warn(
"Image buffer has an unknown dtype.");
return std::nullopt;
285 "The scalar type of the elements in the buffer");
287 nb::class_<ImageExperimental> image(
290 "Image structure that can store either image data or reference to an image file");
291 image.def(nb::init<>())
294 [](
const ImageExperimental& self) {
return scene::internal::to_string(self); })
297 &ImageExperimental::name,
298 "Image name. Not guaranteed to be unique and can be empty")
299 .def_rw(
"image", &ImageExperimental::image,
"Image data")
302 [](
const ImageExperimental& self) -> std::optional<std::string> {
303 if (self.
uri.empty())
306 return self.
uri.string();
308 [](ImageExperimental& self, std::optional<std::string> uri) {
310 self.
uri = fs::path(uri.value());
312 self.
uri = fs::path();
314 "Image file path. This path is relative to the file that contains the scene. It is "
315 "only valid if image data should be mapped to an external file")
316 .def_rw(
"extensions", &ImageExperimental::extensions,
"Image extensions");
318 nb::class_<TextureInfo>(
321 "Pair of texture index (which texture to use) and texture coordinate index (which set of "
324 .def(
"__repr__", [](
const TextureInfo& self) {
return scene::internal::to_string(self); })
327 [](
const TextureInfo& self) -> std::optional<ElementId> {
328 if (self.
index != invalid_element)
333 [](TextureInfo& self, std::optional<ElementId> index) {
334 if (index.has_value())
335 self.
index = index.value();
337 self.
index = invalid_element;
339 "Texture index. Index in scene.textures vector. `None` if not set")
342 &TextureInfo::texcoord,
343 "Index of UV coordinates. Usually stored in the mesh as `texcoord_x` attribute where x "
344 "is this variable. This is typically 0");
346 nb::class_<MaterialExperimental> material(
349 "PBR material, based on the gltf specification. This is subject to change, to support more "
351 material.def(nb::init<>())
354 [](
const MaterialExperimental& self) {
return scene::internal::to_string(self); })
357 &MaterialExperimental::name,
358 "Material name. May not be unique, and can be empty")
359 .def_rw(
"base_color_value", &MaterialExperimental::base_color_value,
"Base color value")
361 "base_color_texture",
362 &MaterialExperimental::base_color_texture,
363 "Base color texture")
366 &MaterialExperimental::alpha_mode,
367 "The alpha mode specifies how to interpret the alpha value of the base color")
368 .def_rw(
"alpha_cutoff", &MaterialExperimental::alpha_cutoff,
"Alpha cutoff value")
369 .def_rw(
"emissive_value", &MaterialExperimental::emissive_value,
"Emissive color value")
370 .def_rw(
"emissive_texture", &MaterialExperimental::emissive_texture,
"Emissive texture")
371 .def_rw(
"metallic_value", &MaterialExperimental::metallic_value,
"Metallic value")
372 .def_rw(
"roughness_value", &MaterialExperimental::roughness_value,
"Roughness value")
374 "metallic_roughness_texture",
375 &MaterialExperimental::metallic_roughness_texture,
376 "Metalness and roughness are packed together in a single texture. Green channel has "
377 "roughness, blue channel has metalness")
378 .def_rw(
"normal_texture", &MaterialExperimental::normal_texture,
"Normal texture")
381 &MaterialExperimental::normal_scale,
382 "Normal scaling factor. normal = normalize(<sampled tex value> * 2 - 1) * vec3(scale, "
384 .def_rw(
"occlusion_texture", &MaterialExperimental::occlusion_texture,
"Occlusion texture")
386 "occlusion_strength",
387 &MaterialExperimental::occlusion_strength,
388 "Occlusion strength. color = lerp(color, color * <sampled tex value>, strength)")
391 &MaterialExperimental::double_sided,
392 "Whether the material is double-sided")
393 .def_rw(
"extensions", &MaterialExperimental::extensions,
"Material extensions");
395 nb::enum_<MaterialExperimental::AlphaMode>(material,
"AlphaMode",
"Alpha mode")
398 MaterialExperimental::AlphaMode::Opaque,
399 "Alpha is ignored, and rendered output is opaque")
402 MaterialExperimental::AlphaMode::Mask,
403 "Output is either opaque or transparent depending on the alpha value and the "
404 "alpha_cutoff value")
407 MaterialExperimental::AlphaMode::Blend,
408 "Alpha value is used to composite source and destination");
411 nb::class_<Texture> texture(m,
"Texture",
"Texture");
412 texture.def(nb::init<>())
413 .def(
"__repr__", [](
const Texture& self) {
return scene::internal::to_string(self); })
414 .def_rw(
"name", &Texture::name,
"Texture name")
417 [](Texture& self) -> std::optional<ElementId> {
418 if (self.image != invalid_element)
423 [](Texture& self, ElementId img) { self.image = img; },
424 "Index of image in scene.images vector (None if invalid)")
427 &Texture::mag_filter,
428 "Texture magnification filter, used when texture appears larger on screen than the "
432 &Texture::min_filter,
433 "Texture minification filter, used when the texture appears smaller on screen than the "
435 .def_rw(
"wrap_u", &Texture::wrap_u,
"Texture wrap mode for U coordinate")
436 .def_rw(
"wrap_v", &Texture::wrap_v,
"Texture wrap mode for V coordinate")
437 .def_rw(
"scale", &Texture::scale,
"Texture scale")
438 .def_rw(
"offset", &Texture::offset,
"Texture offset")
439 .def_rw(
"rotation", &Texture::rotation,
"Texture rotation")
440 .def_rw(
"extensions", &Texture::extensions,
"Texture extensions");
442 nb::enum_<Texture::WrapMode>(texture,
"WrapMode",
"Texture wrap mode")
443 .value(
"Wrap", Texture::WrapMode::Wrap,
"u|v becomes u%1|v%1")
446 Texture::WrapMode::Clamp,
447 "Coordinates outside [0, 1] are clamped to the nearest value")
450 Texture::WrapMode::Decal,
451 "If the texture coordinates for a pixel are outside [0, 1], the texture is not applied")
452 .value(
"Mirror", Texture::WrapMode::Mirror,
"Mirror wrap mode");
453 nb::enum_<Texture::TextureFilter>(texture,
"TextureFilter",
"Texture filter mode")
454 .value(
"Undefined", Texture::TextureFilter::Undefined,
"Undefined filter")
455 .value(
"Nearest", Texture::TextureFilter::Nearest,
"Nearest neighbor filtering")
456 .value(
"Linear", Texture::TextureFilter::Linear,
"Linear filtering")
458 "NearestMipmapNearest",
459 Texture::TextureFilter::NearestMipmapNearest,
460 "Nearest mipmap nearest filtering")
462 "LinearMipmapNearest",
463 Texture::TextureFilter::LinearMipmapNearest,
464 "Linear mipmap nearest filtering")
466 "NearestMipmapLinear",
467 Texture::TextureFilter::NearestMipmapLinear,
468 "Nearest mipmap linear filtering")
470 "LinearMipmapLinear",
471 Texture::TextureFilter::LinearMipmapLinear,
472 "Linear mipmap linear filtering");
474 nb::class_<Light> light(m,
"Light",
"Light");
475 light.def(nb::init<>())
476 .def(
"__repr__", [](
const Light& self) {
return scene::internal::to_string(self); })
477 .def_rw(
"name", &Light::name,
"Light name")
478 .def_rw(
"type", &Light::type,
"Light type")
482 "Light position. Note that the light is part of the scene graph, and has an associated "
483 "transform in its node. This value is relative to the coordinate system defined by the "
485 .def_rw(
"direction", &Light::direction,
"Light direction")
486 .def_rw(
"up", &Light::up,
"Light up vector")
487 .def_rw(
"intensity", &Light::intensity,
"Light intensity")
489 "attenuation_constant",
490 &Light::attenuation_constant,
491 "Attenuation constant. Intensity of light at a given distance 'd' is: intensity / "
492 "(attenuation_constant + attenuation_linear * d + attenuation_quadratic * d * d + "
493 "attenuation_cubic * d * d * d)")
494 .def_rw(
"attenuation_linear", &Light::attenuation_linear,
"Linear attenuation factor")
496 "attenuation_quadratic",
497 &Light::attenuation_quadratic,
498 "Quadratic attenuation factor")
499 .def_rw(
"attenuation_cubic", &Light::attenuation_cubic,
"Cubic attenuation factor")
503 "Range is defined for point and spot lights. It defines a distance cutoff at which the "
504 "light intensity is to be considered zero. When the value is 0, range is assumed to be "
506 .def_rw(
"color_diffuse", &Light::color_diffuse,
"Diffuse color")
507 .def_rw(
"color_specular", &Light::color_specular,
"Specular color")
508 .def_rw(
"color_ambient", &Light::color_ambient,
"Ambient color")
511 &Light::angle_inner_cone,
512 "Inner angle of a spot light's light cone. 2PI for point lights, undefined for "
513 "directional lights")
516 &Light::angle_outer_cone,
517 "Outer angle of a spot light's light cone. 2PI for point lights, undefined for "
518 "directional lights")
519 .def_rw(
"size", &Light::size,
"Size of area light source")
520 .def_rw(
"extensions", &Light::extensions,
"Light extensions");
522 nb::enum_<Light::Type>(light,
"Type",
"Light type")
523 .value(
"Undefined", Light::Type::Undefined,
"Undefined light type")
524 .value(
"Directional", Light::Type::Directional,
"Directional light")
525 .value(
"Point", Light::Type::Point,
"Point light")
526 .value(
"Spot", Light::Type::Spot,
"Spot light")
527 .value(
"Ambient", Light::Type::Ambient,
"Ambient light")
528 .value(
"Area", Light::Type::Area,
"Area light");
530 nb::class_<Camera> camera(m,
"Camera",
"Camera");
531 camera.def(nb::init<>())
532 .def(
"__repr__", [](
const Camera& self) {
return scene::internal::to_string(self); })
533 .def_rw(
"name", &Camera::name,
"Camera name")
537 "Camera position. Note that the camera is part of the scene graph, and has an "
538 "associated transform in its node. This value is relative to the coordinate system "
539 "defined by the node")
540 .def_rw(
"up", &Camera::up,
"Camera up vector")
541 .def_rw(
"look_at", &Camera::look_at,
"Camera look-at point")
545 "Distance of the near clipping plane. This value cannot be 0")
546 .def_rw(
"far_plane", &Camera::far_plane,
"Distance of the far clipping plane")
547 .def_rw(
"type", &Camera::type,
"Camera type")
550 &Camera::aspect_ratio,
551 "Screen aspect ratio. This is the value of width / height of the screen. aspect_ratio "
552 "= tan(horizontal_fov / 2) / tan(vertical_fov / 2)")
555 &Camera::horizontal_fov,
556 "Horizontal field of view angle, in radians. This is the angle between the left and "
557 "right borders of the viewport. It should not be greater than Pi. fov is only defined "
558 "when the camera type is perspective, otherwise it should be 0")
560 "orthographic_width",
561 &Camera::orthographic_width,
562 "Half width of the orthographic view box. Or horizontal magnification. This is only "
563 "defined when the camera type is orthographic, otherwise it should be 0")
566 &Camera::get_vertical_fov,
567 "Get the vertical field of view. Make sure aspect_ratio is set before calling this")
569 "set_horizontal_fov_from_vertical_fov",
570 &Camera::set_horizontal_fov_from_vertical_fov,
572 "Set horizontal fov from vertical fov. Make sure aspect_ratio is set before calling "
574 .def_rw(
"extensions", &Camera::extensions,
"Camera extensions");
576 nb::enum_<Camera::Type>(camera,
"Type",
"Camera type")
577 .value(
"Perspective", Camera::Type::Perspective,
"Perspective projection")
578 .value(
"Orthographic", Camera::Type::Orthographic,
"Orthographic projection");
580 nb::class_<Animation>(m,
"Animation",
"Animation")
582 .def(
"__repr__", [](
const Animation& self) {
return scene::internal::to_string(self); })
583 .def_rw(
"name", &Animation::name,
"Animation name")
584 .def_rw(
"extensions", &Animation::extensions,
"Animation extensions");
587 nb::class_<Skeleton>(m,
"Skeleton",
"Skeleton")
589 .def(
"__repr__", [](
const Skeleton& self) {
return scene::internal::to_string(self); })
593 "This skeleton is used to deform those meshes. This will typically contain one value, "
594 "but can have zero or multiple meshes. The value is the index in the scene meshes")
595 .def_rw(
"extensions", &Skeleton::extensions,
"Skeleton extensions");
598 nb::class_<SceneType>(m,
"Scene",
"A 3D scene")
600 .def(
"__repr__", [](
const SceneType& self) {
return scene::internal::to_string(self); })
601 .def_rw(
"name", &SceneType::name,
"Name of the scene")
605 "Scene nodes. This is a list of nodes, the hierarchy information is contained by each "
606 "node having a list of children as indices to this vector")
609 &SceneType::root_nodes,
610 "Root nodes. This is typically one. Must be at least one")
611 .def_rw(
"meshes", &SceneType::meshes,
"Scene meshes")
612 .def_rw(
"images", &SceneType::images,
"Images")
613 .def_rw(
"textures", &SceneType::textures,
"Textures. They can reference images")
614 .def_rw(
"materials", &SceneType::materials,
"Materials. They can reference textures")
615 .def_rw(
"lights", &SceneType::lights,
"Lights in the scene")
619 "Cameras. The first camera (if any) is the default camera view")
620 .def_rw(
"skeletons", &SceneType::skeletons,
"Scene skeletons")
621 .def_rw(
"animations", &SceneType::animations,
"Animations (unused for now)")
622 .def_rw(
"extensions", &SceneType::extensions,
"Scene extensions")
631 MaterialExperimental,
635 Animation> element) {
638 using T = std::decay_t<
decltype(value)>;
639 return self.add(std::forward<T>(value));
644 R
"(Add an element to the scene.
646:param element: The element to add to the scene. E.g. node, mesh, image, texture, material, light, camera, skeleton, or animation.
648:returns: The id of the added element.)")
651 &SceneType::add_child,
654 R
"(Add a child node to a parent node. The parent-child relationship will be updated for both nodes.
656:param parent_id: The parent node id.
657:param child_id: The child node id.
659:returns: The id of the added child node.)");
662 "compute_global_node_transform",
663 [](
const SceneType& scene,
size_t node_idx) {
664 auto t = utils::compute_global_node_transform<Scalar, Index>(scene, node_idx);
665 return nb::ndarray<nb::numpy, float, nb::f_contig, nb::shape<4, 4>>(
674 R
"(Compute the global transform associated with a node.
676:param scene: The input scene.
677:param node_idx: The index of the target node.
679:returns: The global transform of the target node, which is the combination of transforms from this node all the way to the root.
684 [](
const SceneType& scene,
685 bool normalize_normals,
686 bool normalize_tangents_bitangents,
687 bool preserve_attributes) {
688 TransformOptions transform_options;
691 return scene::scene_to_mesh(scene, transform_options, preserve_attributes);
694 "normalize_normals"_a = TransformOptions{}.normalize_normals,
695 "normalize_tangents_bitangents"_a = TransformOptions{}.normalize_tangents_bitangents,
696 "preserve_attributes"_a =
true,
697 R
"(Converts a scene into a concatenated mesh with all the transforms applied.
699:param scene: Scene to convert.
700:param normalize_normals: If enabled, normals are normalized after transformation.
701:param normalize_tangents_bitangents: If enabled, tangents and bitangents are normalized after transformation.
702:param preserve_attributes: Preserve shared attributes and map them to the output mesh.
704:return: Concatenated mesh.)");
708 [](
const SceneType::MeshType& mesh) {
return scene::mesh_to_scene(mesh); },
710 R
"(Converts a single mesh into a scene with a single identity instance of the input mesh.
712:param mesh: Input mesh to convert.
714:return: Scene containing the input mesh.)");
718 [](std::vector<SceneType::MeshType> meshes) {
719 return scene::meshes_to_scene(std::move(meshes));
722 R
"(Converts a list of meshes into a scene with a single identity instance of each input mesh.
724:param meshes: Input meshes to convert.
726:return: Scene containing the input meshes.)");
LA_CORE_API spdlog::logger & logger()
Retrieves the current logger.
Definition Logger.cpp:40
@ Scalar
Mesh attribute must have exactly 1 channel.
Definition AttributeFwd.h:56
#define la_runtime_assert(...)
Runtime assertion check.
Definition assert.h:174
size_t height
Image height.
Definition Scene.h:95
size_t width
Image width.
Definition Scene.h:92
AttributeValueType element_type
The scalar type of the elements in the buffer.
Definition Scene.h:101
std::vector< unsigned char > data
Raw buffer of size (width * height * num_channels * num_bits_per_element / 8) bytes containing image data.
Definition Scene.h:104
size_t num_channels
Number of image channels (must be 1, 3, or 4).
Definition Scene.h:98
fs::path uri
Image file path.
Definition Scene.h:125
ElementId index
Texture index. Index in scene.textures vector.
Definition Scene.h:137