git.blender.org/blender-addons.git
Diffstat (limited to 'io_scene_gltf2')
 io_scene_gltf2/__init__.py                                                          |  46
 io_scene_gltf2/blender/com/gltf2_blender_math.py                                    |  13
 io_scene_gltf2/blender/exp/gltf2_blender_export.py                                  |   8
 io_scene_gltf2/blender/exp/gltf2_blender_extract.py                                 | 814
 io_scene_gltf2/blender/exp/gltf2_blender_gather_animation_sampler_keyframes.py      |  10
 io_scene_gltf2/blender/exp/gltf2_blender_gather_animation_samplers.py               |   8
 io_scene_gltf2/blender/exp/gltf2_blender_gather_cameras.py                          |   6
 io_scene_gltf2/blender/exp/gltf2_blender_gather_joints.py                           |  22
 io_scene_gltf2/blender/exp/gltf2_blender_gather_materials_pbr_metallic_roughness.py |   7
 io_scene_gltf2/blender/exp/gltf2_blender_gather_nodes.py                            |  52
 io_scene_gltf2/blender/exp/gltf2_blender_gather_primitive_attributes.py             | 190
 io_scene_gltf2/blender/exp/gltf2_blender_gather_primitives.py                       |  58
 io_scene_gltf2/blender/exp/gltf2_blender_gather_skins.py                            |   9
 io_scene_gltf2/blender/exp/gltf2_blender_utils.py                                   |  67
 io_scene_gltf2/blender/imp/gltf2_blender_KHR_materials_pbrSpecularGlossiness.py     |   3
 io_scene_gltf2/blender/imp/gltf2_blender_gltf.py                                    |   4
 io_scene_gltf2/blender/imp/gltf2_blender_mesh.py                                    | 699
 io_scene_gltf2/blender/imp/gltf2_blender_pbrMetallicRoughness.py                    |   9
 io_scene_gltf2/blender/imp/gltf2_blender_primitive.py                               | 344
 io_scene_gltf2/io/com/gltf2_io_constants.py                                         |  12
 io_scene_gltf2/io/imp/gltf2_io_binary.py                                            | 136
 io_scene_gltf2/io/imp/gltf2_io_gltf.py                                              |   2
 22 files changed, 1184 insertions(+), 1335 deletions(-)
diff --git a/io_scene_gltf2/__init__.py b/io_scene_gltf2/__init__.py
index 3ea1ce11..74338ef0 100755
--- a/io_scene_gltf2/__init__.py
+++ b/io_scene_gltf2/__init__.py
@@ -14,8 +14,8 @@
bl_info = {
'name': 'glTF 2.0 format',
- 'author': 'Julien Duroure, Norbert Nopper, Urs Hanselmann, Moritz Becher, Benjamin Schmithüsen, Jim Eckerlein, and many external contributors',
- "version": (1, 3, 28),
+ 'author': 'Julien Duroure, Scurest, Norbert Nopper, Urs Hanselmann, Moritz Becher, Benjamin Schmithüsen, Jim Eckerlein, and many external contributors',
+ "version": (1, 4, 7),
'blender': (2, 90, 0),
'location': 'File > Import-Export',
'description': 'Import-Export as glTF 2.0',
@@ -114,10 +114,10 @@ class ExportGLTF2_Base:
export_image_format: EnumProperty(
name='Images',
items=(('AUTO', 'Automatic',
- 'Save PNGs as PNGs and JPEGs as JPEGs.\n'
+ 'Save PNGs as PNGs and JPEGs as JPEGs. '
'If neither one, use PNG'),
('JPEG', 'JPEG Format (.jpg)',
- 'Save images as JPEGs. (Images that need alpha are saved as PNGs though.)\n'
+ 'Save images as JPEGs. (Images that need alpha are saved as PNGs though.) '
'Be aware of a possible loss in quality'),
),
description=(
@@ -276,8 +276,8 @@ class ExportGLTF2_Base:
export_nla_strips: BoolProperty(
name='Group by NLA Track',
description=(
- "When on, multiple actions become part of the same glTF animation if\n"
- "they're pushed onto NLA tracks with the same name.\n"
+ "When on, multiple actions become part of the same glTF animation if "
+ "they're pushed onto NLA tracks with the same name. "
"When off, all the currently assigned actions become one glTF animation"
),
default=True
@@ -485,6 +485,8 @@ class ExportGLTF2_Base:
bpy.path.ensure_ext(self.filepath,self.filename_ext)))[0] + '.bin'
user_extensions = []
+ pre_export_callbacks = []
+ post_export_callbacks = []
import sys
preferences = bpy.context.preferences
@@ -500,7 +502,13 @@ class ExportGLTF2_Base:
extension_ctors = module.glTF2ExportUserExtensions
for extension_ctor in extension_ctors:
user_extensions.append(extension_ctor())
+ if hasattr(module, 'glTF2_pre_export_callback'):
+ pre_export_callbacks.append(module.glTF2_pre_export_callback)
+ if hasattr(module, 'glTF2_post_export_callback'):
+ post_export_callbacks.append(module.glTF2_post_export_callback)
export_settings['gltf_user_extensions'] = user_extensions
+ export_settings['pre_export_callbacks'] = pre_export_callbacks
+ export_settings['post_export_callbacks'] = post_export_callbacks
return gltf2_blender_export.save(context, export_settings)
@@ -836,6 +844,7 @@ class ImportGLTF2(Operator, ImportHelper):
"""Load a glTF 2.0 file"""
bl_idname = 'import_scene.gltf'
bl_label = 'Import glTF 2.0'
+ bl_options = {'REGISTER', 'UNDO'}
filter_glob: StringProperty(default="*.glb;*.gltf", options={'HIDDEN'})
@@ -854,6 +863,18 @@ class ImportGLTF2(Operator, ImportHelper):
default=True
)
+ merge_vertices: BoolProperty(
+ name='Merge Vertices',
+ description=(
+ 'The glTF format requires discontinuous normals, UVs, and '
+ 'other vertex attributes to be stored as separate vertices, '
+ 'as required for rendering on typical graphics hardware. '
+ 'This option attempts to combine co-located vertices where possible. '
+ 'Currently cannot combine verts with different normals'
+ ),
+ default=False,
+ )
+
import_shading: EnumProperty(
name="Shading",
items=(("NORMALS", "Use Normal Data", ""),
@@ -866,15 +887,15 @@ class ImportGLTF2(Operator, ImportHelper):
name="Bone Dir",
items=(
("BLENDER", "Blender (best for re-importing)",
- "Good for re-importing glTFs exported from Blender.\n"
+ "Good for re-importing glTFs exported from Blender. "
"Bone tips are placed on their local +Y axis (in glTF space)"),
("TEMPERANCE", "Temperance (average)",
- "Decent all-around strategy.\n"
- "A bone with one child has its tip placed on the local axis\n"
+ "Decent all-around strategy. "
+ "A bone with one child has its tip placed on the local axis "
"closest to its child"),
("FORTUNE", "Fortune (may look better, less accurate)",
- "Might look better than Temperance, but also might have errors.\n"
- "A bone with one child has its tip placed at its child's root.\n"
+ "Might look better than Temperance, but also might have errors. "
+ "A bone with one child has its tip placed at its child's root. "
"Non-uniform scalings may get messed up though, so beware"),
),
description="Heuristic for placing bones. Tries to make bones pretty",
@@ -885,7 +906,7 @@ class ImportGLTF2(Operator, ImportHelper):
name='Guess Original Bind Pose',
description=(
'Try to guess the original bind pose for skinned meshes from '
- 'the inverse bind matrices.\n'
+ 'the inverse bind matrices. '
'When off, use default/rest pose as bind pose'
),
default=True,
@@ -898,6 +919,7 @@ class ImportGLTF2(Operator, ImportHelper):
layout.use_property_decorate = False # No animation.
layout.prop(self, 'import_pack_images')
+ layout.prop(self, 'merge_vertices')
layout.prop(self, 'import_shading')
layout.prop(self, 'guess_original_bind_pose')
layout.prop(self, 'bone_heuristic')
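
For context, a minimal sketch of what an enabled add-on module could export so the scanning code above picks it up; the attribute names (glTF2ExportUserExtensions, glTF2_pre_export_callback, glTF2_post_export_callback) come from the patch itself, while the class and print statements are purely illustrative:

class ExampleExtension:
    """Placeholder user extension; the exporter instantiates each entry."""
    pass

# The exporter iterates module.glTF2ExportUserExtensions and calls each constructor.
glTF2ExportUserExtensions = [ExampleExtension]

def glTF2_pre_export_callback(export_settings):
    # Called with the exporter's settings dict before gathering starts.
    print("pre-export hook")

def glTF2_post_export_callback(export_settings):
    # Called after gathering, just before the .gltf/.glb file is written.
    print("post-export hook")
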
diff --git a/io_scene_gltf2/blender/com/gltf2_blender_math.py b/io_scene_gltf2/blender/com/gltf2_blender_math.py
index 72eb124a..bddc79a6 100755
--- a/io_scene_gltf2/blender/com/gltf2_blender_math.py
+++ b/io_scene_gltf2/blender/com/gltf2_blender_math.py
@@ -19,11 +19,6 @@ from mathutils import Matrix, Vector, Quaternion, Euler
from io_scene_gltf2.blender.com.gltf2_blender_data_path import get_target_property_name
-def multiply(a, b):
- """Multiplication."""
- return a @ b
-
-
def list_to_mathutils(values: typing.List[float], data_path: str) -> typing.Union[Vector, Quaternion, Euler]:
"""Transform a list to blender py object."""
target = get_target_property_name(data_path)
@@ -31,7 +26,7 @@ def list_to_mathutils(values: typing.List[float], data_path: str) -> typing.Unio
if target == 'delta_location':
return Vector(values) # TODO Should be Vector(values) - Vector(something)?
elif target == 'delta_rotation_euler':
- return Euler(values).to_quaternion() # TODO Should be multiply(Euler(values).to_quaternion(), something)?
+ return Euler(values).to_quaternion() # TODO Should be Euler(values).to_quaternion() @ something?
elif target == 'location':
return Vector(values)
elif target == 'rotation_axis_angle':
@@ -138,7 +133,7 @@ def transform(v: typing.Union[Vector, Quaternion], data_path: str, transform: Ma
def transform_location(location: Vector, transform: Matrix = Matrix.Identity(4)) -> Vector:
"""Transform location."""
m = Matrix.Translation(location)
- m = multiply(transform, m)
+ m = transform @ m
return m.to_translation()
@@ -146,7 +141,7 @@ def transform_rotation(rotation: Quaternion, transform: Matrix = Matrix.Identity
"""Transform rotation."""
rotation.normalize()
m = rotation.to_matrix().to_4x4()
- m = multiply(transform, m)
+ m = transform @ m
return m.to_quaternion()
@@ -156,7 +151,7 @@ def transform_scale(scale: Vector, transform: Matrix = Matrix.Identity(4)) -> Ve
m[0][0] = scale.x
m[1][1] = scale.y
m[2][2] = scale.z
- m = multiply(transform, m)
+ m = transform @ m
return m.to_scale()
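
A one-liner illustrating the replacement: since Blender 2.8x, mathutils types compose with the @ operator, which is all the removed multiply() helper did. A minimal sketch:

from mathutils import Matrix, Vector

# Equivalent to the removed multiply(transform, m): matrix composition via @.
m = Matrix.Translation((1.0, 0.0, 0.0)) @ Matrix.Scale(2.0, 4)
p = m @ Vector((0.0, 1.0, 0.0))   # transform a point
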
diff --git a/io_scene_gltf2/blender/exp/gltf2_blender_export.py b/io_scene_gltf2/blender/exp/gltf2_blender_export.py
index 2989ec31..fd433c7e 100755
--- a/io_scene_gltf2/blender/exp/gltf2_blender_export.py
+++ b/io_scene_gltf2/blender/exp/gltf2_blender_export.py
@@ -39,7 +39,15 @@ def save(context, export_settings):
__notify_start(context)
start_time = time.time()
+ pre_export_callbacks = export_settings["pre_export_callbacks"]
+ for callback in pre_export_callbacks:
+ callback(export_settings)
+
json, buffer = __export(export_settings)
+
+ post_export_callbacks = export_settings["post_export_callbacks"]
+ for callback in post_export_callbacks:
+ callback(export_settings)
__write_file(json, buffer, export_settings)
end_time = time.time()
diff --git a/io_scene_gltf2/blender/exp/gltf2_blender_extract.py b/io_scene_gltf2/blender/exp/gltf2_blender_extract.py
index e2d224ce..eef05044 100755
--- a/io_scene_gltf2/blender/exp/gltf2_blender_extract.py
+++ b/io_scene_gltf2/blender/exp/gltf2_blender_extract.py
@@ -17,46 +17,26 @@
#
from mathutils import Vector, Quaternion, Matrix
-from mathutils.geometry import tessellate_polygon
-from operator import attrgetter
from . import gltf2_blender_export_keys
from ...io.com.gltf2_io_debug import print_console
from ...io.com.gltf2_io_color_management import color_srgb_to_scene_linear
from io_scene_gltf2.blender.exp import gltf2_blender_gather_skins
-#
-# Globals
-#
-
-INDICES_ID = 'indices'
-MATERIAL_ID = 'material'
-ATTRIBUTES_ID = 'attributes'
-
-COLOR_PREFIX = 'COLOR_'
-MORPH_TANGENT_PREFIX = 'MORPH_TANGENT_'
-MORPH_NORMAL_PREFIX = 'MORPH_NORMAL_'
-MORPH_POSITION_PREFIX = 'MORPH_POSITION_'
-TEXCOORD_PREFIX = 'TEXCOORD_'
-WEIGHTS_PREFIX = 'WEIGHTS_'
-JOINTS_PREFIX = 'JOINTS_'
-
-TANGENT_ATTRIBUTE = 'TANGENT'
-NORMAL_ATTRIBUTE = 'NORMAL'
-POSITION_ATTRIBUTE = 'POSITION'
-
-GLTF_MAX_COLORS = 2
-
#
# Classes
#
+class Prim:
+ def __init__(self):
+ self.verts = {}
+ self.indices = []
+
class ShapeKey:
- def __init__(self, shape_key, vertex_normals, polygon_normals):
+ def __init__(self, shape_key, split_normals):
self.shape_key = shape_key
- self.vertex_normals = vertex_normals
- self.polygon_normals = polygon_normals
+ self.split_normals = split_normals
#
@@ -110,17 +90,17 @@ def convert_swizzle_tangent(tan, armature, blender_object, export_settings):
if (not armature) or (not blender_object):
# Classic case. Mesh is not skined, no need to apply armature transfoms on vertices / normals / tangents
if export_settings[gltf2_blender_export_keys.YUP]:
- return Vector((tan[0], tan[2], -tan[1], 1.0))
+ return Vector((tan[0], tan[2], -tan[1]))
else:
- return Vector((tan[0], tan[1], tan[2], 1.0))
+ return Vector((tan[0], tan[1], tan[2]))
else:
# Mesh is skined, we have to apply armature transforms on data
apply_matrix = armature.matrix_world.inverted() @ blender_object.matrix_world
- new_tan = apply_matrix.to_quaternion() @ tan
+ new_tan = apply_matrix.to_quaternion() @ Vector((tan[0], tan[1], tan[2]))
if export_settings[gltf2_blender_export_keys.YUP]:
- return Vector((new_tan[0], new_tan[2], -new_tan[1], 1.0))
+ return Vector((new_tan[0], new_tan[2], -new_tan[1]))
else:
- return Vector((new_tan[0], new_tan[1], new_tan[2], 1.0))
+ return Vector((new_tan[0], new_tan[1], new_tan[2]))
def convert_swizzle_rotation(rot, export_settings):
"""
@@ -142,567 +122,293 @@ def convert_swizzle_scale(scale, export_settings):
return Vector((scale[0], scale[1], scale[2]))
-def decompose_transition(matrix, export_settings):
- translation, rotation, scale = matrix.decompose()
-
- return translation, rotation, scale
-
-
def extract_primitives(glTF, blender_mesh, library, blender_object, blender_vertex_groups, modifiers, export_settings):
"""
Extract primitives from a mesh. Polygons are triangulated and sorted by material.
-
- Furthermore, primitives are split up, if the indices range is exceeded.
- Finally, triangles are also split up/duplicated, if face normals are used instead of vertex normals.
+ Vertices in multiple faces get split up as necessary.
"""
print_console('INFO', 'Extracting primitive: ' + blender_mesh.name)
- if blender_mesh.has_custom_normals:
- # Custom normals are all (0, 0, 0) until calling calc_normals_split() or calc_tangents().
- blender_mesh.calc_normals_split()
-
- use_tangents = False
- if blender_mesh.uv_layers.active and len(blender_mesh.uv_layers) > 0:
- try:
- blender_mesh.calc_tangents()
- use_tangents = True
- except Exception:
- print_console('WARNING', 'Could not calculate tangents. Please try to triangulate the mesh first.')
-
- #
-
- material_map = {}
-
- #
- # Gathering position, normal and tex_coords.
- #
- no_material_attributes = {
- POSITION_ATTRIBUTE: [],
- NORMAL_ATTRIBUTE: []
- }
-
- if use_tangents:
- no_material_attributes[TANGENT_ATTRIBUTE] = []
-
- #
- # Directory of materials with its primitive.
- #
- no_material_primitives = {
- MATERIAL_ID: 0,
- INDICES_ID: [],
- ATTRIBUTES_ID: no_material_attributes
- }
-
- material_idx_to_primitives = {0: no_material_primitives}
-
#
-
- vertex_index_to_new_indices = {}
-
- material_map[0] = vertex_index_to_new_indices
-
+ # First, decide what attributes to gather (eg. how many COLOR_n, etc.)
+ # Also calculate normals/tangents now if necessary.
#
- # Create primitive for each material.
- #
- for (mat_idx, _) in enumerate(blender_mesh.materials):
- attributes = {
- POSITION_ATTRIBUTE: [],
- NORMAL_ATTRIBUTE: []
- }
-
- if use_tangents:
- attributes[TANGENT_ATTRIBUTE] = []
- primitive = {
- MATERIAL_ID: mat_idx,
- INDICES_ID: [],
- ATTRIBUTES_ID: attributes
- }
-
- material_idx_to_primitives[mat_idx] = primitive
-
- #
-
- vertex_index_to_new_indices = {}
+ use_normals = export_settings[gltf2_blender_export_keys.NORMALS]
+ if use_normals:
+ blender_mesh.calc_normals_split()
- material_map[mat_idx] = vertex_index_to_new_indices
+ use_tangents = False
+ if use_normals and export_settings[gltf2_blender_export_keys.TANGENTS]:
+ if blender_mesh.uv_layers.active and len(blender_mesh.uv_layers) > 0:
+ try:
+ blender_mesh.calc_tangents()
+ use_tangents = True
+ except Exception:
+ print_console('WARNING', 'Could not calculate tangents. Please try to triangulate the mesh first.')
tex_coord_max = 0
- if blender_mesh.uv_layers.active:
- tex_coord_max = len(blender_mesh.uv_layers)
-
- #
-
- vertex_colors = {}
-
- color_index = 0
- for vertex_color in blender_mesh.vertex_colors:
- vertex_color_name = COLOR_PREFIX + str(color_index)
- vertex_colors[vertex_color_name] = vertex_color
-
- color_index += 1
- if color_index >= GLTF_MAX_COLORS:
- break
- color_max = color_index
-
- #
-
- bone_max = 0
- for blender_polygon in blender_mesh.polygons:
- for loop_index in blender_polygon.loop_indices:
- vertex_index = blender_mesh.loops[loop_index].vertex_index
- bones_count = len(blender_mesh.vertices[vertex_index].groups)
- if bones_count > 0:
- if bones_count % 4 == 0:
- bones_count -= 1
- bone_max = max(bone_max, bones_count // 4 + 1)
-
- #
+ if export_settings[gltf2_blender_export_keys.TEX_COORDS]:
+ if blender_mesh.uv_layers.active:
+ tex_coord_max = len(blender_mesh.uv_layers)
- morph_max = 0
+ color_max = 0
+ if export_settings[gltf2_blender_export_keys.COLORS]:
+ color_max = len(blender_mesh.vertex_colors)
- blender_shape_keys = []
-
- if blender_mesh.shape_keys is not None:
+ bone_max = 0 # number of JOINTS_n sets needed (1 set = 4 influences)
+ armature = None
+ if blender_vertex_groups and export_settings[gltf2_blender_export_keys.SKINS]:
+ if modifiers is not None:
+ modifiers_dict = {m.type: m for m in modifiers}
+ if "ARMATURE" in modifiers_dict:
+ modifier = modifiers_dict["ARMATURE"]
+ armature = modifier.object
+
+ # Skin must be ignored if the object is parented to a bone of the armature
+ # (This creates an infinite recursive error)
+ # So ignoring skin in that case
+ is_child_of_arma = (
+ armature and
+ blender_object and
+ blender_object.parent_type == "BONE" and
+ blender_object.parent.name == armature.name
+ )
+ if is_child_of_arma:
+ armature = None
+
+ if armature:
+ skin = gltf2_blender_gather_skins.gather_skin(armature, export_settings)
+ if not skin:
+ armature = None
+ else:
+ joint_name_to_index = {joint.name: index for index, joint in enumerate(skin.joints)}
+ group_to_joint = [joint_name_to_index.get(g.name) for g in blender_vertex_groups]
+
+ # Find out max number of bone influences
+ for blender_polygon in blender_mesh.polygons:
+ for loop_index in blender_polygon.loop_indices:
+ vertex_index = blender_mesh.loops[loop_index].vertex_index
+ groups_count = len(blender_mesh.vertices[vertex_index].groups)
+ bones_count = (groups_count + 3) // 4
+ bone_max = max(bone_max, bones_count)
+
+ use_morph_normals = use_normals and export_settings[gltf2_blender_export_keys.MORPH_NORMAL]
+ use_morph_tangents = use_morph_normals and use_tangents and export_settings[gltf2_blender_export_keys.MORPH_TANGENT]
+
+ shape_keys = []
+ if blender_mesh.shape_keys and export_settings[gltf2_blender_export_keys.MORPH]:
for blender_shape_key in blender_mesh.shape_keys.key_blocks:
- if blender_shape_key != blender_shape_key.relative_key:
- if blender_shape_key.mute is False:
- morph_max += 1
- blender_shape_keys.append(ShapeKey(
- blender_shape_key,
- blender_shape_key.normals_vertex_get(), # calculate vertex normals for this shape key
- blender_shape_key.normals_polygon_get())) # calculate polygon normals for this shape key
+ if blender_shape_key == blender_shape_key.relative_key or blender_shape_key.mute:
+ continue
+ split_normals = None
+ if use_morph_normals:
+ split_normals = blender_shape_key.normals_split_get()
+
+ shape_keys.append(ShapeKey(
+ blender_shape_key,
+ split_normals,
+ ))
- armature = None
- if modifiers is not None:
- modifiers_dict = {m.type: m for m in modifiers}
- if "ARMATURE" in modifiers_dict:
- modifier = modifiers_dict["ARMATURE"]
- armature = modifier.object
+ use_materials = export_settings[gltf2_blender_export_keys.MATERIALS]
#
- # Convert polygon to primitive indices and eliminate invalid ones. Assign to material.
+ # Gather the verts and indices for each primitive.
#
- for blender_polygon in blender_mesh.polygons:
- export_color = True
- #
+ prims = {}
- if export_settings['gltf_materials'] is False:
- primitive = material_idx_to_primitives[0]
- vertex_index_to_new_indices = material_map[0]
- elif not blender_polygon.material_index in material_idx_to_primitives:
- primitive = material_idx_to_primitives[0]
- vertex_index_to_new_indices = material_map[0]
- else:
- primitive = material_idx_to_primitives[blender_polygon.material_index]
- vertex_index_to_new_indices = material_map[blender_polygon.material_index]
- #
-
- attributes = primitive[ATTRIBUTES_ID]
-
- face_normal = blender_polygon.normal
- face_tangent = Vector((0.0, 0.0, 0.0))
- face_bitangent = Vector((0.0, 0.0, 0.0))
- if use_tangents:
- for loop_index in blender_polygon.loop_indices:
- temp_vertex = blender_mesh.loops[loop_index]
- face_tangent += temp_vertex.tangent
- face_bitangent += temp_vertex.bitangent
-
- face_tangent.normalize()
- face_bitangent.normalize()
-
- #
+ blender_mesh.calc_loop_triangles()
- indices = primitive[INDICES_ID]
+ for loop_tri in blender_mesh.loop_triangles:
+ blender_polygon = blender_mesh.polygons[loop_tri.polygon_index]
- loop_index_list = []
+ material_idx = -1
+ if use_materials:
+ material_idx = blender_polygon.material_index
- if len(blender_polygon.loop_indices) == 3:
- loop_index_list.extend(blender_polygon.loop_indices)
- elif len(blender_polygon.loop_indices) > 3:
- # Triangulation of polygon. Using internal function, as non-convex polygons could exist.
- polyline = []
+ prim = prims.get(material_idx)
+ if not prim:
+ prim = Prim()
+ prims[material_idx] = prim
- for loop_index in blender_polygon.loop_indices:
- vertex_index = blender_mesh.loops[loop_index].vertex_index
- v = blender_mesh.vertices[vertex_index].co
- polyline.append(Vector((v[0], v[1], v[2])))
-
- triangles = tessellate_polygon((polyline,))
-
- for triangle in triangles:
-
- for triangle_index in triangle:
- loop_index_list.append(blender_polygon.loop_indices[triangle_index])
- else:
- continue
-
- for loop_index in loop_index_list:
+ for loop_index in loop_tri.loops:
vertex_index = blender_mesh.loops[loop_index].vertex_index
+ vertex = blender_mesh.vertices[vertex_index]
- if vertex_index_to_new_indices.get(vertex_index) is None:
- vertex_index_to_new_indices[vertex_index] = []
-
- #
-
- v = None
- n = None
- t = None
- b = None
- uvs = []
- colors = []
- joints = []
- weights = []
-
- target_positions = []
- target_normals = []
- target_tangents = []
+ # vert will be a tuple of all the vertex attributes.
+ # Used as cache key in prim.verts.
+ vert = (vertex_index,)
- vertex = blender_mesh.vertices[vertex_index]
+ v = vertex.co
+ vert += ((v[0], v[1], v[2]),)
- v = convert_swizzle_location(vertex.co, armature, blender_object, export_settings)
- if blender_polygon.use_smooth or blender_mesh.use_auto_smooth:
- if blender_mesh.has_custom_normals:
- n = convert_swizzle_normal(blender_mesh.loops[loop_index].normal, armature, blender_object, export_settings)
- else:
- n = convert_swizzle_normal(vertex.normal, armature, blender_object, export_settings)
+ if use_normals:
+ n = blender_mesh.loops[loop_index].normal
+ vert += ((n[0], n[1], n[2]),)
if use_tangents:
- t = convert_swizzle_tangent(blender_mesh.loops[loop_index].tangent, armature, blender_object, export_settings)
- b = convert_swizzle_location(blender_mesh.loops[loop_index].bitangent, armature, blender_object, export_settings)
- else:
- n = convert_swizzle_normal(face_normal, armature, blender_object, export_settings)
- if use_tangents:
- t = convert_swizzle_tangent(face_tangent, armature, blender_object, export_settings)
- b = convert_swizzle_location(face_bitangent, armature, blender_object, export_settings)
-
- if use_tangents:
- tv = Vector((t[0], t[1], t[2]))
- bv = Vector((b[0], b[1], b[2]))
- nv = Vector((n[0], n[1], n[2]))
-
- if (nv.cross(tv)).dot(bv) < 0.0:
- t[3] = -1.0
-
- if blender_mesh.uv_layers.active:
- for tex_coord_index in range(0, tex_coord_max):
- uv = blender_mesh.uv_layers[tex_coord_index].data[loop_index].uv
- uvs.append([uv.x, 1.0 - uv.y])
-
- #
-
- if color_max > 0 and export_color:
- for color_index in range(0, color_max):
- color_name = COLOR_PREFIX + str(color_index)
- color = vertex_colors[color_name].data[loop_index].color
- colors.append([
- color_srgb_to_scene_linear(color[0]),
- color_srgb_to_scene_linear(color[1]),
- color_srgb_to_scene_linear(color[2]),
- color[3]
- ])
-
- #
-
- bone_count = 0
-
- # Skin must be ignored if the object is parented to a bone of the armature
- # (This creates an infinite recursive error)
- # So ignoring skin in that case
- if blender_object and blender_object.parent_type == "BONE" and blender_object.parent.name == armature.name:
- bone_max = 0 # joints & weights will be ignored in following code
- else:
- # Manage joints & weights
- if blender_vertex_groups is not None and vertex.groups is not None and len(vertex.groups) > 0 and export_settings[gltf2_blender_export_keys.SKINS]:
- joint = []
- weight = []
- vertex_groups = vertex.groups
- if not export_settings['gltf_all_vertex_influences']:
- # sort groups by weight descending
- vertex_groups = sorted(vertex.groups, key=attrgetter('weight'), reverse=True)
- for group_element in vertex_groups:
-
- if len(joint) == 4:
- bone_count += 1
- joints.append(joint)
- weights.append(weight)
- joint = []
- weight = []
-
- #
-
- joint_weight = group_element.weight
- if joint_weight <= 0.0:
+ t = blender_mesh.loops[loop_index].tangent
+ b = blender_mesh.loops[loop_index].bitangent
+ vert += ((t[0], t[1], t[2]),)
+ vert += ((b[0], b[1], b[2]),)
+ # TODO: store just bitangent_sign in vert, not whole bitangent?
+
+ for tex_coord_index in range(0, tex_coord_max):
+ uv = blender_mesh.uv_layers[tex_coord_index].data[loop_index].uv
+ uv = (uv.x, 1.0 - uv.y)
+ vert += (uv,)
+
+ for color_index in range(0, color_max):
+ color = blender_mesh.vertex_colors[color_index].data[loop_index].color
+ col = (
+ color_srgb_to_scene_linear(color[0]),
+ color_srgb_to_scene_linear(color[1]),
+ color_srgb_to_scene_linear(color[2]),
+ color[3],
+ )
+ vert += (col,)
+
+ if bone_max:
+ bones = []
+ if vertex.groups:
+ for group_element in vertex.groups:
+ weight = group_element.weight
+ if weight <= 0.0:
continue
-
- #
-
- vertex_group_index = group_element.group
-
- if vertex_group_index < 0 or vertex_group_index >= len(blender_vertex_groups):
+ try:
+ joint = group_to_joint[group_element.group]
+ except Exception:
continue
- vertex_group_name = blender_vertex_groups[vertex_group_index].name
-
- joint_index = None
-
- if armature:
- skin = gltf2_blender_gather_skins.gather_skin(armature, export_settings)
- for index, j in enumerate(skin.joints):
- if j.name == vertex_group_name:
- joint_index = index
- break
-
- #
- if joint_index is not None:
- joint.append(joint_index)
- weight.append(joint_weight)
-
- if len(joint) > 0:
- bone_count += 1
-
- for fill in range(0, 4 - len(joint)):
- joint.append(0)
- weight.append(0.0)
-
- joints.append(joint)
- weights.append(weight)
-
- for fill in range(0, bone_max - bone_count):
- joints.append([0, 0, 0, 0])
- weights.append([0.0, 0.0, 0.0, 0.0])
-
- #
-
- if morph_max > 0 and export_settings[gltf2_blender_export_keys.MORPH]:
- for morph_index in range(0, morph_max):
- blender_shape_key = blender_shape_keys[morph_index]
-
- v_morph = convert_swizzle_location(blender_shape_key.shape_key.data[vertex_index].co,
- armature, blender_object,
- export_settings)
-
- # Store delta.
- v_morph -= v
-
- target_positions.append(v_morph)
-
- #
-
- n_morph = None
-
- if blender_polygon.use_smooth:
- temp_normals = blender_shape_key.vertex_normals
- n_morph = (temp_normals[vertex_index * 3 + 0], temp_normals[vertex_index * 3 + 1],
- temp_normals[vertex_index * 3 + 2])
- else:
- temp_normals = blender_shape_key.polygon_normals
- n_morph = (
- temp_normals[blender_polygon.index * 3 + 0], temp_normals[blender_polygon.index * 3 + 1],
- temp_normals[blender_polygon.index * 3 + 2])
-
- n_morph = convert_swizzle_normal(Vector(n_morph), armature, blender_object, export_settings)
-
- # Store delta.
- n_morph -= n
-
- target_normals.append(n_morph)
-
- #
-
- if use_tangents:
- rotation = n_morph.rotation_difference(n)
-
- t_morph = Vector((t[0], t[1], t[2]))
-
- t_morph.rotate(rotation)
-
- target_tangents.append(t_morph)
-
- #
- #
+ if joint is None:
+ continue
+ bones.append((joint, weight))
+ bones.sort(key=lambda x: x[1], reverse=True)
+ bones = tuple(bones)
+ if not bones: bones = ((0, 1.0),) # HACK for verts with zero weight (#308)
+ vert += (bones,)
+
+ for shape_key in shape_keys:
+ v_morph = shape_key.shape_key.data[vertex_index].co
+ v_morph = v_morph - v # store delta
+ vert += ((v_morph[0], v_morph[1], v_morph[2]),)
+
+ if use_morph_normals:
+ normals = shape_key.split_normals
+ n_morph = Vector(normals[loop_index * 3 : loop_index * 3 + 3])
+ n_morph = n_morph - n # store delta
+ vert += ((n_morph[0], n_morph[1], n_morph[2]),)
+
+ vert_idx = prim.verts.setdefault(vert, len(prim.verts))
+ prim.indices.append(vert_idx)
- create = True
+ #
+ # Put the verts into attribute arrays.
+ #
- for current_new_index in vertex_index_to_new_indices[vertex_index]:
- found = True
+ result_primitives = []
- for i in range(0, 3):
- if attributes[POSITION_ATTRIBUTE][current_new_index * 3 + i] != v[i]:
- found = False
- break
+ for material_idx, prim in prims.items():
+ if not prim.indices:
+ continue
- if attributes[NORMAL_ATTRIBUTE][current_new_index * 3 + i] != n[i]:
- found = False
- break
+ vs = []
+ ns = []
+ ts = []
+ uvs = [[] for _ in range(tex_coord_max)]
+ cols = [[] for _ in range(color_max)]
+ joints = [[] for _ in range(bone_max)]
+ weights = [[] for _ in range(bone_max)]
+ vs_morph = [[] for _ in shape_keys]
+ ns_morph = [[] for _ in shape_keys]
+ ts_morph = [[] for _ in shape_keys]
+
+ for vert in prim.verts.keys():
+ i = 0
+
+ i += 1 # skip over Blender mesh index
+
+ v = vert[i]
+ i += 1
+ v = convert_swizzle_location(v, armature, blender_object, export_settings)
+ vs.extend(v)
+
+ if use_normals:
+ n = vert[i]
+ i += 1
+ n = convert_swizzle_normal(n, armature, blender_object, export_settings)
+ ns.extend(n)
if use_tangents:
- for i in range(0, 4):
- if attributes[TANGENT_ATTRIBUTE][current_new_index * 4 + i] != t[i]:
- found = False
- break
-
- if not found:
- continue
-
- for tex_coord_index in range(0, tex_coord_max):
- uv = uvs[tex_coord_index]
-
- tex_coord_id = TEXCOORD_PREFIX + str(tex_coord_index)
- for i in range(0, 2):
- if attributes[tex_coord_id][current_new_index * 2 + i] != uv[i]:
- found = False
- break
-
- if export_color:
- for color_index in range(0, color_max):
- color = colors[color_index]
-
- color_id = COLOR_PREFIX + str(color_index)
- for i in range(0, 3):
- # Alpha is always 1.0 - see above.
- current_color = attributes[color_id][current_new_index * 4 + i]
- if color_srgb_to_scene_linear(current_color) != color[i]:
- found = False
- break
-
- if export_settings[gltf2_blender_export_keys.SKINS]:
- for bone_index in range(0, bone_max):
- joint = joints[bone_index]
- weight = weights[bone_index]
-
- joint_id = JOINTS_PREFIX + str(bone_index)
- weight_id = WEIGHTS_PREFIX + str(bone_index)
- for i in range(0, 4):
- if attributes[joint_id][current_new_index * 4 + i] != joint[i]:
- found = False
- break
- if attributes[weight_id][current_new_index * 4 + i] != weight[i]:
- found = False
- break
-
- if export_settings[gltf2_blender_export_keys.MORPH]:
- for morph_index in range(0, morph_max):
- target_position = target_positions[morph_index]
- target_normal = target_normals[morph_index]
- if use_tangents:
- target_tangent = target_tangents[morph_index]
-
- target_position_id = MORPH_POSITION_PREFIX + str(morph_index)
- target_normal_id = MORPH_NORMAL_PREFIX + str(morph_index)
- target_tangent_id = MORPH_TANGENT_PREFIX + str(morph_index)
- for i in range(0, 3):
- if attributes[target_position_id][current_new_index * 3 + i] != target_position[i]:
- found = False
- break
- if attributes[target_normal_id][current_new_index * 3 + i] != target_normal[i]:
- found = False
- break
- if use_tangents:
- if attributes[target_tangent_id][current_new_index * 3 + i] != target_tangent[i]:
- found = False
- break
-
- if found:
- indices.append(current_new_index)
-
- create = False
- break
-
- if not create:
- continue
-
- new_index = 0
-
- if primitive.get('max_index') is not None:
- new_index = primitive['max_index'] + 1
-
- primitive['max_index'] = new_index
-
- vertex_index_to_new_indices[vertex_index].append(new_index)
-
- #
- #
-
- indices.append(new_index)
-
- #
-
- attributes[POSITION_ATTRIBUTE].extend(v)
- attributes[NORMAL_ATTRIBUTE].extend(n)
- if use_tangents:
- attributes[TANGENT_ATTRIBUTE].extend(t)
-
- if blender_mesh.uv_layers.active:
- for tex_coord_index in range(0, tex_coord_max):
- tex_coord_id = TEXCOORD_PREFIX + str(tex_coord_index)
-
- if attributes.get(tex_coord_id) is None:
- attributes[tex_coord_id] = []
-
- attributes[tex_coord_id].extend(uvs[tex_coord_index])
-
- if export_color:
- for color_index in range(0, color_max):
- color_id = COLOR_PREFIX + str(color_index)
-
- if attributes.get(color_id) is None:
- attributes[color_id] = []
-
- attributes[color_id].extend(colors[color_index])
-
- if export_settings[gltf2_blender_export_keys.SKINS]:
- for bone_index in range(0, bone_max):
- joint_id = JOINTS_PREFIX + str(bone_index)
-
- if attributes.get(joint_id) is None:
- attributes[joint_id] = []
-
- attributes[joint_id].extend(joints[bone_index])
-
- weight_id = WEIGHTS_PREFIX + str(bone_index)
-
- if attributes.get(weight_id) is None:
- attributes[weight_id] = []
-
- attributes[weight_id].extend(weights[bone_index])
-
- if export_settings[gltf2_blender_export_keys.MORPH]:
- for morph_index in range(0, morph_max):
- target_position_id = MORPH_POSITION_PREFIX + str(morph_index)
-
- if attributes.get(target_position_id) is None:
- attributes[target_position_id] = []
-
- attributes[target_position_id].extend(target_positions[morph_index])
-
- target_normal_id = MORPH_NORMAL_PREFIX + str(morph_index)
-
- if attributes.get(target_normal_id) is None:
- attributes[target_normal_id] = []
-
- attributes[target_normal_id].extend(target_normals[morph_index])
-
- if use_tangents:
- target_tangent_id = MORPH_TANGENT_PREFIX + str(morph_index)
-
- if attributes.get(target_tangent_id) is None:
- attributes[target_tangent_id] = []
-
- attributes[target_tangent_id].extend(target_tangents[morph_index])
-
- #
- # Add non-empty primitives
- #
-
- result_primitives = [
- primitive
- for primitive in material_idx_to_primitives.values()
- if len(primitive[INDICES_ID]) != 0
- ]
-
- print_console('INFO', 'Primitives created: ' + str(len(result_primitives)))
+ t = vert[i]
+ i += 1
+ t = convert_swizzle_tangent(t, armature, blender_object, export_settings)
+ ts.extend(t)
+
+ b = vert[i]
+ i += 1
+ b = convert_swizzle_tangent(b, armature, blender_object, export_settings)
+ b_sign = -1.0 if (Vector(n).cross(Vector(t))).dot(Vector(b)) < 0.0 else 1.0
+ ts.append(b_sign)
+
+ for tex_coord_index in range(0, tex_coord_max):
+ uv = vert[i]
+ i += 1
+ uvs[tex_coord_index].extend(uv)
+
+ for color_index in range(0, color_max):
+ col = vert[i]
+ i += 1
+ cols[color_index].extend(col)
+
+ if bone_max:
+ bones = vert[i]
+ i += 1
+ for j in range(0, 4 * bone_max):
+ if j < len(bones):
+ joint, weight = bones[j]
+ else:
+ joint, weight = 0, 0.0
+ joints[j//4].append(joint)
+ weights[j//4].append(weight)
+
+ for shape_key_index in range(0, len(shape_keys)):
+ v_morph = vert[i]
+ i += 1
+ v_morph = convert_swizzle_location(v_morph, armature, blender_object, export_settings)
+ vs_morph[shape_key_index].extend(v_morph)
+
+ if use_morph_normals:
+ n_morph = vert[i]
+ i += 1
+ n_morph = convert_swizzle_normal(n_morph, armature, blender_object, export_settings)
+ ns_morph[shape_key_index].extend(n_morph)
+
+ if use_morph_tangents:
+ rotation = n_morph.rotation_difference(n)
+ t_morph = Vector(t)
+ t_morph.rotate(rotation)
+ ts_morph[shape_key_index].extend(t_morph)
+
+ attributes = {}
+ attributes['POSITION'] = vs
+ if ns: attributes['NORMAL'] = ns
+ if ts: attributes['TANGENT'] = ts
+ for i, uv in enumerate(uvs): attributes['TEXCOORD_%d' % i] = uv
+ for i, col in enumerate(cols): attributes['COLOR_%d' % i] = col
+ for i, js in enumerate(joints): attributes['JOINTS_%d' % i] = js
+ for i, ws in enumerate(weights): attributes['WEIGHTS_%d' % i] = ws
+ for i, vm in enumerate(vs_morph): attributes['MORPH_POSITION_%d' % i] = vm
+ for i, nm in enumerate(ns_morph): attributes['MORPH_NORMAL_%d' % i] = nm
+ for i, tm in enumerate(ts_morph): attributes['MORPH_TANGENT_%d' % i] = tm
+
+ result_primitives.append({
+ 'attributes': attributes,
+ 'indices': prim.indices,
+ 'material': material_idx,
+ })
+
+ print_console('INFO', 'Primitives created: %d' % len(result_primitives))
return result_primitives
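
A minimal sketch of the de-duplication idea used in the rewritten extract_primitives(): the full per-loop attribute tuple is the dict key, setdefault() hands back the index of an identical vertex if one was already seen, and dict insertion order defines the final vertex order (the real code builds much larger tuples, of course):

verts = {}
indices = []
for vert in [(0, (0.0, 0.0, 0.0)), (1, (1.0, 0.0, 0.0)), (0, (0.0, 0.0, 0.0))]:
    indices.append(verts.setdefault(vert, len(verts)))
# verts keeps two unique entries; indices == [0, 1, 0]
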
diff --git a/io_scene_gltf2/blender/exp/gltf2_blender_gather_animation_sampler_keyframes.py b/io_scene_gltf2/blender/exp/gltf2_blender_gather_animation_sampler_keyframes.py
index 822aa6a1..f8ab333e 100755
--- a/io_scene_gltf2/blender/exp/gltf2_blender_gather_animation_sampler_keyframes.py
+++ b/io_scene_gltf2/blender/exp/gltf2_blender_gather_animation_sampler_keyframes.py
@@ -161,8 +161,14 @@ def get_bone_matrix(blender_object_if_armature: typing.Optional[bpy.types.Object
if bake_bone is None:
matrix = pbone.matrix_basis.copy()
else:
- matrix = pbone.matrix
- matrix = blender_object_if_armature.convert_space(pose_bone=pbone, matrix=matrix, from_space='POSE', to_space='LOCAL')
+ if (pbone.bone.use_inherit_rotation == False or pbone.bone.inherit_scale != "FULL") and pbone.parent != None:
+ rest_mat = (pbone.parent.bone.matrix_local.inverted_safe() @ pbone.bone.matrix_local)
+ matrix = (rest_mat.inverted_safe() @ pbone.parent.matrix.inverted_safe() @ pbone.matrix)
+ else:
+ matrix = pbone.matrix
+ matrix = blender_object_if_armature.convert_space(pose_bone=pbone, matrix=matrix, from_space='POSE', to_space='LOCAL')
+
+
data[frame][pbone.name] = matrix
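
A quick sanity check of the matrix identity the new branch relies on (plain mathutils, no bpy): if a child pose matrix is parent_pose @ rest_offset @ local_basis, then rest_offset.inverted() @ parent_pose.inverted() @ child_pose recovers that local basis, which is what the rest_mat / parent-matrix expression above computes with inverted_safe():

from mathutils import Matrix, Euler

parent_pose = Matrix.Translation((0.0, 1.0, 0.0)) @ Euler((0.3, 0.1, 0.0)).to_matrix().to_4x4()
rest_offset = Matrix.Translation((0.0, 2.0, 0.0))           # child rest, parent-relative
local_basis = Euler((0.0, 0.0, 0.5)).to_matrix().to_4x4()   # what we want to recover
child_pose = parent_pose @ rest_offset @ local_basis
recovered = rest_offset.inverted() @ parent_pose.inverted() @ child_pose
# recovered is numerically equal to local_basis
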
diff --git a/io_scene_gltf2/blender/exp/gltf2_blender_gather_animation_samplers.py b/io_scene_gltf2/blender/exp/gltf2_blender_gather_animation_samplers.py
index f2375bb1..c3913367 100755
--- a/io_scene_gltf2/blender/exp/gltf2_blender_gather_animation_samplers.py
+++ b/io_scene_gltf2/blender/exp/gltf2_blender_gather_animation_samplers.py
@@ -343,10 +343,12 @@ def __gather_output(channels: typing.Tuple[bpy.types.FCurve],
(0.0, 0.0, 1.0, 0.0),
(0.0, -1.0, 0.0, 0.0),
(0.0, 0.0, 0.0, 1.0)))
- correction_matrix_local = gltf2_blender_math.multiply(axis_basis_change, bone.bone.matrix_local)
+ correction_matrix_local = axis_basis_change @ bone.bone.matrix_local
else:
- correction_matrix_local = gltf2_blender_math.multiply(
- bone.parent.bone.matrix_local.inverted(), bone.bone.matrix_local)
+ correction_matrix_local = (
+ bone.parent.bone.matrix_local.inverted() @
+ bone.bone.matrix_local
+ )
transform = correction_matrix_local
else:
diff --git a/io_scene_gltf2/blender/exp/gltf2_blender_gather_cameras.py b/io_scene_gltf2/blender/exp/gltf2_blender_gather_cameras.py
index 585f0be3..bb211fe2 100755
--- a/io_scene_gltf2/blender/exp/gltf2_blender_gather_cameras.py
+++ b/io_scene_gltf2/blender/exp/gltf2_blender_gather_cameras.py
@@ -92,18 +92,18 @@ def __gather_perspective(blender_camera, export_settings):
width = bpy.context.scene.render.pixel_aspect_x * bpy.context.scene.render.resolution_x
height = bpy.context.scene.render.pixel_aspect_y * bpy.context.scene.render.resolution_y
- perspective.aspectRatio = width / height
+ perspective.aspect_ratio = width / height
if width >= height:
if blender_camera.sensor_fit != 'VERTICAL':
- perspective.yfov = 2.0 * math.atan(math.tan(blender_camera.angle * 0.5) / perspective.aspectRatio)
+ perspective.yfov = 2.0 * math.atan(math.tan(blender_camera.angle * 0.5) / perspective.aspect_ratio)
else:
perspective.yfov = blender_camera.angle
else:
if blender_camera.sensor_fit != 'HORIZONTAL':
perspective.yfov = blender_camera.angle
else:
- perspective.yfov = 2.0 * math.atan(math.tan(blender_camera.angle * 0.5) / perspective.aspectRatio)
+ perspective.yfov = 2.0 * math.atan(math.tan(blender_camera.angle * 0.5) / perspective.aspect_ratio)
perspective.znear = blender_camera.clip_start
perspective.zfar = blender_camera.clip_end
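
Worked example of the renamed aspect_ratio math above: a 90° horizontal camera angle at a 16:9 aspect ratio maps to a yfov of roughly 58.7°:

import math

aspect_ratio = 16 / 9
angle = math.radians(90.0)          # blender_camera.angle (horizontal in this case)
yfov = 2.0 * math.atan(math.tan(angle * 0.5) / aspect_ratio)
print(math.degrees(yfov))           # ~58.7
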
diff --git a/io_scene_gltf2/blender/exp/gltf2_blender_gather_joints.py b/io_scene_gltf2/blender/exp/gltf2_blender_gather_joints.py
index af086c1b..dff55d17 100755
--- a/io_scene_gltf2/blender/exp/gltf2_blender_gather_joints.py
+++ b/io_scene_gltf2/blender/exp/gltf2_blender_gather_joints.py
@@ -17,8 +17,6 @@ import mathutils
from . import gltf2_blender_export_keys
from io_scene_gltf2.blender.exp.gltf2_blender_gather_cache import cached
from io_scene_gltf2.io.com import gltf2_io
-from io_scene_gltf2.blender.exp import gltf2_blender_extract
-from io_scene_gltf2.blender.com import gltf2_blender_math
from io_scene_gltf2.blender.exp import gltf2_blender_gather_skins
from io_scene_gltf2.io.exp.gltf2_io_user_extensions import export_user_extensions
from ..com.gltf2_blender_extras import generate_extras
@@ -39,13 +37,21 @@ def gather_joint(blender_object, blender_bone, export_settings):
# extract bone transform
if blender_bone.parent is None:
- correction_matrix_local = gltf2_blender_math.multiply(axis_basis_change, blender_bone.bone.matrix_local)
+ correction_matrix_local = axis_basis_change @ blender_bone.bone.matrix_local
else:
- correction_matrix_local = gltf2_blender_math.multiply(
- blender_bone.parent.bone.matrix_local.inverted(), blender_bone.bone.matrix_local)
- matrix_basis = blender_bone.matrix_basis
- trans, rot, sca = gltf2_blender_extract.decompose_transition(
- gltf2_blender_math.multiply(correction_matrix_local, matrix_basis), export_settings)
+ correction_matrix_local = (
+ blender_bone.parent.bone.matrix_local.inverted() @
+ blender_bone.bone.matrix_local
+ )
+
+ if (blender_bone.bone.use_inherit_rotation == False or blender_bone.bone.inherit_scale != "FULL") and blender_bone.parent != None:
+ rest_mat = (blender_bone.parent.bone.matrix_local.inverted_safe() @ blender_bone.bone.matrix_local)
+ matrix_basis = (rest_mat.inverted_safe() @ blender_bone.parent.matrix.inverted_safe() @ blender_bone.matrix)
+ else:
+ matrix_basis = blender_bone.matrix
+ matrix_basis = blender_object.convert_space(pose_bone=blender_bone, matrix=matrix_basis, from_space='POSE', to_space='LOCAL')
+
+ trans, rot, sca = (correction_matrix_local @ matrix_basis).decompose()
translation, rotation, scale = (None, None, None)
if trans[0] != 0.0 or trans[1] != 0.0 or trans[2] != 0.0:
translation = [trans[0], trans[1], trans[2]]
diff --git a/io_scene_gltf2/blender/exp/gltf2_blender_gather_materials_pbr_metallic_roughness.py b/io_scene_gltf2/blender/exp/gltf2_blender_gather_materials_pbr_metallic_roughness.py
index 54493799..7913d175 100755
--- a/io_scene_gltf2/blender/exp/gltf2_blender_gather_materials_pbr_metallic_roughness.py
+++ b/io_scene_gltf2/blender/exp/gltf2_blender_gather_materials_pbr_metallic_roughness.py
@@ -47,6 +47,9 @@ def __filter_pbr_material(blender_material, export_settings):
def __gather_base_color_factor(blender_material, export_settings):
+ alpha_socket = gltf2_blender_get.get_socket(blender_material, "Alpha")
+ alpha = alpha_socket.default_value if alpha_socket is not None and not alpha_socket.is_linked else 1.0
+
base_color_socket = gltf2_blender_get.get_socket(blender_material, "Base Color")
if base_color_socket is None:
base_color_socket = gltf2_blender_get.get_socket(blender_material, "BaseColor")
@@ -57,7 +60,7 @@ def __gather_base_color_factor(blender_material, export_settings):
if not isinstance(base_color_socket, bpy.types.NodeSocket):
return None
if not base_color_socket.is_linked:
- return list(base_color_socket.default_value)
+ return list(base_color_socket.default_value)[:3] + [alpha]
texture_node = __get_tex_from_socket(base_color_socket)
if texture_node is None:
@@ -85,7 +88,7 @@ def __gather_base_color_factor(blender_material, export_settings):
.format(multiply_node.name))
return None
- return list(factor_socket.default_value)
+ return list(factor_socket.default_value)[:3] + [alpha]
def __gather_base_color_texture(blender_material, export_settings):
diff --git a/io_scene_gltf2/blender/exp/gltf2_blender_gather_nodes.py b/io_scene_gltf2/blender/exp/gltf2_blender_gather_nodes.py
index 83984c2b..b09e7aa1 100755
--- a/io_scene_gltf2/blender/exp/gltf2_blender_gather_nodes.py
+++ b/io_scene_gltf2/blender/exp/gltf2_blender_gather_nodes.py
@@ -189,8 +189,8 @@ def __gather_children(blender_object, blender_scene, export_settings):
rot_quat = Quaternion(rot)
axis_basis_change = Matrix(
((1.0, 0.0, 0.0, 0.0), (0.0, 0.0, -1.0, 0.0), (0.0, 1.0, 0.0, 0.0), (0.0, 0.0, 0.0, 1.0)))
- mat = gltf2_blender_math.multiply(child.matrix_parent_inverse, child.matrix_basis)
- mat = gltf2_blender_math.multiply(mat, axis_basis_change)
+ mat = child.matrix_parent_inverse @ child.matrix_basis
+ mat = mat @ axis_basis_change
_, rot_quat, _ = mat.decompose()
child_node.rotation = [rot_quat[1], rot_quat[2], rot_quat[3], rot_quat[0]]
@@ -248,6 +248,9 @@ def __gather_matrix(blender_object, export_settings):
def __gather_mesh(blender_object, library, export_settings):
+ if blender_object.type in ['CURVE', 'SURFACE', 'FONT']:
+ return __gather_mesh_from_nonmesh(blender_object, library, export_settings)
+
if blender_object.type != "MESH":
return None
@@ -338,6 +341,49 @@ def __gather_mesh(blender_object, library, export_settings):
return result
+def __gather_mesh_from_nonmesh(blender_object, library, export_settings):
+ """Handles curves, surfaces, text, etc."""
+ needs_to_mesh_clear = False
+ try:
+ # Convert to a mesh
+ try:
+ if export_settings[gltf2_blender_export_keys.APPLY]:
+ depsgraph = bpy.context.evaluated_depsgraph_get()
+ blender_mesh_owner = blender_object.evaluated_get(depsgraph)
+ blender_mesh = blender_mesh_owner.to_mesh(preserve_all_data_layers=True, depsgraph=depsgraph)
+ # TODO: do we need preserve_all_data_layers?
+
+ else:
+ blender_mesh_owner = blender_object
+ blender_mesh = blender_mesh_owner.to_mesh()
+
+ except Exception:
+ return None
+
+ needs_to_mesh_clear = True
+
+ skip_filter = True
+ material_names = tuple([ms.material.name for ms in blender_object.material_slots if ms.material is not None])
+ vertex_groups = None
+ modifiers = None
+ blender_object_for_skined_data = None
+
+ result = gltf2_blender_gather_mesh.gather_mesh(blender_mesh,
+ library,
+ blender_object_for_skined_data,
+ vertex_groups,
+ modifiers,
+ skip_filter,
+ material_names,
+ export_settings)
+
+ finally:
+ if needs_to_mesh_clear:
+ blender_mesh_owner.to_mesh_clear()
+
+ return result
+
+
def __gather_name(blender_object, export_settings):
return blender_object.name
@@ -358,7 +404,7 @@ def __gather_trans_rot_scale(blender_object, export_settings):
if blender_object.matrix_local[3][3] != 0.0:
- trans, rot, sca = gltf2_blender_extract.decompose_transition(blender_object.matrix_local, export_settings)
+ trans, rot, sca = blender_object.matrix_local.decompose()
else:
# Some really weird cases, scale is null (if parent is null when evaluation is done)
print_console('WARNING', 'Some nodes are 0 scaled during evaluation. Result can be wrong')
diff --git a/io_scene_gltf2/blender/exp/gltf2_blender_gather_primitive_attributes.py b/io_scene_gltf2/blender/exp/gltf2_blender_gather_primitive_attributes.py
index f5856257..61adea89 100755
--- a/io_scene_gltf2/blender/exp/gltf2_blender_gather_primitive_attributes.py
+++ b/io_scene_gltf2/blender/exp/gltf2_blender_gather_primitive_attributes.py
@@ -12,12 +12,13 @@
# See the License for the specific language governing permissions and
# limitations under the License.
+import numpy as np
+
from . import gltf2_blender_export_keys
from io_scene_gltf2.io.com import gltf2_io
from io_scene_gltf2.io.com import gltf2_io_constants
from io_scene_gltf2.io.com import gltf2_io_debug
from io_scene_gltf2.io.exp import gltf2_io_binary_data
-from io_scene_gltf2.blender.exp import gltf2_blender_utils
def gather_primitive_attributes(blender_primitive, export_settings):
@@ -36,72 +37,79 @@ def gather_primitive_attributes(blender_primitive, export_settings):
return attributes
+def array_to_accessor(array, component_type, data_type, include_max_and_min=False):
+ dtype = gltf2_io_constants.ComponentType.to_numpy_dtype(component_type)
+ num_elems = gltf2_io_constants.DataType.num_elements(data_type)
+
+ if type(array) is not np.ndarray:
+ array = np.array(array, dtype=dtype)
+ array = array.reshape(len(array) // num_elems, num_elems)
+
+ assert array.dtype == dtype
+ assert array.shape[1] == num_elems
+
+ amax = None
+ amin = None
+ if include_max_and_min:
+ amax = np.amax(array, axis=0).tolist()
+ amin = np.amin(array, axis=0).tolist()
+
+ return gltf2_io.Accessor(
+ buffer_view=gltf2_io_binary_data.BinaryData(array.tobytes()),
+ byte_offset=None,
+ component_type=component_type,
+ count=len(array),
+ extensions=None,
+ extras=None,
+ max=amax,
+ min=amin,
+ name=None,
+ normalized=None,
+ sparse=None,
+ type=data_type,
+ )
+
+
def __gather_position(blender_primitive, export_settings):
position = blender_primitive["attributes"]["POSITION"]
- componentType = gltf2_io_constants.ComponentType.Float
return {
- "POSITION": gltf2_io.Accessor(
- buffer_view=gltf2_io_binary_data.BinaryData.from_list(position, componentType),
- byte_offset=None,
- component_type=componentType,
- count=len(position) // gltf2_io_constants.DataType.num_elements(gltf2_io_constants.DataType.Vec3),
- extensions=None,
- extras=None,
- max=gltf2_blender_utils.max_components(position, gltf2_io_constants.DataType.Vec3),
- min=gltf2_blender_utils.min_components(position, gltf2_io_constants.DataType.Vec3),
- name=None,
- normalized=None,
- sparse=None,
- type=gltf2_io_constants.DataType.Vec3
+ "POSITION": array_to_accessor(
+ position,
+ component_type=gltf2_io_constants.ComponentType.Float,
+ data_type=gltf2_io_constants.DataType.Vec3,
+ include_max_and_min=True
)
}
def __gather_normal(blender_primitive, export_settings):
- if export_settings[gltf2_blender_export_keys.NORMALS]:
- normal = blender_primitive["attributes"]['NORMAL']
- return {
- "NORMAL": gltf2_io.Accessor(
- buffer_view=gltf2_io_binary_data.BinaryData.from_list(normal, gltf2_io_constants.ComponentType.Float),
- byte_offset=None,
- component_type=gltf2_io_constants.ComponentType.Float,
- count=len(normal) // gltf2_io_constants.DataType.num_elements(gltf2_io_constants.DataType.Vec3),
- extensions=None,
- extras=None,
- max=None,
- min=None,
- name=None,
- normalized=None,
- sparse=None,
- type=gltf2_io_constants.DataType.Vec3
- )
- }
- return {}
+ if not export_settings[gltf2_blender_export_keys.NORMALS]:
+ return {}
+ normal = blender_primitive["attributes"].get('NORMAL')
+ if not normal:
+ return {}
+ return {
+ "NORMAL": array_to_accessor(
+ normal,
+ component_type=gltf2_io_constants.ComponentType.Float,
+ data_type=gltf2_io_constants.DataType.Vec3,
+ )
+ }
def __gather_tangent(blender_primitive, export_settings):
- if export_settings[gltf2_blender_export_keys.TANGENTS]:
- if blender_primitive["attributes"].get('TANGENT') is not None:
- tangent = blender_primitive["attributes"]['TANGENT']
- return {
- "TANGENT": gltf2_io.Accessor(
- buffer_view=gltf2_io_binary_data.BinaryData.from_list(
- tangent, gltf2_io_constants.ComponentType.Float),
- byte_offset=None,
- component_type=gltf2_io_constants.ComponentType.Float,
- count=len(tangent) // gltf2_io_constants.DataType.num_elements(gltf2_io_constants.DataType.Vec4),
- extensions=None,
- extras=None,
- max=None,
- min=None,
- name=None,
- normalized=None,
- sparse=None,
- type=gltf2_io_constants.DataType.Vec4
- )
- }
-
- return {}
+ if not export_settings[gltf2_blender_export_keys.TANGENTS]:
+ return {}
+ tangent = blender_primitive["attributes"].get('TANGENT')
+ if not tangent:
+ return {}
+ return {
+ "TANGENT": array_to_accessor(
+ tangent,
+ component_type=gltf2_io_constants.ComponentType.Float,
+ data_type=gltf2_io_constants.DataType.Vec4,
+ )
+ }
def __gather_texcoord(blender_primitive, export_settings):
@@ -111,20 +119,10 @@ def __gather_texcoord(blender_primitive, export_settings):
tex_coord_id = 'TEXCOORD_' + str(tex_coord_index)
while blender_primitive["attributes"].get(tex_coord_id) is not None:
tex_coord = blender_primitive["attributes"][tex_coord_id]
- attributes[tex_coord_id] = gltf2_io.Accessor(
- buffer_view=gltf2_io_binary_data.BinaryData.from_list(
- tex_coord, gltf2_io_constants.ComponentType.Float),
- byte_offset=None,
+ attributes[tex_coord_id] = array_to_accessor(
+ tex_coord,
component_type=gltf2_io_constants.ComponentType.Float,
- count=len(tex_coord) // gltf2_io_constants.DataType.num_elements(gltf2_io_constants.DataType.Vec2),
- extensions=None,
- extras=None,
- max=None,
- min=None,
- name=None,
- normalized=None,
- sparse=None,
- type=gltf2_io_constants.DataType.Vec2
+ data_type=gltf2_io_constants.DataType.Vec2,
)
tex_coord_index += 1
tex_coord_id = 'TEXCOORD_' + str(tex_coord_index)
@@ -138,20 +136,10 @@ def __gather_colors(blender_primitive, export_settings):
color_id = 'COLOR_' + str(color_index)
while blender_primitive["attributes"].get(color_id) is not None:
internal_color = blender_primitive["attributes"][color_id]
- attributes[color_id] = gltf2_io.Accessor(
- buffer_view=gltf2_io_binary_data.BinaryData.from_list(
- internal_color, gltf2_io_constants.ComponentType.Float),
- byte_offset=None,
+ attributes[color_id] = array_to_accessor(
+ internal_color,
component_type=gltf2_io_constants.ComponentType.Float,
- count=len(internal_color) // gltf2_io_constants.DataType.num_elements(gltf2_io_constants.DataType.Vec4),
- extensions=None,
- extras=None,
- max=None,
- min=None,
- name=None,
- normalized=None,
- sparse=None,
- type=gltf2_io_constants.DataType.Vec4
+ data_type=gltf2_io_constants.DataType.Vec4,
)
color_index += 1
color_id = 'COLOR_' + str(color_index)
@@ -173,20 +161,13 @@ def __gather_skins(blender_primitive, export_settings):
# joints
internal_joint = blender_primitive["attributes"][joint_id]
- joint = gltf2_io.Accessor(
- buffer_view=gltf2_io_binary_data.BinaryData.from_list(
- internal_joint, gltf2_io_constants.ComponentType.UnsignedShort),
- byte_offset=None,
- component_type=gltf2_io_constants.ComponentType.UnsignedShort,
- count=len(internal_joint) // gltf2_io_constants.DataType.num_elements(gltf2_io_constants.DataType.Vec4),
- extensions=None,
- extras=None,
- max=None,
- min=None,
- name=None,
- normalized=None,
- sparse=None,
- type=gltf2_io_constants.DataType.Vec4
+ component_type = gltf2_io_constants.ComponentType.UnsignedShort
+ if max(internal_joint) < 256:
+ component_type = gltf2_io_constants.ComponentType.UnsignedByte
+ joint = array_to_accessor(
+ internal_joint,
+ component_type,
+ data_type=gltf2_io_constants.DataType.Vec4,
)
attributes[joint_id] = joint
@@ -201,21 +182,10 @@ def __gather_skins(blender_primitive, export_settings):
factor = 1.0 / total
internal_weight[idx:idx + 4] = [w * factor for w in weight_slice]
- weight = gltf2_io.Accessor(
- buffer_view=gltf2_io_binary_data.BinaryData.from_list(
- internal_weight, gltf2_io_constants.ComponentType.Float),
- byte_offset=None,
+ weight = array_to_accessor(
+ internal_weight,
component_type=gltf2_io_constants.ComponentType.Float,
- count=len(internal_weight) // gltf2_io_constants.DataType.num_elements(
- gltf2_io_constants.DataType.Vec4),
- extensions=None,
- extras=None,
- max=None,
- min=None,
- name=None,
- normalized=None,
- sparse=None,
- type=gltf2_io_constants.DataType.Vec4
+ data_type=gltf2_io_constants.DataType.Vec4,
)
attributes[weight_id] = weight
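
The reshape and per-column min/max that the new array_to_accessor() helper performs can be seen in isolation on a flat POSITION list (three floats per vertex); only numpy is assumed:

import numpy as np

position = [0.0, 0.0, 0.0,  1.0, 2.0, 3.0]          # two vertices, flattened
arr = np.array(position, dtype=np.float32).reshape(len(position) // 3, 3)
print(arr.shape)                                    # (2, 3) -> accessor count is 2
print(np.amin(arr, axis=0).tolist())                # [0.0, 0.0, 0.0]
print(np.amax(arr, axis=0).tolist())                # [1.0, 2.0, 3.0]
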
diff --git a/io_scene_gltf2/blender/exp/gltf2_blender_gather_primitives.py b/io_scene_gltf2/blender/exp/gltf2_blender_gather_primitives.py
index 22f0bc6d..1a2ae00d 100755
--- a/io_scene_gltf2/blender/exp/gltf2_blender_gather_primitives.py
+++ b/io_scene_gltf2/blender/exp/gltf2_blender_gather_primitives.py
@@ -21,7 +21,6 @@ from io_scene_gltf2.blender.exp.gltf2_blender_gather_cache import cached
from io_scene_gltf2.blender.exp import gltf2_blender_extract
from io_scene_gltf2.blender.exp import gltf2_blender_gather_accessors
from io_scene_gltf2.blender.exp import gltf2_blender_gather_primitive_attributes
-from io_scene_gltf2.blender.exp import gltf2_blender_utils
from io_scene_gltf2.blender.exp import gltf2_blender_gather_materials
from io_scene_gltf2.io.com import gltf2_io
@@ -160,26 +159,11 @@ def __gather_targets(blender_primitive, blender_mesh, modifiers, export_settings
if blender_primitive["attributes"].get(target_position_id):
target = {}
internal_target_position = blender_primitive["attributes"][target_position_id]
- binary_data = gltf2_io_binary_data.BinaryData.from_list(
+ target["POSITION"] = gltf2_blender_gather_primitive_attributes.array_to_accessor(
internal_target_position,
- gltf2_io_constants.ComponentType.Float
- )
- target["POSITION"] = gltf2_io.Accessor(
- buffer_view=binary_data,
- byte_offset=None,
component_type=gltf2_io_constants.ComponentType.Float,
- count=len(internal_target_position) // gltf2_io_constants.DataType.num_elements(
- gltf2_io_constants.DataType.Vec3),
- extensions=None,
- extras=None,
- max=gltf2_blender_utils.max_components(
- internal_target_position, gltf2_io_constants.DataType.Vec3),
- min=gltf2_blender_utils.min_components(
- internal_target_position, gltf2_io_constants.DataType.Vec3),
- name=None,
- normalized=None,
- sparse=None,
- type=gltf2_io_constants.DataType.Vec3
+ data_type=gltf2_io_constants.DataType.Vec3,
+ include_max_and_min=True,
)
if export_settings[NORMALS] \
@@ -187,48 +171,20 @@ def __gather_targets(blender_primitive, blender_mesh, modifiers, export_settings
and blender_primitive["attributes"].get(target_normal_id):
internal_target_normal = blender_primitive["attributes"][target_normal_id]
- binary_data = gltf2_io_binary_data.BinaryData.from_list(
+ target['NORMAL'] = gltf2_blender_gather_primitive_attributes.array_to_accessor(
internal_target_normal,
- gltf2_io_constants.ComponentType.Float,
- )
- target['NORMAL'] = gltf2_io.Accessor(
- buffer_view=binary_data,
- byte_offset=None,
component_type=gltf2_io_constants.ComponentType.Float,
- count=len(internal_target_normal) // gltf2_io_constants.DataType.num_elements(
- gltf2_io_constants.DataType.Vec3),
- extensions=None,
- extras=None,
- max=None,
- min=None,
- name=None,
- normalized=None,
- sparse=None,
- type=gltf2_io_constants.DataType.Vec3
+ data_type=gltf2_io_constants.DataType.Vec3,
)
if export_settings[TANGENTS] \
and export_settings[MORPH_TANGENT] \
and blender_primitive["attributes"].get(target_tangent_id):
internal_target_tangent = blender_primitive["attributes"][target_tangent_id]
- binary_data = gltf2_io_binary_data.BinaryData.from_list(
+ target['TANGENT'] = gltf2_blender_gather_primitive_attributes.array_to_accessor(
internal_target_tangent,
- gltf2_io_constants.ComponentType.Float,
- )
- target['TANGENT'] = gltf2_io.Accessor(
- buffer_view=binary_data,
- byte_offset=None,
component_type=gltf2_io_constants.ComponentType.Float,
- count=len(internal_target_tangent) // gltf2_io_constants.DataType.num_elements(
- gltf2_io_constants.DataType.Vec3),
- extensions=None,
- extras=None,
- max=None,
- min=None,
- name=None,
- normalized=None,
- sparse=None,
- type=gltf2_io_constants.DataType.Vec3
+ data_type=gltf2_io_constants.DataType.Vec3,
)
targets.append(target)
morph_index += 1
diff --git a/io_scene_gltf2/blender/exp/gltf2_blender_gather_skins.py b/io_scene_gltf2/blender/exp/gltf2_blender_gather_skins.py
index fa95e543..7f645272 100755
--- a/io_scene_gltf2/blender/exp/gltf2_blender_gather_skins.py
+++ b/io_scene_gltf2/blender/exp/gltf2_blender_gather_skins.py
@@ -20,7 +20,6 @@ from io_scene_gltf2.io.exp import gltf2_io_binary_data
from io_scene_gltf2.io.com import gltf2_io_constants
from io_scene_gltf2.blender.exp import gltf2_blender_gather_accessors
from io_scene_gltf2.blender.exp import gltf2_blender_gather_joints
-from io_scene_gltf2.blender.com import gltf2_blender_math
from io_scene_gltf2.io.exp.gltf2_io_user_extensions import export_user_extensions
@@ -85,10 +84,10 @@ def __gather_inverse_bind_matrices(blender_object, export_settings):
# traverse the matrices in the same order as the joints and compute the inverse bind matrix
def __collect_matrices(bone):
- inverse_bind_matrix = gltf2_blender_math.multiply(
- axis_basis_change,
- gltf2_blender_math.multiply(
- blender_object.matrix_world,
+ inverse_bind_matrix = (
+ axis_basis_change @
+ (
+ blender_object.matrix_world @
bone.bone.matrix_local
)
).inverted()
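
Since Blender 2.80, mathutils matrices compose with the @ operator, which is why the gltf2_blender_math.multiply shim can be dropped here. A minimal illustration with unrelated matrices, just showing the operator:

    from mathutils import Matrix

    # Compose a translation with a uniform scale, then invert; same pattern as
    # the axis_basis_change @ matrix_world @ matrix_local chain above.
    m = Matrix.Translation((1.0, 0.0, 0.0)) @ Matrix.Scale(2.0, 4)
    inv = m.inverted()
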
diff --git a/io_scene_gltf2/blender/exp/gltf2_blender_utils.py b/io_scene_gltf2/blender/exp/gltf2_blender_utils.py
deleted file mode 100755
index 8d5baae7..00000000
--- a/io_scene_gltf2/blender/exp/gltf2_blender_utils.py
+++ /dev/null
@@ -1,67 +0,0 @@
-# Copyright 2018 The glTF-Blender-IO authors.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import math
-from io_scene_gltf2.io.com import gltf2_io_constants
-
-
-# TODO: we could apply functional programming to these problems (currently we only have a single use case)
-
-def split_list_by_data_type(l: list, data_type: gltf2_io_constants.DataType):
- """
- Split a flat list of components by their data type.
-
- E.g.: A list [0,1,2,3,4,5] of data type Vec3 would be split to [[0,1,2], [3,4,5]]
- :param l: the flat list
- :param data_type: the data type of the list
- :return: a list of lists, where each element list contains the components of the data type
- """
- if not (len(l) % gltf2_io_constants.DataType.num_elements(data_type) == 0):
- raise ValueError("List length does not match specified data type")
- num_elements = gltf2_io_constants.DataType.num_elements(data_type)
- return [l[i:i + num_elements] for i in range(0, len(l), num_elements)]
-
-
-def max_components(l: list, data_type: gltf2_io_constants.DataType) -> list:
- """
- Find the maximum components in a flat list.
-
- This is required, for example, for the glTF2.0 accessor min and max properties
- :param l: the flat list of components
- :param data_type: the data type of the list (determines the length of the result)
- :return: a list with length num_elements(data_type) containing the maximum per component along the list
- """
- components_lists = split_list_by_data_type(l, data_type)
- result = [-math.inf] * gltf2_io_constants.DataType.num_elements(data_type)
- for components in components_lists:
- for i, c in enumerate(components):
- result[i] = max(result[i], c)
- return result
-
-
-def min_components(l: list, data_type: gltf2_io_constants.DataType) -> list:
- """
- Find the minimum components in a flat list.
-
- This is required, for example, for the glTF2.0 accessor min and max properties
- :param l: the flat list of components
- :param data_type: the data type of the list (determines the length of the result)
- :return: a list with length num_elements(data_type) containing the minimum per component along the list
- """
- components_lists = split_list_by_data_type(l, data_type)
- result = [math.inf] * gltf2_io_constants.DataType.num_elements(data_type)
- for components in components_lists:
- for i, c in enumerate(components):
- result[i] = min(result[i], c)
- return result
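
With these helpers removed, the per-component min/max that the exporter still needs (via include_max_and_min) can be computed directly with numpy. A minimal equivalent, not the add-on's actual code:

    import numpy as np

    def min_max_components(flat_list, num_components):
        a = np.array(flat_list, dtype=np.float32).reshape(-1, num_components)
        return a.min(axis=0).tolist(), a.max(axis=0).tolist()

    # min_max_components([0, 1, 2, 3, 4, 5], 3) -> ([0.0, 1.0, 2.0], [3.0, 4.0, 5.0])
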
diff --git a/io_scene_gltf2/blender/imp/gltf2_blender_KHR_materials_pbrSpecularGlossiness.py b/io_scene_gltf2/blender/imp/gltf2_blender_KHR_materials_pbrSpecularGlossiness.py
index ce5e1aed..bb1bf272 100755
--- a/io_scene_gltf2/blender/imp/gltf2_blender_KHR_materials_pbrSpecularGlossiness.py
+++ b/io_scene_gltf2/blender/imp/gltf2_blender_KHR_materials_pbrSpecularGlossiness.py
@@ -79,7 +79,8 @@ def pbr_specular_glossiness(mh):
)
if mh.pymat.occlusion_texture is not None:
- node = make_settings_node(mh, location=(610, -1060))
+ node = make_settings_node(mh)
+ node.location = (610, -1060)
occlusion(
mh,
location=(510, -970),
diff --git a/io_scene_gltf2/blender/imp/gltf2_blender_gltf.py b/io_scene_gltf2/blender/imp/gltf2_blender_gltf.py
index efa7f003..226720a3 100755
--- a/io_scene_gltf2/blender/imp/gltf2_blender_gltf.py
+++ b/io_scene_gltf2/blender/imp/gltf2_blender_gltf.py
@@ -50,9 +50,9 @@ class BlenderGlTF():
@staticmethod
def set_convert_functions(gltf):
- yup2zup = bpy.app.debug_value != 100
+ gltf.yup2zup = bpy.app.debug_value != 100
- if yup2zup:
+ if gltf.yup2zup:
# glTF Y-Up space --> Blender Z-up space
# X,Y,Z --> X,-Z,Y
def convert_loc(x): return Vector([x[0], -x[2], x[1]])
diff --git a/io_scene_gltf2/blender/imp/gltf2_blender_mesh.py b/io_scene_gltf2/blender/imp/gltf2_blender_mesh.py
index 7914a41b..e13b9c8f 100755
--- a/io_scene_gltf2/blender/imp/gltf2_blender_mesh.py
+++ b/io_scene_gltf2/blender/imp/gltf2_blender_mesh.py
@@ -13,11 +13,12 @@
# limitations under the License.
import bpy
-import bmesh
+from mathutils import Vector, Matrix
+import numpy as np
+from ...io.imp.gltf2_io_binary import BinaryData
from ..com.gltf2_blender_extras import set_extras
from .gltf2_blender_material import BlenderMaterial
-from .gltf2_blender_primitive import BlenderPrimitive
class BlenderMesh():
@@ -28,118 +29,614 @@ class BlenderMesh():
@staticmethod
def create(gltf, mesh_idx, skin_idx):
"""Mesh creation."""
- pymesh = gltf.data.meshes[mesh_idx]
+ return create_mesh(gltf, mesh_idx, skin_idx)
- # Create one bmesh, add all primitives to it, and then convert it to a
- # mesh.
- bme = bmesh.new()
- # List of all the materials this mesh will use. The material each
- # primitive uses is set by giving an index into this list.
- materials = []
+# Maximum number of TEXCOORD_n/COLOR_n sets to import
+UV_MAX = 8
+COLOR_MAX = 8
- # Process all primitives
- for prim in pymesh.primitives:
- if prim.material is None:
- material_idx = None
- else:
- pymaterial = gltf.data.materials[prim.material]
-
- vertex_color = None
- if 'COLOR_0' in prim.attributes:
- vertex_color = 'COLOR_0'
- # Create Blender material if needed
- if vertex_color not in pymaterial.blender_material:
- BlenderMaterial.create(gltf, prim.material, vertex_color)
- material_name = pymaterial.blender_material[vertex_color]
- material = bpy.data.materials[material_name]
+def create_mesh(gltf, mesh_idx, skin_idx):
+ pymesh = gltf.data.meshes[mesh_idx]
+ name = pymesh.name or 'Mesh_%d' % mesh_idx
+ mesh = bpy.data.meshes.new(name)
- try:
- material_idx = materials.index(material.name)
- except ValueError:
- materials.append(material.name)
- material_idx = len(materials) - 1
+ # Temporarily parent the mesh to an object.
+ # This is used to set skin weights and shapekeys.
+ tmp_ob = None
+ try:
+ tmp_ob = bpy.data.objects.new('##gltf-import:tmp-object##', mesh)
+ do_primitives(gltf, mesh_idx, skin_idx, mesh, tmp_ob)
- BlenderPrimitive.add_primitive_to_bmesh(gltf, bme, pymesh, prim, skin_idx, material_idx)
+ finally:
+ if tmp_ob:
+ bpy.data.objects.remove(tmp_ob)
- name = pymesh.name or 'Mesh_' + str(mesh_idx)
- mesh = bpy.data.meshes.new(name)
- BlenderMesh.bmesh_to_mesh(gltf, pymesh, bme, mesh)
- bme.free()
- for name_material in materials:
- mesh.materials.append(bpy.data.materials[name_material])
- mesh.update()
+ return mesh
- set_extras(mesh, pymesh.extras, exclude=['targetNames'])
- # Clear accessor cache after all primitives are done
- gltf.accessor_cache = {}
+def do_primitives(gltf, mesh_idx, skin_idx, mesh, ob):
+ """Put all primitive data into the mesh."""
+ pymesh = gltf.data.meshes[mesh_idx]
- return mesh
+ # Scan the primitives to find out what we need to create
- @staticmethod
- def bmesh_to_mesh(gltf, pymesh, bme, mesh):
- bme.to_mesh(mesh)
-
- # Unfortunately need to do shapekeys/normals/smoothing ourselves.
-
- # Shapekeys
- if len(bme.verts.layers.shape) != 0:
- # The only way I could find to create a shape key was to temporarily
- # parent mesh to an object and use obj.shape_key_add.
- tmp_ob = None
- try:
- tmp_ob = bpy.data.objects.new('##gltf-import:tmp-object##', mesh)
- tmp_ob.shape_key_add(name='Basis')
- mesh.shape_keys.name = mesh.name
- for layer_name in bme.verts.layers.shape.keys():
- tmp_ob.shape_key_add(name=layer_name)
- key_block = mesh.shape_keys.key_blocks[layer_name]
- layer = bme.verts.layers.shape[layer_name]
-
- for i, v in enumerate(bme.verts):
- key_block.data[i].co = v[layer]
- finally:
- if tmp_ob:
- bpy.data.objects.remove(tmp_ob)
-
- # Normals
- mesh.update()
+ has_normals = False
+ num_uvs = 0
+ num_cols = 0
+ num_joint_sets = 0
+ for prim in pymesh.primitives:
+ if 'POSITION' not in prim.attributes:
+ continue
if gltf.import_settings['import_shading'] == "NORMALS":
- mesh.create_normals_split()
-
- use_smooths = [] # whether to smooth for each poly
- face_idx = 0
- for prim in pymesh.primitives:
- if gltf.import_settings['import_shading'] == "FLAT" or \
- 'NORMAL' not in prim.attributes:
- use_smooths += [False] * prim.num_faces
- elif gltf.import_settings['import_shading'] == "SMOOTH":
- use_smooths += [True] * prim.num_faces
- elif gltf.import_settings['import_shading'] == "NORMALS":
- mesh_loops = mesh.loops
- for fi in range(face_idx, face_idx + prim.num_faces):
- poly = mesh.polygons[fi]
- # "Flat normals" are when all the vertices in poly have the
- # poly's normal. Otherwise, smooth the poly.
- for loop_idx in range(poly.loop_start, poly.loop_start + poly.loop_total):
- vi = mesh_loops[loop_idx].vertex_index
- if poly.normal.dot(bme.verts[vi].normal) <= 0.9999999:
- use_smooths.append(True)
- break
- else:
- use_smooths.append(False)
+ if 'NORMAL' in prim.attributes:
+ has_normals = True
+
+ if skin_idx is not None:
+ i = 0
+ while ('JOINTS_%d' % i) in prim.attributes and \
+ ('WEIGHTS_%d' % i) in prim.attributes:
+ i += 1
+ num_joint_sets = max(i, num_joint_sets)
+
+ i = 0
+ while i < UV_MAX and ('TEXCOORD_%d' % i) in prim.attributes: i += 1
+ num_uvs = max(i, num_uvs)
+
+ i = 0
+ while i < COLOR_MAX and ('COLOR_%d' % i) in prim.attributes: i += 1
+ num_cols = max(i, num_cols)
+
+ num_shapekeys = 0
+ for morph_i, _ in enumerate(pymesh.primitives[0].targets or []):
+ if pymesh.shapekey_names[morph_i] is not None:
+ num_shapekeys += 1
+
+ # -------------
+ # We'll process all the primitives gathering arrays to feed into the
+ # various foreach_set function that create the mesh data.
+
+ num_faces = 0 # total number of faces
+ vert_locs = np.empty(dtype=np.float32, shape=(0,3)) # coordinate for each vert
+ vert_normals = np.empty(dtype=np.float32, shape=(0,3)) # normal for each vert
+ edge_vidxs = np.array([], dtype=np.uint32) # vertex_index for each loose edge
+ loop_vidxs = np.array([], dtype=np.uint32) # vertex_index for each loop
+ loop_uvs = [
+ np.empty(dtype=np.float32, shape=(0,2)) # UV for each loop for each layer
+ for _ in range(num_uvs)
+ ]
+ loop_cols = [
+ np.empty(dtype=np.float32, shape=(0,4)) # color for each loop for each layer
+ for _ in range(num_cols)
+ ]
+ vert_joints = [
+ np.empty(dtype=np.uint32, shape=(0,4)) # 4 joints for each vert for each set
+ for _ in range(num_joint_sets)
+ ]
+ vert_weights = [
+ np.empty(dtype=np.float32, shape=(0,4)) # 4 weights for each vert for each set
+ for _ in range(num_joint_sets)
+ ]
+ sk_vert_locs = [
+ np.empty(dtype=np.float32, shape=(0,3)) # coordinate for each vert for each shapekey
+ for _ in range(num_shapekeys)
+ ]
+
+ for prim in pymesh.primitives:
+ prim.num_faces = 0
+
+ if 'POSITION' not in prim.attributes:
+ continue
+
+ vert_index_base = len(vert_locs)
+
+ if prim.indices is not None:
+ indices = BinaryData.decode_accessor(gltf, prim.indices)
+ indices = indices.reshape(len(indices))
+ else:
+ num_verts = gltf.data.accessors[prim.attributes['POSITION']].count
+ indices = np.arange(0, num_verts, dtype=np.uint32)
+
+ mode = 4 if prim.mode is None else prim.mode
+ points, edges, tris = points_edges_tris(mode, indices)
+ if points is not None:
+ indices = points
+ elif edges is not None:
+ indices = edges
+ else:
+ indices = tris
+
+ # We'll add one vert to the arrays for each index used in indices
+ unique_indices, inv_indices = np.unique(indices, return_inverse=True)
+
+ vs = BinaryData.decode_accessor(gltf, prim.attributes['POSITION'], cache=True)
+ vert_locs = np.concatenate((vert_locs, vs[unique_indices]))
+
+ if has_normals:
+ if 'NORMAL' in prim.attributes:
+ ns = BinaryData.decode_accessor(gltf, prim.attributes['NORMAL'], cache=True)
+ ns = ns[unique_indices]
else:
- # shouldn't happen
- assert False
+ ns = np.zeros((len(unique_indices), 3), dtype=np.float32)
+ vert_normals = np.concatenate((vert_normals, ns))
- face_idx += prim.num_faces
- mesh.polygons.foreach_set('use_smooth', use_smooths)
+ for i in range(num_joint_sets):
+ if ('JOINTS_%d' % i) in prim.attributes and ('WEIGHTS_%d' % i) in prim.attributes:
+ js = BinaryData.decode_accessor(gltf, prim.attributes['JOINTS_%d' % i], cache=True)
+ ws = BinaryData.decode_accessor(gltf, prim.attributes['WEIGHTS_%d' % i], cache=True)
+ js = js[unique_indices]
+ ws = ws[unique_indices]
+ else:
+ js = np.zeros((len(unique_indices), 4), dtype=np.uint32)
+ ws = np.zeros((len(unique_indices), 4), dtype=np.float32)
+ vert_joints[i] = np.concatenate((vert_joints[i], js))
+ vert_weights[i] = np.concatenate((vert_weights[i], ws))
- # Custom normals, now that every update is done
- if gltf.import_settings['import_shading'] == "NORMALS":
- custom_normals = [v.normal for v in bme.verts]
- mesh.normals_split_custom_set_from_vertices(custom_normals)
- mesh.use_auto_smooth = True
+ for morph_i, target in enumerate(prim.targets or []):
+ if pymesh.shapekey_names[morph_i] is None:
+ continue
+ morph_vs = BinaryData.decode_accessor(gltf, target['POSITION'], cache=True)
+ morph_vs = morph_vs[unique_indices]
+ sk_vert_locs[morph_i] = np.concatenate((sk_vert_locs[morph_i], morph_vs))
+
+ # inv_indices are the indices into the verts just for this prim;
+ # calculate indices into the overall verts array
+ prim_vidxs = inv_indices.astype(np.uint32, copy=False)
+ prim_vidxs += vert_index_base # offset for verts from previous prims
+
+ if edges is not None:
+ edge_vidxs = np.concatenate((edge_vidxs, prim_vidxs))
+
+ if tris is not None:
+ prim.num_faces = len(indices) // 3
+ num_faces += prim.num_faces
+
+ loop_vidxs = np.concatenate((loop_vidxs, prim_vidxs))
+
+ for uv_i in range(num_uvs):
+ if ('TEXCOORD_%d' % uv_i) in prim.attributes:
+ uvs = BinaryData.decode_accessor(gltf, prim.attributes['TEXCOORD_%d' % uv_i], cache=True)
+ uvs = uvs[indices]
+ else:
+ uvs = np.zeros((len(indices), 2), dtype=np.float32)
+ loop_uvs[uv_i] = np.concatenate((loop_uvs[uv_i], uvs))
+
+ for col_i in range(num_cols):
+ if ('COLOR_%d' % col_i) in prim.attributes:
+ cols = BinaryData.decode_accessor(gltf, prim.attributes['COLOR_%d' % col_i], cache=True)
+ cols = cols[indices]
+ if cols.shape[1] == 3:
+ cols = colors_rgb_to_rgba(cols)
+ else:
+ cols = np.ones((len(indices), 4), dtype=np.float32)
+ loop_cols[col_i] = np.concatenate((loop_cols[col_i], cols))
+
+ # Accessors are cached in case they are shared between primitives; clear
+ # the cache now that all prims are done.
+ gltf.decode_accessor_cache = {}
+
+ if gltf.import_settings['merge_vertices']:
+ vert_locs, vert_normals, vert_joints, vert_weights, \
+ sk_vert_locs, loop_vidxs, edge_vidxs = \
+ merge_duplicate_verts(
+ vert_locs, vert_normals, vert_joints, vert_weights, \
+ sk_vert_locs, loop_vidxs, edge_vidxs\
+ )
+
+ # ---------------
+ # Convert all the arrays glTF -> Blender
+
+ # Change from relative to absolute positions for morph locs
+ for sk_locs in sk_vert_locs:
+ sk_locs += vert_locs
+
+ if gltf.yup2zup:
+ locs_yup_to_zup(vert_locs)
+ locs_yup_to_zup(vert_normals)
+ for sk_locs in sk_vert_locs:
+ locs_yup_to_zup(sk_locs)
+
+ if num_joint_sets:
+ skin_into_bind_pose(
+ gltf, skin_idx, vert_joints, vert_weights,
+ locs=[vert_locs] + sk_vert_locs,
+ vert_normals=vert_normals,
+ )
+
+ for uvs in loop_uvs:
+ uvs_gltf_to_blender(uvs)
+
+ for cols in loop_cols:
+ colors_linear_to_srgb(cols[:, :-1])
+
+ # ---------------
+ # Start creating things
+
+ mesh.vertices.add(len(vert_locs))
+ mesh.vertices.foreach_set('co', squish(vert_locs))
+
+ mesh.loops.add(len(loop_vidxs))
+ mesh.loops.foreach_set('vertex_index', loop_vidxs)
+
+ mesh.edges.add(len(edge_vidxs) // 2)
+ mesh.edges.foreach_set('vertices', edge_vidxs)
+
+ mesh.polygons.add(num_faces)
+
+ # All polys are tris
+ loop_starts = np.arange(0, 3 * num_faces, step=3)
+ loop_totals = np.full(num_faces, 3)
+ mesh.polygons.foreach_set('loop_start', loop_starts)
+ mesh.polygons.foreach_set('loop_total', loop_totals)
+
+ for uv_i in range(num_uvs):
+ name = 'UVMap' if uv_i == 0 else 'UVMap.%03d' % uv_i
+ layer = mesh.uv_layers.new(name=name)
+ layer.data.foreach_set('uv', squish(loop_uvs[uv_i]))
+
+ for col_i in range(num_cols):
+ name = 'Col' if col_i == 0 else 'Col.%03d' % col_i
+ layer = mesh.vertex_colors.new(name=name)
+
+ layer.data.foreach_set('color', squish(loop_cols[col_i]))
+
+ # Skinning
+ # TODO: this is slow :/
+ if num_joint_sets:
+ pyskin = gltf.data.skins[skin_idx]
+ for i, _ in enumerate(pyskin.joints):
+ # ob is a temp object, so don't worry about the name.
+ ob.vertex_groups.new(name='X%d' % i)
+
+ vgs = list(ob.vertex_groups)
+
+ for i in range(num_joint_sets):
+ js = vert_joints[i].tolist() # tolist() is faster
+ ws = vert_weights[i].tolist()
+ for vi in range(len(vert_locs)):
+ w0, w1, w2, w3 = ws[vi]
+ j0, j1, j2, j3 = js[vi]
+ if w0 != 0: vgs[j0].add((vi,), w0, 'REPLACE')
+ if w1 != 0: vgs[j1].add((vi,), w1, 'REPLACE')
+ if w2 != 0: vgs[j2].add((vi,), w2, 'REPLACE')
+ if w3 != 0: vgs[j3].add((vi,), w3, 'REPLACE')
+
+ # Shapekeys
+ if num_shapekeys:
+ ob.shape_key_add(name='Basis')
+ mesh.shape_keys.name = mesh.name
+
+ sk_i = 0
+ for sk_name in pymesh.shapekey_names:
+ if sk_name is None:
+ continue
+
+ ob.shape_key_add(name=sk_name)
+ key_block = mesh.shape_keys.key_blocks[sk_name]
+ key_block.data.foreach_set('co', squish(sk_vert_locs[sk_i]))
+
+ sk_i += 1
+
+ # ----
+ # Assign materials to faces
+
+ # Initialize to no-material, ie. an index guaranteed to be OOB for the
+ # material slots. A mesh obviously can't have more materials than it has
+ # primitives...
+ oob_material_idx = len(pymesh.primitives)
+ material_indices = np.full(num_faces, oob_material_idx)
+
+ f = 0
+ for prim in pymesh.primitives:
+ if prim.material is not None:
+ # Get the material
+ pymaterial = gltf.data.materials[prim.material]
+ vertex_color = 'COLOR_0' if 'COLOR_0' in prim.attributes else None
+ if vertex_color not in pymaterial.blender_material:
+ BlenderMaterial.create(gltf, prim.material, vertex_color)
+ material_name = pymaterial.blender_material[vertex_color]
+
+ # Put material in slot (if not there)
+ if material_name not in mesh.materials:
+ mesh.materials.append(bpy.data.materials[material_name])
+ material_index = mesh.materials.find(material_name)
+
+ material_indices[f:f + prim.num_faces].fill(material_index)
+
+ f += prim.num_faces
+
+ mesh.polygons.foreach_set('material_index', material_indices)
+
+ # ----
+ # Normals
+
+ # Set poly smoothing
+ # TODO: numpyify?
+ smooths = [] # use_smooth for each poly
+ f = 0
+ for prim in pymesh.primitives:
+ if gltf.import_settings['import_shading'] == "FLAT" or \
+ 'NORMAL' not in prim.attributes:
+ smooths += [False] * prim.num_faces
+
+ elif gltf.import_settings['import_shading'] == "SMOOTH":
+ smooths += [True] * prim.num_faces
+
+ elif gltf.import_settings['import_shading'] == "NORMALS":
+ for fi in range(f, f + prim.num_faces):
+ # Make the face flat if the face's normal is
+ # equal to all of its loops' normals.
+ poly_normal = mesh.polygons[fi].normal
+ smooths.append(
+ poly_normal.dot(vert_normals[loop_vidxs[3*fi + 0]]) <= 0.9999999 or
+ poly_normal.dot(vert_normals[loop_vidxs[3*fi + 1]]) <= 0.9999999 or
+ poly_normal.dot(vert_normals[loop_vidxs[3*fi + 2]]) <= 0.9999999
+ )
+
+ f += prim.num_faces
+
+ mesh.polygons.foreach_set('use_smooth', smooths)
+
+ mesh.validate()
+ has_loose_edges = len(edge_vidxs) != 0 # need to calc_loose_edges for them to show up
+ mesh.update(calc_edges_loose=has_loose_edges)
+
+ if has_normals:
+ mesh.create_normals_split()
+ mesh.normals_split_custom_set_from_vertices(vert_normals)
+ mesh.use_auto_smooth = True
+
+
+def points_edges_tris(mode, indices):
+ points = None
+ edges = None
+ tris = None
+
+ if mode == 0:
+ # POINTS
+ points = indices
+
+ elif mode == 1:
+ # LINES
+ # 1 3
+ # / /
+ # 0 2
+ edges = indices
+
+ elif mode == 2:
+ # LINE LOOP
+ # 1---2
+ # / \
+ # 0-------3
+ # in: 0123
+ # out: 01122330
+ edges = np.empty(2 * len(indices), dtype=np.uint32)
+ edges[[0, -1]] = indices[[0, 0]] # 0______0
+ edges[1:-1] = np.repeat(indices[1:], 2) # 01122330
+
+ elif mode == 3:
+ # LINE STRIP
+ # 1---2
+ # / \
+ # 0 3
+ # in: 0123
+ # out: 011223
+ edges = np.empty(2 * len(indices) - 2, dtype=np.uint32)
+ edges[[0, -1]] = indices[[0, -1]] # 0____3
+ edges[1:-1] = np.repeat(indices[1:-1], 2) # 011223
+
+ elif mode == 4:
+ # TRIANGLES
+ # 2 3
+ # / \ / \
+ # 0---1 4---5
+ tris = indices
+
+ elif mode == 5:
+ # TRIANGLE STRIP
+ # 0---2---4
+ # \ / \ /
+ # 1---3
+ # TODO: numpyify
+ def alternate(i, xs):
+ even = i % 2 == 0
+ return xs if even else (xs[0], xs[2], xs[1])
+ tris = np.array([
+ alternate(i, (indices[i], indices[i + 1], indices[i + 2]))
+ for i in range(0, len(indices) - 2)
+ ])
+ tris = squish(tris)
+
+ elif mode == 6:
+ # TRIANGLE FAN
+ # 3---2
+ # / \ / \
+ # 4---0---1
+ # TODO: numpyify
+ tris = np.array([
+ (indices[0], indices[i], indices[i + 1])
+ for i in range(1, len(indices) - 1)
+ ])
+ tris = squish(tris)
+
+ else:
+ raise Exception('primitive mode unimplemented: %d' % mode)
+
+ return points, edges, tris
+
+
+def squish(array):
+ """Squish nD array into 1D array (required by foreach_set)."""
+ return array.reshape(array.size)
+
+
+def colors_rgb_to_rgba(rgb):
+ rgba = np.ones((len(rgb), 4), dtype=np.float32)
+ rgba[:, :3] = rgb
+ return rgba
+
+
+def colors_linear_to_srgb(color):
+ assert color.shape[1] == 3 # only change RGB, not A
+
+ not_small = color >= 0.0031308
+ small_result = np.where(color < 0.0, 0.0, color * 12.92)
+ large_result = 1.055 * np.power(color, 1.0 / 2.4, where=not_small) - 0.055
+ color[:] = np.where(not_small, large_result, small_result)
+
+
+def locs_yup_to_zup(vecs):
+ # x,y,z -> x,-z,y
+ vecs[:, [1,2]] = vecs[:, [2,1]]
+ vecs[:, 1] *= -1
+
+
+def uvs_gltf_to_blender(uvs):
+ # u,v -> u,1-v
+ uvs[:, 1] *= -1
+ uvs[:, 1] += 1
+
+
+def skin_into_bind_pose(gltf, skin_idx, vert_joints, vert_weights, locs, vert_normals):
+ # Skin each position/normal using the bind pose.
+ # Skinning equation: vert' = sum_(j,w) w * joint_mat[j] * vert
+ # where the sum is over all (joint,weight) pairs.
+
+ # Calculate joint matrices
+ joint_mats = []
+ pyskin = gltf.data.skins[skin_idx]
+ if pyskin.inverse_bind_matrices is not None:
+ inv_binds = BinaryData.get_data_from_accessor(gltf, pyskin.inverse_bind_matrices)
+ inv_binds = [gltf.matrix_gltf_to_blender(m) for m in inv_binds]
+ else:
+ inv_binds = [Matrix.Identity(4) for i in range(len(pyskin.joints))]
+ bind_mats = [gltf.vnodes[joint].bind_arma_mat for joint in pyskin.joints]
+ joint_mats = [bind_mat @ inv_bind for bind_mat, inv_bind in zip(bind_mats, inv_binds)]
+
+ # TODO: check if joint_mats are all (approximately) 1, and skip skinning
+
+ joint_mats = np.array(joint_mats, dtype=np.float32)
+
+ # Compute the skinning matrices for every vert
+ num_verts = len(locs[0])
+ skinning_mats = np.zeros((num_verts, 4, 4), dtype=np.float32)
+ weight_sums = np.zeros(num_verts, dtype=np.float32)
+ for js, ws in zip(vert_joints, vert_weights):
+ for i in range(4):
+ skinning_mats += ws[:, i].reshape(len(ws), 1, 1) * joint_mats[js[:, i]]
+ weight_sums += ws[:, i]
+ # Normalize weights to one; necessary for old files / quantized weights
+ skinning_mats /= weight_sums.reshape(num_verts, 1, 1)
+
+ skinning_mats_3x3 = skinning_mats[:, :3, :3]
+ skinning_trans = skinning_mats[:, :3, 3]
+
+ for vs in locs:
+ vs[:] = mul_mats_vecs(skinning_mats_3x3, vs)
+ vs[:] += skinning_trans
+
+ if len(vert_normals) != 0:
+ vert_normals[:] = mul_mats_vecs(skinning_mats_3x3, vert_normals)
+ # Don't translate normals!
+ normalize_vecs(vert_normals)
+
+
+def mul_mats_vecs(mats, vecs):
+ """Given [m1,m2,...] and [v1,v2,...], returns [m1@v1,m2@v2,...]. 3D only."""
+ return np.matmul(mats, vecs.reshape(len(vecs), 3, 1)).reshape(len(vecs), 3)
+
+
+def normalize_vecs(vectors):
+ norms = np.linalg.norm(vectors, axis=1, keepdims=True)
+ np.divide(vectors, norms, out=vectors, where=norms != 0)
+
+
+def merge_duplicate_verts(vert_locs, vert_normals, vert_joints, vert_weights, sk_vert_locs, loop_vidxs, edge_vidxs):
+ # This function attempts to invert the splitting done when exporting to
+ # glTF. Welds together verts with the same per-vert data (but possibly
+ # different per-loop data).
+ #
+ # Ideally normals would be treated as per-loop data, but that has problems,
+ # so we currently treat the normal as per-vert.
+ #
+ # Strategy is simple: put all the per-vert data into an array of structs
+ # ("dots"), dedupe with np.unique, then take all the data back out.
+
+ # Very often two verts that "morally" should be merged will have normals
+ # with very small differences. Round off the normals to smooth this over.
+ if len(vert_normals) != 0:
+ vert_normals *= 50000
+ vert_normals[:] = np.trunc(vert_normals)
+ vert_normals *= (1/50000)
+
+ dot_fields = [('x', np.float32), ('y', np.float32), ('z', np.float32)]
+ if len(vert_normals) != 0:
+ dot_fields += [('nx', np.float32), ('ny', np.float32), ('nz', np.float32)]
+ for i, _ in enumerate(vert_joints):
+ dot_fields += [
+ ('joint%dx' % i, np.uint32), ('joint%dy' % i, np.uint32),
+ ('joint%dz' % i, np.uint32), ('joint%dw' % i, np.uint32),
+ ('weight%dx' % i, np.float32), ('weight%dy' % i, np.float32),
+ ('weight%dz' % i, np.float32), ('weight%dw' % i, np.float32),
+ ]
+ for i, _ in enumerate(sk_vert_locs):
+ dot_fields += [
+ ('sk%dx' % i, np.float32), ('sk%dy' % i, np.float32), ('sk%dz' % i, np.float32),
+ ]
+ dots = np.empty(len(vert_locs), dtype=np.dtype(dot_fields))
+
+ dots['x'] = vert_locs[:, 0]
+ dots['y'] = vert_locs[:, 1]
+ dots['z'] = vert_locs[:, 2]
+ if len(vert_normals) != 0:
+ dots['nx'] = vert_normals[:, 0]
+ dots['ny'] = vert_normals[:, 1]
+ dots['nz'] = vert_normals[:, 2]
+ for i, (joints, weights) in enumerate(zip(vert_joints, vert_weights)):
+ dots['joint%dx' % i] = joints[:, 0]
+ dots['joint%dy' % i] = joints[:, 1]
+ dots['joint%dz' % i] = joints[:, 2]
+ dots['joint%dw' % i] = joints[:, 3]
+ dots['weight%dx' % i] = weights[:, 0]
+ dots['weight%dy' % i] = weights[:, 1]
+ dots['weight%dz' % i] = weights[:, 2]
+ dots['weight%dw' % i] = weights[:, 3]
+ for i, locs in enumerate(sk_vert_locs):
+ dots['sk%dx' % i] = locs[:, 0]
+ dots['sk%dy' % i] = locs[:, 1]
+ dots['sk%dz' % i] = locs[:, 2]
+
+ unique_dots, inv_indices = np.unique(dots, return_inverse=True)
+
+ loop_vidxs = inv_indices[loop_vidxs]
+ edge_vidxs = inv_indices[edge_vidxs]
+
+ vert_locs = np.empty((len(unique_dots), 3), dtype=np.float32)
+ vert_locs[:, 0] = unique_dots['x']
+ vert_locs[:, 1] = unique_dots['y']
+ vert_locs[:, 2] = unique_dots['z']
+ if len(vert_normals) != 0:
+ vert_normals = np.empty((len(unique_dots), 3), dtype=np.float32)
+ vert_normals[:, 0] = unique_dots['nx']
+ vert_normals[:, 1] = unique_dots['ny']
+ vert_normals[:, 2] = unique_dots['nz']
+ for i in range(len(vert_joints)):
+ vert_joints[i] = np.empty((len(unique_dots), 4), dtype=np.uint32)
+ vert_joints[i][:, 0] = unique_dots['joint%dx' % i]
+ vert_joints[i][:, 1] = unique_dots['joint%dy' % i]
+ vert_joints[i][:, 2] = unique_dots['joint%dz' % i]
+ vert_joints[i][:, 3] = unique_dots['joint%dw' % i]
+ vert_weights[i] = np.empty((len(unique_dots), 4), dtype=np.float32)
+ vert_weights[i][:, 0] = unique_dots['weight%dx' % i]
+ vert_weights[i][:, 1] = unique_dots['weight%dy' % i]
+ vert_weights[i][:, 2] = unique_dots['weight%dz' % i]
+ vert_weights[i][:, 3] = unique_dots['weight%dw' % i]
+ for i in range(len(sk_vert_locs)):
+ sk_vert_locs[i] = np.empty((len(unique_dots), 3), dtype=np.float32)
+ sk_vert_locs[i][:, 0] = unique_dots['sk%dx' % i]
+ sk_vert_locs[i][:, 1] = unique_dots['sk%dy' % i]
+ sk_vert_locs[i][:, 2] = unique_dots['sk%dz' % i]
+
+ return vert_locs, vert_normals, vert_joints, vert_weights, sk_vert_locs, loop_vidxs, edge_vidxs
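
Both do_primitives and merge_duplicate_verts lean on np.unique(..., return_inverse=True) to compact shared data and remap indices in one pass. A small worked example with made-up indices:

    import numpy as np

    indices = np.array([0, 2, 5, 2], dtype=np.uint32)
    unique_indices, inv_indices = np.unique(indices, return_inverse=True)
    # unique_indices == [0, 2, 5]    -> which accessor rows to copy out
    # inv_indices    == [0, 1, 2, 1] -> the original indices re-expressed
    #                                   against the compacted vertex array
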
diff --git a/io_scene_gltf2/blender/imp/gltf2_blender_pbrMetallicRoughness.py b/io_scene_gltf2/blender/imp/gltf2_blender_pbrMetallicRoughness.py
index 00bd08d2..deb9e301 100755
--- a/io_scene_gltf2/blender/imp/gltf2_blender_pbrMetallicRoughness.py
+++ b/io_scene_gltf2/blender/imp/gltf2_blender_pbrMetallicRoughness.py
@@ -233,7 +233,7 @@ def base_color(
base_color_factor = [1, 1, 1, 1]
if base_color_texture is None and not mh.vertex_color:
- color_socket.default_value = base_color_factor
+ color_socket.default_value = base_color_factor[:3] + [1]
if alpha_socket is not None:
alpha_socket.default_value = base_color_factor[3]
return
@@ -242,10 +242,7 @@ def base_color(
needs_color_factor = base_color_factor[:3] != [1, 1, 1]
needs_alpha_factor = base_color_factor[3] != 1.0 and alpha_socket is not None
if needs_color_factor or needs_alpha_factor:
- # For now, always create the color factor node because the exporter
- # reads the alpha value from here. Can get rid of "or needs_alpha_factor"
- # when it learns to understand the alpha socket.
- if needs_color_factor or needs_alpha_factor:
+ if needs_color_factor:
node = mh.node_tree.nodes.new('ShaderNodeMixRGB')
node.label = 'Color Factor'
node.location = x - 140, y
@@ -255,7 +252,7 @@ def base_color(
# Inputs
node.inputs['Fac'].default_value = 1.0
color_socket = node.inputs['Color1']
- node.inputs['Color2'].default_value = base_color_factor
+ node.inputs['Color2'].default_value = base_color_factor[:3] + [1]
if needs_alpha_factor:
node = mh.node_tree.nodes.new('ShaderNodeMath')
diff --git a/io_scene_gltf2/blender/imp/gltf2_blender_primitive.py b/io_scene_gltf2/blender/imp/gltf2_blender_primitive.py
deleted file mode 100755
index d544778c..00000000
--- a/io_scene_gltf2/blender/imp/gltf2_blender_primitive.py
+++ /dev/null
@@ -1,344 +0,0 @@
-# Copyright 2018-2019 The glTF-Blender-IO authors.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import bpy
-from mathutils import Vector, Matrix
-
-from ...io.imp.gltf2_io_binary import BinaryData
-from ...io.com.gltf2_io_color_management import color_linear_to_srgb
-from ...io.com import gltf2_io_debug
-
-
-MAX_NUM_COLOR_SETS = 8
-MAX_NUM_TEXCOORD_SETS = 8
-
-class BlenderPrimitive():
- """Blender Primitive."""
- def __new__(cls, *args, **kwargs):
- raise RuntimeError("%s should not be instantiated" % cls)
-
- @staticmethod
- def get_layer(bme_layers, name):
- if name not in bme_layers:
- return bme_layers.new(name)
- return bme_layers[name]
-
- @staticmethod
- def add_primitive_to_bmesh(gltf, bme, pymesh, pyprimitive, skin_idx, material_index):
- attributes = pyprimitive.attributes
-
- if 'POSITION' not in attributes:
- pyprimitive.num_faces = 0
- return
-
- positions = BinaryData.get_data_from_accessor(gltf, attributes['POSITION'], cache=True)
-
- if pyprimitive.indices is not None:
- # Not using cache, this is not useful for indices
- indices = BinaryData.get_data_from_accessor(gltf, pyprimitive.indices)
- indices = [i[0] for i in indices]
- else:
- indices = list(range(len(positions)))
-
- bme_verts = bme.verts
- bme_edges = bme.edges
- bme_faces = bme.faces
-
- # Gather up the joints/weights (multiple sets allow >4 influences)
- joint_sets = []
- weight_sets = []
- set_num = 0
- while 'JOINTS_%d' % set_num in attributes and 'WEIGHTS_%d' % set_num in attributes:
- joint_data = BinaryData.get_data_from_accessor(gltf, attributes['JOINTS_%d' % set_num], cache=True)
- weight_data = BinaryData.get_data_from_accessor(gltf, attributes['WEIGHTS_%d' % set_num], cache=True)
-
- joint_sets.append(joint_data)
- weight_sets.append(weight_data)
-
- set_num += 1
-
- # For skinned meshes, we will need to calculate the position of the
- # verts in the bind pose, ie. the pose the edit bones are in.
- if skin_idx is not None:
- pyskin = gltf.data.skins[skin_idx]
- if pyskin.inverse_bind_matrices is not None:
- inv_binds = BinaryData.get_data_from_accessor(gltf, pyskin.inverse_bind_matrices)
- inv_binds = [gltf.matrix_gltf_to_blender(m) for m in inv_binds]
- else:
- inv_binds = [Matrix.Identity(4) for i in range(len(pyskin.joints))]
- bind_mats = [gltf.vnodes[joint].bind_arma_mat for joint in pyskin.joints]
- joint_mats = [bind_mat @ inv_bind for bind_mat, inv_bind in zip(bind_mats, inv_binds)]
-
- def skin_vert(pos, pidx):
- out = Vector((0, 0, 0))
- # Spec says weights should already sum to 1 but some models
- # don't do it (ex. CesiumMan), so normalize.
- weight_sum = 0
- for joint_set, weight_set in zip(joint_sets, weight_sets):
- for j in range(0, 4):
- weight = weight_set[pidx][j]
- if weight != 0.0:
- weight_sum += weight
- joint = joint_set[pidx][j]
- out += weight * (joint_mats[joint] @ pos)
- out /= weight_sum
- return out
-
- def skin_normal(norm, pidx):
- # TODO: not sure this is right
- norm = Vector([norm[0], norm[1], norm[2], 0])
- out = Vector((0, 0, 0, 0))
- weight_sum = 0
- for joint_set, weight_set in zip(joint_sets, weight_sets):
- for j in range(0, 4):
- weight = weight_set[pidx][j]
- if weight != 0.0:
- weight_sum += weight
- joint = joint_set[pidx][j]
- out += weight * (joint_mats[joint] @ norm)
- out /= weight_sum
- out = out.to_3d().normalized()
- return out
-
- # Every vertex has an index into the primitive's attribute arrays and a
- # *different* index into the BMesh's list of verts. Call the first one the
- # pidx and the second the bidx. Need to keep them straight!
-
- # The pidx of all the vertices that are actually used by the primitive (only
- # indices that appear in the pyprimitive.indices list are actually used)
- used_pidxs = set(indices)
- # Contains a pair (bidx, pidx) for every vertex in the primitive
- vert_idxs = []
- # pidx_to_bidx[pidx] will be the bidx of the vertex with that pidx (or -1 if
- # unused)
- pidx_to_bidx = [-1] * len(positions)
- bidx = len(bme_verts)
- if bpy.app.debug:
- used_pidxs = list(used_pidxs)
- used_pidxs.sort()
- for pidx in used_pidxs:
- pos = gltf.loc_gltf_to_blender(positions[pidx])
- if skin_idx is not None:
- pos = skin_vert(pos, pidx)
-
- bme_verts.new(pos)
- vert_idxs.append((bidx, pidx))
- pidx_to_bidx[pidx] = bidx
- bidx += 1
- bme_verts.ensure_lookup_table()
-
- # Add edges/faces to bmesh
- mode = 4 if pyprimitive.mode is None else pyprimitive.mode
- edges, faces = BlenderPrimitive.edges_and_faces(mode, indices)
- # NOTE: edges and vertices are in terms of pidxs!
- for edge in edges:
- try:
- bme_edges.new((
- bme_verts[pidx_to_bidx[edge[0]]],
- bme_verts[pidx_to_bidx[edge[1]]],
- ))
- except ValueError:
- # Ignores duplicate/degenerate edges
- pass
- pyprimitive.num_faces = 0
- for face in faces:
- try:
- face = bme_faces.new((
- bme_verts[pidx_to_bidx[face[0]]],
- bme_verts[pidx_to_bidx[face[1]]],
- bme_verts[pidx_to_bidx[face[2]]],
- ))
-
- if material_index is not None:
- face.material_index = material_index
-
- pyprimitive.num_faces += 1
-
- except ValueError:
- # Ignores duplicate/degenerate faces
- pass
-
- # Set normals
- if 'NORMAL' in attributes:
- normals = BinaryData.get_data_from_accessor(gltf, attributes['NORMAL'], cache=True)
-
- if skin_idx is None:
- for bidx, pidx in vert_idxs:
- bme_verts[bidx].normal = gltf.normal_gltf_to_blender(normals[pidx])
- else:
- for bidx, pidx in vert_idxs:
- normal = gltf.normal_gltf_to_blender(normals[pidx])
- bme_verts[bidx].normal = skin_normal(normal, pidx)
-
- # Set vertex colors. Add them in the order COLOR_0, COLOR_1, etc.
- set_num = 0
- while 'COLOR_%d' % set_num in attributes:
- if set_num >= MAX_NUM_COLOR_SETS:
- gltf2_io_debug.print_console("WARNING",
- "too many color sets; COLOR_%d will be ignored" % set_num
- )
- break
-
- layer_name = 'Col' if set_num == 0 else 'Col.%03d' % set_num
- layer = BlenderPrimitive.get_layer(bme.loops.layers.color, layer_name)
-
- colors = BinaryData.get_data_from_accessor(gltf, attributes['COLOR_%d' % set_num], cache=True)
- is_rgba = len(colors[0]) == 4
-
- for bidx, pidx in vert_idxs:
- color = colors[pidx]
- col = (
- color_linear_to_srgb(color[0]),
- color_linear_to_srgb(color[1]),
- color_linear_to_srgb(color[2]),
- color[3] if is_rgba else 1.0,
- )
- for loop in bme_verts[bidx].link_loops:
- loop[layer] = col
-
- set_num += 1
-
- # Set texcoords
- set_num = 0
- while 'TEXCOORD_%d' % set_num in attributes:
- if set_num >= MAX_NUM_TEXCOORD_SETS:
- gltf2_io_debug.print_console("WARNING",
- "too many UV sets; TEXCOORD_%d will be ignored" % set_num
- )
- break
-
- layer_name = 'UVMap' if set_num == 0 else 'UVMap.%03d' % set_num
- layer = BlenderPrimitive.get_layer(bme.loops.layers.uv, layer_name)
-
- uvs = BinaryData.get_data_from_accessor(gltf, attributes['TEXCOORD_%d' % set_num], cache=True)
-
- for bidx, pidx in vert_idxs:
- # UV transform
- u, v = uvs[pidx]
- uv = (u, 1 - v)
-
- for loop in bme_verts[bidx].link_loops:
- loop[layer].uv = uv
-
- set_num += 1
-
- # Set joints/weights for skinning
- if joint_sets:
- layer = BlenderPrimitive.get_layer(bme.verts.layers.deform, 'Vertex Weights')
-
- for joint_set, weight_set in zip(joint_sets, weight_sets):
- for bidx, pidx in vert_idxs:
- for j in range(0, 4):
- weight = weight_set[pidx][j]
- if weight != 0.0:
- joint = joint_set[pidx][j]
- bme_verts[bidx][layer][joint] = weight
-
- # Set morph target positions (no normals/tangents)
- for sk, target in enumerate(pyprimitive.targets or []):
- if pymesh.shapekey_names[sk] is None:
- continue
-
- layer_name = pymesh.shapekey_names[sk]
- layer = BlenderPrimitive.get_layer(bme.verts.layers.shape, layer_name)
-
- morph_positions = BinaryData.get_data_from_accessor(gltf, target['POSITION'], cache=True)
-
- if skin_idx is None:
- for bidx, pidx in vert_idxs:
- bme_verts[bidx][layer] = (
- gltf.loc_gltf_to_blender(positions[pidx]) +
- gltf.loc_gltf_to_blender(morph_positions[pidx])
- )
- else:
- for bidx, pidx in vert_idxs:
- pos = (
- gltf.loc_gltf_to_blender(positions[pidx]) +
- gltf.loc_gltf_to_blender(morph_positions[pidx])
- )
- bme_verts[bidx][layer] = skin_vert(pos, pidx)
-
- @staticmethod
- def edges_and_faces(mode, indices):
- """Converts the indices in a particular primitive mode into standard lists of
- edges (pairs of indices) and faces (tuples of CCW indices).
- """
- es = []
- fs = []
-
- if mode == 0:
- # POINTS
- pass
- elif mode == 1:
- # LINES
- # 1 3
- # / /
- # 0 2
- es = [
- (indices[i], indices[i + 1])
- for i in range(0, len(indices), 2)
- ]
- elif mode == 2:
- # LINE LOOP
- # 1---2
- # / \
- # 0-------3
- es = [
- (indices[i], indices[i + 1])
- for i in range(0, len(indices) - 1)
- ]
- es.append((indices[-1], indices[0]))
- elif mode == 3:
- # LINE STRIP
- # 1---2
- # / \
- # 0 3
- es = [
- (indices[i], indices[i + 1])
- for i in range(0, len(indices) - 1)
- ]
- elif mode == 4:
- # TRIANGLES
- # 2 3
- # / \ / \
- # 0---1 4---5
- fs = [
- (indices[i], indices[i + 1], indices[i + 2])
- for i in range(0, len(indices), 3)
- ]
- elif mode == 5:
- # TRIANGLE STRIP
- # 0---2---4
- # \ / \ /
- # 1---3
- def alternate(i, xs):
- even = i % 2 == 0
- return xs if even else (xs[0], xs[2], xs[1])
- fs = [
- alternate(i, (indices[i], indices[i + 1], indices[i + 2]))
- for i in range(0, len(indices) - 2)
- ]
- elif mode == 6:
- # TRIANGLE FAN
- # 3---2
- # / \ / \
- # 4---0---1
- fs = [
- (indices[0], indices[i], indices[i + 1])
- for i in range(1, len(indices) - 1)
- ]
- else:
- raise Exception('primitive mode unimplemented: %d' % mode)
-
- return es, fs
diff --git a/io_scene_gltf2/io/com/gltf2_io_constants.py b/io_scene_gltf2/io/com/gltf2_io_constants.py
index 873e004e..983fe9ab 100755
--- a/io_scene_gltf2/io/com/gltf2_io_constants.py
+++ b/io_scene_gltf2/io/com/gltf2_io_constants.py
@@ -35,6 +35,18 @@ class ComponentType(IntEnum):
}[component_type]
@classmethod
+ def to_numpy_dtype(cls, component_type):
+ import numpy as np
+ return {
+ ComponentType.Byte: np.int8,
+ ComponentType.UnsignedByte: np.uint8,
+ ComponentType.Short: np.int16,
+ ComponentType.UnsignedShort: np.uint16,
+ ComponentType.UnsignedInt: np.uint32,
+ ComponentType.Float: np.float32,
+ }[component_type]
+
+ @classmethod
def from_legacy_define(cls, type_define):
return {
GLTF_COMPONENT_TYPE_BYTE: ComponentType.Byte,
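
The new to_numpy_dtype mapping mirrors the hard-coded component-type table that decode_accessor_obj uses further down. A hypothetical use:

    import numpy as np
    from io_scene_gltf2.io.com.gltf2_io_constants import ComponentType

    dtype = ComponentType.to_numpy_dtype(ComponentType.Float)  # np.float32
    values = np.frombuffer(bytes(8), dtype=np.dtype(dtype).newbyteorder('<'))  # two zeros
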
diff --git a/io_scene_gltf2/io/imp/gltf2_io_binary.py b/io_scene_gltf2/io/imp/gltf2_io_binary.py
index 7cfcbc40..728cf0f0 100755
--- a/io_scene_gltf2/io/imp/gltf2_io_binary.py
+++ b/io_scene_gltf2/io/imp/gltf2_io_binary.py
@@ -13,6 +13,7 @@
# limitations under the License.
import struct
+import numpy as np
from ..com.gltf2_io import Accessor
@@ -22,8 +23,8 @@ class BinaryData():
def __new__(cls, *args, **kwargs):
raise RuntimeError("%s should not be instantiated" % cls)
-# Note that this function is not used in Blender importer, but is kept in
-# Source code to be used in any pipeline that want to manage gltf/glb file in python
+ # Note that this function is not used in Blender importer, but is kept in
+ # Source code to be used in any pipeline that want to manage gltf/glb file in python
@staticmethod
def get_binary_from_accessor(gltf, accessor_idx):
"""Get binary from accessor."""
@@ -63,8 +64,7 @@ class BinaryData():
if accessor_idx in gltf.accessor_cache:
return gltf.accessor_cache[accessor_idx]
- accessor = gltf.data.accessors[accessor_idx]
- data = BinaryData.get_data_from_accessor_obj(gltf, accessor)
+ data = BinaryData.decode_accessor(gltf, accessor_idx).tolist()
if cache:
gltf.accessor_cache[accessor_idx] = data
@@ -72,7 +72,36 @@ class BinaryData():
return data
@staticmethod
- def get_data_from_accessor_obj(gltf, accessor):
+ def decode_accessor(gltf, accessor_idx, cache=False):
+ """Decodes accessor to 2D numpy array (count x num_components)."""
+ if accessor_idx in gltf.decode_accessor_cache:
+        return gltf.decode_accessor_cache[accessor_idx]
+
+ accessor = gltf.data.accessors[accessor_idx]
+ array = BinaryData.decode_accessor_obj(gltf, accessor)
+
+ if cache:
+            gltf.decode_accessor_cache[accessor_idx] = array
+ # Prevent accidentally modifying cached arrays
+ array.flags.writeable = False
+
+ return array
+
+ @staticmethod
+ def decode_accessor_obj(gltf, accessor):
+ # MAT2/3 have special alignment requirements that aren't handled. But it
+ # doesn't matter because nothing uses them.
+ assert accessor.type not in ['MAT2', 'MAT3']
+
+ dtype = {
+ 5120: np.int8,
+ 5121: np.uint8,
+ 5122: np.int16,
+ 5123: np.uint16,
+ 5125: np.uint32,
+ 5126: np.float32,
+ }[accessor.component_type]
+
if accessor.buffer_view is not None:
bufferView = gltf.data.buffer_views[accessor.buffer_view]
buffer_data = BinaryData.get_buffer_view(gltf, accessor.buffer_view)
@@ -80,40 +109,45 @@ class BinaryData():
accessor_offset = accessor.byte_offset or 0
buffer_data = buffer_data[accessor_offset:]
- fmt_char = gltf.fmt_char_dict[accessor.component_type]
component_nb = gltf.component_nb_dict[accessor.type]
- fmt = '<' + (fmt_char * component_nb)
- default_stride = struct.calcsize(fmt)
-
- # Special layouts for certain formats; see the section about
- # data alignment in the glTF 2.0 spec.
- component_size = struct.calcsize('<' + fmt_char)
- if accessor.type == 'MAT2' and component_size == 1:
- fmt = '<FFxxFF'.replace('F', fmt_char)
- default_stride = 8
- elif accessor.type == 'MAT3' and component_size == 1:
- fmt = '<FFFxFFFxFFF'.replace('F', fmt_char)
- default_stride = 12
- elif accessor.type == 'MAT3' and component_size == 2:
- fmt = '<FFFxxFFFxxFFF'.replace('F', fmt_char)
- default_stride = 24
+ bytes_per_elem = dtype(1).nbytes
+ default_stride = bytes_per_elem * component_nb
stride = bufferView.byte_stride or default_stride
- # Decode
- unpack_from = struct.Struct(fmt).unpack_from
- data = [
- unpack_from(buffer_data, offset)
- for offset in range(0, accessor.count*stride, stride)
- ]
+ if stride == default_stride:
+ array = np.frombuffer(
+ buffer_data,
+ dtype=np.dtype(dtype).newbyteorder('<'),
+ count=accessor.count * component_nb,
+ )
+ array = array.reshape(accessor.count, component_nb)
+
+ else:
+ # The data looks like
+ # XXXppXXXppXXXppXXX
+ # where X are the components and p are padding.
+ # One XXXpp group is one stride's worth of data.
+ assert stride % bytes_per_elem == 0
+ elems_per_stride = stride // bytes_per_elem
+ num_elems = (accessor.count - 1) * elems_per_stride + component_nb
+
+ array = np.frombuffer(
+ buffer_data,
+ dtype=np.dtype(dtype).newbyteorder('<'),
+ count=num_elems,
+ )
+ assert array.strides[0] == bytes_per_elem
+ array = np.lib.stride_tricks.as_strided(
+ array,
+ shape=(accessor.count, component_nb),
+ strides=(stride, bytes_per_elem),
+ )
else:
# No buffer view; initialize to zeros
component_nb = gltf.component_nb_dict[accessor.type]
- data = [
- (0,) * component_nb
- for i in range(accessor.count)
- ]
+ array = np.zeros((accessor.count, component_nb), dtype=dtype)
if accessor.sparse:
sparse_indices_obj = Accessor.from_dict({
@@ -123,6 +157,9 @@ class BinaryData():
'componentType': accessor.sparse.indices.component_type,
'type': 'SCALAR',
})
+ sparse_indices = BinaryData.decode_accessor_obj(gltf, sparse_indices_obj)
+ sparse_indices = sparse_indices.reshape(len(sparse_indices))
+
sparse_values_obj = Accessor.from_dict({
'count': accessor.sparse.count,
'bufferView': accessor.sparse.values.buffer_view,
@@ -130,31 +167,26 @@ class BinaryData():
'componentType': accessor.component_type,
'type': accessor.type,
})
- sparse_indices = BinaryData.get_data_from_accessor_obj(gltf, sparse_indices_obj)
- sparse_values = BinaryData.get_data_from_accessor_obj(gltf, sparse_values_obj)
+ sparse_values = BinaryData.decode_accessor_obj(gltf, sparse_values_obj)
- # Apply sparse
- for i in range(accessor.sparse.count):
- data[sparse_indices[i][0]] = sparse_values[i]
+ if not array.flags.writeable:
+ array = array.copy()
+ array[sparse_indices] = sparse_values
# Normalization
if accessor.normalized:
- for idx, tuple in enumerate(data):
- new_tuple = ()
- for i in tuple:
- if accessor.component_type == 5120:
- new_tuple += (max(float(i / 127.0 ), -1.0),)
- elif accessor.component_type == 5121:
- new_tuple += (float(i / 255.0),)
- elif accessor.component_type == 5122:
- new_tuple += (max(float(i / 32767.0), -1.0),)
- elif accessor.component_type == 5123:
- new_tuple += (i / 65535.0,)
- else:
- new_tuple += (float(i),)
- data[idx] = new_tuple
-
- return data
+ if accessor.component_type == 5120: # int8
+ array = np.maximum(-1.0, array / 127.0)
+ elif accessor.component_type == 5121: # uint8
+ array = array / 255.0
+ elif accessor.component_type == 5122: # int16
+ array = np.maximum(-1.0, array / 32767.0)
+ elif accessor.component_type == 5123: # uint16
+ array = array / 65535.0
+ else:
+ array = array.astype(np.float64)
+
+ return array
@staticmethod
def get_image_data(gltf, img_idx):
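
When bufferView.byte_stride differs from the tightly packed default, the decoder reads one flat run of elements and re-views it with as_strided so each row starts stride bytes after the previous one. A minimal sketch with made-up data: two uint8 VEC3 elements stored with a 4-byte stride (one padding byte each):

    import numpy as np

    buf = bytes([1, 2, 3, 0, 4, 5, 6, 0])
    count, component_nb, bytes_per_elem, stride = 2, 3, 1, 4
    num_elems = (count - 1) * (stride // bytes_per_elem) + component_nb
    flat = np.frombuffer(buf, dtype=np.dtype(np.uint8).newbyteorder('<'), count=num_elems)
    array = np.lib.stride_tricks.as_strided(
        flat, shape=(count, component_nb), strides=(stride, bytes_per_elem))
    # array == [[1, 2, 3], [4, 5, 6]]
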
diff --git a/io_scene_gltf2/io/imp/gltf2_io_gltf.py b/io_scene_gltf2/io/imp/gltf2_io_gltf.py
index e63f1f55..49eee2d5 100755
--- a/io_scene_gltf2/io/imp/gltf2_io_gltf.py
+++ b/io_scene_gltf2/io/imp/gltf2_io_gltf.py
@@ -32,6 +32,7 @@ class glTFImporter():
self.glb_buffer = None
self.buffers = {}
self.accessor_cache = {}
+ self.decode_accessor_cache = {}
if 'loglevel' not in self.import_settings.keys():
self.import_settings['loglevel'] = logging.ERROR
@@ -47,6 +48,7 @@ class glTFImporter():
'KHR_materials_unlit',
'KHR_texture_transform',
'KHR_materials_clearcoat',
+ 'KHR_mesh_quantization',
]
# TODO : merge with io_constants