git.blender.org/blender-addons.git
author    Julien Duroure <julien.duroure@gmail.com>  2019-04-09 19:49:11 +0300
committer Julien Duroure <julien.duroure@gmail.com>  2019-04-09 19:49:11 +0300
commit    8f48a57eac58b45fca34d39d7a52da9278be4009 (patch)
tree      d798e48d9c797c8988feb79b258cdc6ca80e7ab7 /io_scene_gltf2/blender/exp
parent    8e72572153ed7166c284598c53af1e0ab4937263 (diff)
glTF exporter: fix / enhancement of animation export
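
The diff below does three related things: it resolves animation targets through a new get_object_from_datapath() helper, bakes pose-bone animations that carry constraints by stepping the scene frame by frame, and reworks the bone correction matrix applied to sampler output. A minimal sketch of the baking idea outside the exporter (object and bone names are hypothetical; the exporter derives its own frame step):

    import bpy

    scene = bpy.context.scene
    armature = bpy.data.objects["Armature"]
    pose_bone = armature.pose.bones["Bone"]

    baked = []
    for frame in range(scene.frame_start, scene.frame_end + 1):
        scene.frame_set(frame)  # evaluate the scene at this frame before sampling the pose
        loc, rot, scale = pose_bone.matrix_basis.decompose()
        baked.append((frame, loc.copy(), rot.copy(), scale.copy()))
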
Diffstat (limited to 'io_scene_gltf2/blender/exp')
-rwxr-xr-x  io_scene_gltf2/blender/exp/gltf2_blender_gather_animation_channels.py           |  7
-rwxr-xr-x  io_scene_gltf2/blender/exp/gltf2_blender_gather_animation_sampler_keyframes.py  | 60
-rwxr-xr-x  io_scene_gltf2/blender/exp/gltf2_blender_gather_animation_samplers.py           | 53
-rwxr-xr-x  io_scene_gltf2/blender/exp/gltf2_blender_get.py                                  | 19
4 files changed, 104 insertions(+), 35 deletions(-)
diff --git a/io_scene_gltf2/blender/exp/gltf2_blender_gather_animation_channels.py b/io_scene_gltf2/blender/exp/gltf2_blender_gather_animation_channels.py
index 808c970d..33962ee0 100755
--- a/io_scene_gltf2/blender/exp/gltf2_blender_gather_animation_channels.py
+++ b/io_scene_gltf2/blender/exp/gltf2_blender_gather_animation_channels.py
@@ -21,6 +21,7 @@ from io_scene_gltf2.io.com import gltf2_io_debug
from io_scene_gltf2.blender.exp.gltf2_blender_gather_cache import cached
from io_scene_gltf2.blender.exp import gltf2_blender_gather_animation_samplers
from io_scene_gltf2.blender.exp import gltf2_blender_gather_animation_channel_target
+from io_scene_gltf2.blender.exp import gltf2_blender_get
@cached
@@ -104,8 +105,8 @@ def __get_channel_groups(blender_action: bpy.types.Action, blender_object: bpy.t
target = blender_object
else:
try:
- target = blender_object.path_resolve(object_path)
- except ValueError:
+ target = gltf2_blender_get.get_object_from_datapath(blender_object, object_path)
+ except ValueError as e:
# if the object is a mesh and the action target path can not be resolved, we know that this is a morph
# animation.
if blender_object.type == "MESH":
@@ -113,7 +114,7 @@ def __get_channel_groups(blender_action: bpy.types.Action, blender_object: bpy.t
# shape_key = blender_object.data.shape_keys.path_resolve(object_path)
target = blender_object.data.shape_keys
else:
- gltf2_io_debug.print_console("WARNING", "Can not export animations with target {}".format(object_path))
+ gltf2_io_debug.print_console("WARNING", "Animation target {} not found".format(object_path))
continue
# group channels by target object and affected property of the target
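
The ValueError fallback above works because shape keys do not live on the object: a shape-key F-Curve's target path such as 'key_blocks["Key 1"]' cannot be resolved from the object, so the failure itself signals a morph-target animation. A small illustration (mesh and key names are hypothetical):

    import bpy

    obj = bpy.data.objects["Cube"]
    object_path = 'key_blocks["Key 1"]'  # target path taken from a shape-key F-Curve

    try:
        target = obj.path_resolve(object_path)
    except ValueError:
        # key_blocks are owned by the shape-key data-block, not the object,
        # so the resolve fails and the exporter falls back to it directly
        target = obj.data.shape_keys
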
diff --git a/io_scene_gltf2/blender/exp/gltf2_blender_gather_animation_sampler_keyframes.py b/io_scene_gltf2/blender/exp/gltf2_blender_gather_animation_sampler_keyframes.py
index 770838e1..90b73a9e 100755
--- a/io_scene_gltf2/blender/exp/gltf2_blender_gather_animation_sampler_keyframes.py
+++ b/io_scene_gltf2/blender/exp/gltf2_blender_gather_animation_sampler_keyframes.py
@@ -18,6 +18,8 @@ import typing
from io_scene_gltf2.blender.exp.gltf2_blender_gather_cache import cached
from io_scene_gltf2.blender.com import gltf2_blender_math
+from io_scene_gltf2.blender.exp import gltf2_blender_get
+from io_scene_gltf2.blender.exp import gltf2_blender_extract
from . import gltf2_blender_export_keys
from io_scene_gltf2.io.com import gltf2_io_debug
@@ -25,7 +27,9 @@ from io_scene_gltf2.io.com import gltf2_io_debug
class Keyframe:
def __init__(self, channels: typing.Tuple[bpy.types.FCurve], time: float):
self.seconds = time / bpy.context.scene.render.fps
- self.__target = channels[0].data_path.split('.')[-1]
+ self.frame = time
+ self.fps = bpy.context.scene.render.fps
+ self.target = channels[0].data_path.split('.')[-1]
self.__indices = [c.array_index for c in channels]
# Data holders for virtual properties
@@ -43,16 +47,16 @@ class Keyframe:
"rotation_quaternion": 4,
"scale": 3,
"value": 1
- }.get(self.__target)
+ }.get(self.target)
if length is None:
- raise RuntimeError("Animations with target type '{}' are not supported.".format(self.__target))
+ raise RuntimeError("Animations with target type '{}' are not supported.".format(self.target))
return length
def __set_indexed(self, value):
# 'value' targets don't use keyframe.array_index
- if self.__target == "value":
+ if self.target == "value":
return value
# Sometimes blender animations only reference a subset of components of a data target. Keyframe should always
# contain a complete Vector/ Quaternion --> use the array_index value of the keyframe to set components in such
@@ -60,7 +64,7 @@ class Keyframe:
result = [0.0] * self.__get_target_len()
for i, v in zip(self.__indices, value):
result[i] = v
- result = gltf2_blender_math.list_to_mathutils(result, self.__target)
+ result = gltf2_blender_math.list_to_mathutils(result, self.target)
return result
@property
@@ -90,8 +94,9 @@ class Keyframe:
# cache for performance reasons
@cached
-def gather_keyframes(channels: typing.Tuple[bpy.types.FCurve], export_settings) \
- -> typing.List[Keyframe]:
+def gather_keyframes(blender_object_if_armature: typing.Optional[bpy.types.Object],
+ channels: typing.Tuple[bpy.types.FCurve],
+ export_settings) -> typing.List[Keyframe]:
"""Convert the blender action groups' fcurves to keyframes for use in glTF."""
# Find the start and end of the whole action group
ranges = [channel.range() for channel in channels]
@@ -100,15 +105,36 @@ def gather_keyframes(channels: typing.Tuple[bpy.types.FCurve], export_settings)
end = max([channel.range()[1] for channel in channels])
keyframes = []
- if needs_baking(channels, export_settings):
- # Bake the animation, by evaluating it at a high frequency
+ if needs_baking(blender_object_if_armature, channels, export_settings):
+ # Bake the animation, by evaluating the animation for all frames
# TODO: maybe baking can also be done with FCurve.convert_to_samples
+
+ if blender_object_if_armature is not None:
+ pose_bone_if_armature = gltf2_blender_get.get_object_from_datapath(blender_object_if_armature,
+ channels[0].data_path)
+ else:
+ pose_bone_if_armature = None
+
+ # sample all frames
time = start
- # TODO: make user controllable
step = 1.0 / bpy.context.scene.render.fps
while time <= end:
key = Keyframe(channels, time)
- key.value = [c.evaluate(time) for c in channels]
+ if isinstance(pose_bone_if_armature, bpy.types.PoseBone):
+ # we need to bake in the constraints
+ bpy.context.scene.frame_set(time)
+ trans, rot, scale = pose_bone_if_armature.matrix_basis.decompose()
+ target_property = channels[0].data_path.split('.')[-1]
+ key.value = {
+ "location": trans,
+ "rotation_axis_angle": rot,
+ "rotation_euler": rot,
+ "rotation_quaternion": rot,
+ "scale": scale
+ }[target_property]
+
+ else:
+ key.value = [c.evaluate(time) for c in channels]
keyframes.append(key)
time += step
else:
@@ -153,7 +179,8 @@ def gather_keyframes(channels: typing.Tuple[bpy.types.FCurve], export_settings)
return keyframes
-def needs_baking(channels: typing.Tuple[bpy.types.FCurve],
+def needs_baking(blender_object_if_armature: typing.Optional[bpy.types.Object],
+ channels: typing.Tuple[bpy.types.FCurve],
export_settings
) -> bool:
"""
@@ -201,5 +228,14 @@ def needs_baking(channels: typing.Tuple[bpy.types.FCurve],
"Baking animation because of differently located keyframes in one channel")
return True
+ if blender_object_if_armature is not None:
+ animation_target = gltf2_blender_get.get_object_from_datapath(blender_object_if_armature, channels[0].data_path)
+ if isinstance(animation_target, bpy.types.PoseBone):
+ if len(animation_target.constraints) != 0:
+ # Constraints such as IK act on the bone -> can not be represented in glTF atm
+ gltf2_io_debug.print_console("WARNING",
+ "Baking animation because of unsupported constraints acting on the bone")
+ return True
+
return False
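
gather_keyframes and needs_baking now resolve the owner of the animated property (the pose bone) rather than the property value, so they can inspect bone.constraints and read bone.matrix_basis. The distinction the new helper makes, sketched with hypothetical names:

    import bpy

    armature = bpy.data.objects["Armature"]
    data_path = 'pose.bones["Bone"].location'  # typical F-Curve data path on an armature

    value = armature.path_resolve(data_path)   # the current value, a Vector

    # stripping the trailing transform property yields the owning PoseBone instead
    path_prop, path_attr = data_path.rsplit(".", 1)
    pose_bone = armature.path_resolve(path_prop)
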
diff --git a/io_scene_gltf2/blender/exp/gltf2_blender_gather_animation_samplers.py b/io_scene_gltf2/blender/exp/gltf2_blender_gather_animation_samplers.py
index b4fda3c6..4fedd469 100755
--- a/io_scene_gltf2/blender/exp/gltf2_blender_gather_animation_samplers.py
+++ b/io_scene_gltf2/blender/exp/gltf2_blender_gather_animation_samplers.py
@@ -22,6 +22,7 @@ from io_scene_gltf2.blender.com.gltf2_blender_data_path import get_target_proper
from io_scene_gltf2.blender.exp import gltf2_blender_gather_animation_sampler_keyframes
from io_scene_gltf2.blender.exp.gltf2_blender_gather_cache import cached
from io_scene_gltf2.blender.exp import gltf2_blender_gather_accessors
+from io_scene_gltf2.blender.exp import gltf2_blender_get
from io_scene_gltf2.io.com import gltf2_io
from io_scene_gltf2.io.com import gltf2_io_constants
from io_scene_gltf2.io.exp import gltf2_io_binary_data
@@ -33,26 +34,27 @@ def gather_animation_sampler(channels: typing.Tuple[bpy.types.FCurve],
blender_object: bpy.types.Object,
export_settings
) -> gltf2_io.AnimationSampler:
+ blender_object_if_armature = blender_object if blender_object.type == "ARMATURE" else None
return gltf2_io.AnimationSampler(
- extensions=__gather_extensions(channels, blender_object, export_settings),
- extras=__gather_extras(channels, blender_object, export_settings),
- input=__gather_input(channels, export_settings),
- interpolation=__gather_interpolation(channels, blender_object, export_settings),
+ extensions=__gather_extensions(channels, blender_object_if_armature, export_settings),
+ extras=__gather_extras(channels, blender_object_if_armature, export_settings),
+ input=__gather_input(channels, blender_object_if_armature, export_settings),
+ interpolation=__gather_interpolation(channels, blender_object_if_armature, export_settings),
output=__gather_output(channels, blender_object.matrix_parent_inverse.copy().freeze(),
- blender_object if blender_object.type == "ARMATURE" else None,
+ blender_object_if_armature,
export_settings)
)
def __gather_extensions(channels: typing.Tuple[bpy.types.FCurve],
- blender_object: bpy.types.Object,
+ blender_object_if_armature: typing.Optional[bpy.types.Object],
export_settings
) -> typing.Any:
return None
def __gather_extras(channels: typing.Tuple[bpy.types.FCurve],
- blender_object: bpy.types.Object,
+ blender_object_if_armature: typing.Optional[bpy.types.Object],
export_settings
) -> typing.Any:
return None
@@ -60,10 +62,13 @@ def __gather_extras(channels: typing.Tuple[bpy.types.FCurve],
@cached
def __gather_input(channels: typing.Tuple[bpy.types.FCurve],
+ blender_object_if_armature: typing.Optional[bpy.types.Object],
export_settings
) -> gltf2_io.Accessor:
"""Gather the key time codes."""
- keyframes = gltf2_blender_gather_animation_sampler_keyframes.gather_keyframes(channels, export_settings)
+ keyframes = gltf2_blender_gather_animation_sampler_keyframes.gather_keyframes(blender_object_if_armature,
+ channels,
+ export_settings)
times = [k.seconds for k in keyframes]
return gltf2_blender_gather_accessors.gather_accessor(
@@ -78,10 +83,12 @@ def __gather_input(channels: typing.Tuple[bpy.types.FCurve],
def __gather_interpolation(channels: typing.Tuple[bpy.types.FCurve],
- blender_object: bpy.types.Object,
+ blender_object_if_armature: typing.Optional[bpy.types.Object],
export_settings
) -> str:
- if gltf2_blender_gather_animation_sampler_keyframes.needs_baking(channels, export_settings):
+ if gltf2_blender_gather_animation_sampler_keyframes.needs_baking(blender_object_if_armature,
+ channels,
+ export_settings):
return 'STEP'
blender_keyframe = channels[0].keyframe_points[0]
@@ -101,7 +108,9 @@ def __gather_output(channels: typing.Tuple[bpy.types.FCurve],
export_settings
) -> gltf2_io.Accessor:
"""Gather the data of the keyframes."""
- keyframes = gltf2_blender_gather_animation_sampler_keyframes.gather_keyframes(channels, export_settings)
+ keyframes = gltf2_blender_gather_animation_sampler_keyframes.gather_keyframes(blender_object_if_armature,
+ channels,
+ export_settings)
target_datapath = channels[0].data_path
@@ -109,28 +118,31 @@ def __gather_output(channels: typing.Tuple[bpy.types.FCurve],
is_yup = export_settings[gltf2_blender_export_keys.YUP]
+ # bone animations need to be handled differently as they are in a different coordinate system
object_path = get_target_object_path(target_datapath)
is_armature_animation = blender_object_if_armature is not None and object_path != ""
+
if is_armature_animation:
- bone = blender_object_if_armature.path_resolve(object_path)
+ bone = gltf2_blender_get.get_object_from_datapath(blender_object_if_armature, object_path)
if isinstance(bone, bpy.types.PoseBone):
- axis_basis_change = mathutils.Matrix.Identity(4)
- if export_settings[gltf2_blender_export_keys.YUP]:
- axis_basis_change = mathutils.Matrix(
- ((1.0, 0.0, 0.0, 0.0), (0.0, 0.0, 1.0, 0.0), (0.0, -1.0, 0.0, 0.0), (0.0, 0.0, 0.0, 1.0)))
-
- # extract bone transform
if bone.parent is None:
+ axis_basis_change = mathutils.Matrix.Identity(4)
+ if export_settings[gltf2_blender_export_keys.YUP]:
+ axis_basis_change = mathutils.Matrix(
+ ((1.0, 0.0, 0.0, 0.0),
+ (0.0, 0.0, 1.0, 0.0),
+ (0.0, -1.0, 0.0, 0.0),
+ (0.0, 0.0, 0.0, 1.0)))
correction_matrix_local = gltf2_blender_math.multiply(axis_basis_change, bone.bone.matrix_local)
else:
correction_matrix_local = gltf2_blender_math.multiply(
bone.parent.bone.matrix_local.inverted(), bone.bone.matrix_local)
- transform = gltf2_blender_math.multiply(correction_matrix_local, bone.matrix_basis)
+ transform = correction_matrix_local
values = []
for keyframe in keyframes:
- # Transform the data and extract
+ # Transform the data and build gltf control points
value = gltf2_blender_math.transform(keyframe.value, target_datapath, transform)
if is_yup and not is_armature_animation:
value = gltf2_blender_math.swizzle_yup(value, target_datapath)
@@ -156,6 +168,7 @@ def __gather_output(channels: typing.Tuple[bpy.types.FCurve],
values += keyframe_value
+ # store the keyframe data in a binary buffer
component_type = gltf2_io_constants.ComponentType.Float
if get_target_property_name(target_datapath) == "value":
# channels with 'weight' targets must have scalar accessors
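
For bone channels the output values are now corrected with the bone's rest matrix only: root bones get the Z-up to Y-up axis change applied, child bones are expressed relative to their parent's rest matrix, and the previous multiplication by bone.matrix_basis is dropped (the keyframe values already carry the bone's local transform). A sketch of the correction matrix, using the Blender 2.8 matrix operator where the exporter goes through gltf2_blender_math.multiply (names are hypothetical):

    import bpy
    from mathutils import Matrix

    axis_basis_change = Matrix(
        ((1.0, 0.0, 0.0, 0.0),
         (0.0, 0.0, 1.0, 0.0),
         (0.0, -1.0, 0.0, 0.0),
         (0.0, 0.0, 0.0, 1.0)))  # Blender Z-up -> glTF Y-up

    bone = bpy.data.objects["Armature"].pose.bones["Bone"]

    if bone.parent is None:
        # root bones are converted into the glTF (Y-up) coordinate system
        correction_matrix_local = axis_basis_change @ bone.bone.matrix_local
    else:
        # child bones are expressed relative to the parent's rest pose
        correction_matrix_local = bone.parent.bone.matrix_local.inverted() @ bone.bone.matrix_local
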
diff --git a/io_scene_gltf2/blender/exp/gltf2_blender_get.py b/io_scene_gltf2/blender/exp/gltf2_blender_get.py
index fe3e0e20..27d8a341 100755
--- a/io_scene_gltf2/blender/exp/gltf2_blender_get.py
+++ b/io_scene_gltf2/blender/exp/gltf2_blender_get.py
@@ -23,6 +23,25 @@ def get_animation_target(action_group: bpy.types.ActionGroup):
return action_group.channels[0].data_path.split('.')[-1]
+def get_object_from_datapath(blender_object, data_path: str):
+ if "." in data_path:
+ # gives us: ('modifiers["Subsurf"]', 'levels')
+ path_prop, path_attr = data_path.rsplit(".", 1)
+
+ # same as: prop = obj.modifiers["Subsurf"]
+ if path_attr in ["rotation", "scale", "location",
+ "rotation_axis_angle", "rotation_euler", "rotation_quaternion"]:
+ prop = blender_object.path_resolve(path_prop)
+ else:
+ prop = blender_object.path_resolve(data_path)
+ else:
+ prop = blender_object
+ # single attribute such as name, location... etc
+ # path_attr = data_path
+
+ return prop
+
+
def get_socket_or_texture_slot(blender_material: bpy.types.Material, name: str):
"""
For a given material input name, retrieve the corresponding node tree socket or blender render texture slot.