Welcome to mirror list, hosted at ThFree Co, Russian Federation.

github.com/WolfireGames/overgrowth.git - Unnamed repository; edit this file 'description' to name the repository.
summaryrefslogtreecommitdiff
diff options
context:
space:
mode:
authorMax Danielsson <max@autious.net>2022-04-28 14:23:16 +0300
committerMax Danielsson <max@autious.net>2022-04-28 14:24:00 +0300
commita7bb7fe9df60a71ca6f3d0264d5ff2c3aec3bacb (patch)
tree05523765958457dfaa49f1c9088f01f04631783c
parentc34b5bbc73d1abcc5c400decb526ae7050f40771 (diff)
Add BlenderScript files from the Auxiliary repo
-rw-r--r--Data/BlenderScript/2.49/phxbn_import.py284
-rw-r--r--Data/BlenderScript/2.71/test.py101
-rw-r--r--Data/BlenderScript/addons/export_physics.py85
-rw-r--r--Data/BlenderScript/addons/fixedconstraint.py72
-rw-r--r--Data/BlenderScript/addons/genericconstraint.py102
-rw-r--r--Data/BlenderScript/addons/hingeconstraint.py74
-rw-r--r--Data/BlenderScript/addons/io_anm/__init__.py38
-rw-r--r--Data/BlenderScript/addons/io_anm/anm_types.py64
-rw-r--r--Data/BlenderScript/addons/io_anm/export_anm.py822
-rw-r--r--Data/BlenderScript/addons/io_anm/import_anm.py433
-rw-r--r--Data/BlenderScript/addons/io_canm/__init__.py29
-rw-r--r--Data/BlenderScript/addons/io_canm/export_canm.py136
-rw-r--r--Data/BlenderScript/addons/io_obj_shape/__init__.py38
-rw-r--r--Data/BlenderScript/addons/io_obj_shape/export_obj_shape.py26
-rw-r--r--Data/BlenderScript/addons/io_obj_shape/import_obj_shape.py47
-rw-r--r--Data/BlenderScript/addons/io_phxbn/__init__.py38
-rw-r--r--Data/BlenderScript/addons/io_phxbn/export_phxbn.py608
-rw-r--r--Data/BlenderScript/addons/io_phxbn/import_phxbn.py560
-rw-r--r--Data/BlenderScript/addons/io_phxbn/phxbn_types.py42
-rw-r--r--Data/BlenderScript/addons/symmetricalize.py79
-rw-r--r--Data/BlenderScript/io/export_obj.py989
-rw-r--r--Data/BlenderScript/lipsyncbatch.au340
22 files changed, 4707 insertions, 0 deletions
diff --git a/Data/BlenderScript/2.49/phxbn_import.py b/Data/BlenderScript/2.49/phxbn_import.py
new file mode 100644
index 00000000..4f6d8d0d
--- /dev/null
+++ b/Data/BlenderScript/2.49/phxbn_import.py
@@ -0,0 +1,284 @@
+#!BPY
+
+"""
+Name: 'Phoenix bones (.phxbn)...'
+Blender: 249
+Group: 'Import'
+Tip: 'Import a (.phxbn) skeleton file'
+"""
+
+__author__ = "Campbell Barton"
+__url__ = ("blender.org", "blenderartists.org")
+__version__ = "1.90 06/08/01"
+
+__bpydoc__ = """\
+This script imports PHXBN skeleton files to Blender.
+"""
+
+import Blender
+import bpy
+import BPyMessages
+import array
+Vector= Blender.Mathutils.Vector
+Euler= Blender.Mathutils.Euler
+Matrix= Blender.Mathutils.Matrix
+RotationMatrix = Blender.Mathutils.RotationMatrix
+TranslationMatrix= Blender.Mathutils.TranslationMatrix
+
+DEG2RAD = 0.017453292519943295
+
+class PHXBNdata:
+ def __init__(self):
+ self.vertices = []
+ self.bones = []
+ self.parents = []
+ self.bone_parents = []
+ self.bone_weights = array.array('f')
+ self.bone_ids = array.array('f')
+
+def ReadPHXBN(file_path, mesh_obj):
+ # File loading stuff
+ # Open the file for importing
+ file = open(file_path, 'rb')
+
+ print "\nLoading phxbn file: ", file_path
+ version = array.array('l')
+ version.fromfile(file, 1)
+ print 'Version: ', version[0], '\n'
+
+ rigging_stage = array.array('l')
+ rigging_stage.fromfile(file, 1)
+ #print 'Rigging stage: ', rigging_stage[0], '\n'
+
+ num_points = array.array('l')
+ num_points.fromfile(file, 1)
+ #print 'Num points: ', num_points[0], '\n'
+
+ data = PHXBNdata();
+
+ for i in range(0,num_points[0]):
+ vertex = array.array('f')
+ vertex.fromfile(file, 3)
+ data.vertices.append(vertex)
+
+ for i in range(0,num_points[0]):
+ parent = array.array('l')
+ parent.fromfile(file, 1)
+ data.parents.append(parent[0])
+
+ num_bones = array.array('l')
+ num_bones.fromfile(file, 1)
+ #print 'Num bones: ', num_bones[0], '\n'
+
+ for i in range(0,num_bones[0]):
+ bone = array.array('l')
+ bone.fromfile(file, 2)
+ data.bones.append(bone)
+
+ for i in range(0,num_bones[0]):
+ bone_parent = array.array('l')
+ bone_parent.fromfile(file, 1)
+ data.bone_parents.append(bone_parent[0])
+
+ bone_mass = array.array('f')
+ bone_mass.fromfile(file, num_bones[0])
+
+ bone_com = array.array('f')
+ bone_com.fromfile(file, num_bones[0]*3)
+
+ print bone_com[0]
+
+ mesh = mesh_obj.getData()
+
+ if rigging_stage[0] == 1:
+ num_faces = len(mesh.faces)
+ num_verts = num_faces * 3
+ print "Loading ", num_verts, " bone weights and ids"
+ file_bone_weights = array.array('f')
+ file_bone_ids = array.array('f')
+ file_bone_weights.fromfile(file, num_verts*4)
+ file_bone_ids.fromfile(file, num_verts*4)
+
+ num_verts = len(mesh.verts)
+ data.bone_weights = [0 for i in xrange(num_verts*4)]
+ data.bone_ids = [0 for i in xrange(num_verts*4)]
+ for face_id in xrange(num_faces):
+ for face_vert_num in xrange(3):
+ for i in xrange(4):
+ data.bone_weights[mesh.faces[face_id].v[face_vert_num].index*4+i] =file_bone_weights[face_id*12 + face_vert_num * 4 + i]
+ data.bone_ids[mesh.faces[face_id].v[face_vert_num].index*4+i] =file_bone_ids[face_id*12 + face_vert_num * 4 + i]
+
+ #print 'Vertices: ', data.vertices, '\n'
+
+ min_vec = Vector(mesh.verts[0].co)
+ max_vec = Vector(mesh.verts[0].co)
+ for vert in mesh.verts:
+ min_vec[0] = min(min_vec[0], vert.co[0])
+ min_vec[1] = min(min_vec[1], vert.co[1])
+ min_vec[2] = min(min_vec[2], vert.co[2])
+ max_vec[0] = max(max_vec[0], vert.co[0])
+ max_vec[1] = max(max_vec[1], vert.co[1])
+ max_vec[2] = max(max_vec[2], vert.co[2])
+
+ Blender.Window.EditMode(0)
+ center = (min_vec + max_vec)*0.5
+ print min_vec
+ print max_vec
+ print center
+ for vert in mesh.verts:
+ vert.co[0] = vert.co[0] - center[0]
+ vert.co[1] = vert.co[1] - center[1]
+ vert.co[2] = vert.co[2] - center[2]
+ mesh.update()
+
+ return data
+
+
+#=============#
+# TESTING #
+#=============#
+
+#('/metavr/mocap/bvh/boxer.bvh')
+#('/d/staggered_walk.bvh')
+#('/metavr/mocap/bvh/dg-306-g.bvh') # Incompleate EOF
+#('/metavr/mocap/bvh/wa8lk.bvh') # duplicate joint names, \r line endings.
+#('/metavr/mocap/bvh/walk4.bvh') # 0 channels
+
+'''
+import os
+DIR = '/metavr/mocap/bvh/'
+for f in ('/d/staggered_walk.bvh',):
+ #for f in os.listdir(DIR)[5:6]:
+ #for f in os.listdir(DIR):
+ if f.endswith('.bvh'):
+ s = Blender.Scene.New(f)
+ s.makeCurrent()
+ #file= DIR + f
+ file= f
+ print f
+ bvh_nodes= read_bvh(file, 1.0)
+ bvh_node_dict2armature(bvh_nodes, 1)
+'''
+
+def UI(file, PREF_UI= True):
+
+ if BPyMessages.Error_NoFile(file):
+ return
+
+ Draw= Blender.Draw
+
+ print 'Attempting import PHXBN:\n', file, '\n'
+
+ Blender.Window.WaitCursor(1)
+ # Get the BVH data and act on it.
+ t1= Blender.sys.time()
+ print 'Parsing phxbn...\n',
+ data = ReadPHXBN(file)
+ print '%.4f' % (Blender.sys.time()-t1)
+ t1= Blender.sys.time()
+ '''
+ print '\timporting to blender...',
+ if IMPORT_AS_ARMATURE: bvh_node_dict2armature(bvh_nodes, IMPORT_START_FRAME, IMPORT_LOOP)
+ if IMPORT_AS_EMPTIES: bvh_node_dict2objects(bvh_nodes, IMPORT_START_FRAME, IMPORT_LOOP)
+ '''
+ print 'Done in %.4f\n' % (Blender.sys.time()-t1)
+ Blender.Window.WaitCursor(0)
+
+def AddArmature(data, mesh_obj):
+ scn = bpy.data.scenes.active
+ arm_data = bpy.data.armatures.new()
+ arm_obj = scn.objects.new(arm_data)
+ arm_data.makeEditable()
+
+ num = 0
+ for data_bone in data.bones:
+ bone = Blender.Armature.Editbone()
+ bone.head = Vector(data.vertices[data_bone[0]][0],
+ data.vertices[data_bone[0]][1],
+ data.vertices[data_bone[0]][2])
+ bone.tail = Vector(data.vertices[data_bone[1]][0],
+ data.vertices[data_bone[1]][1],
+ data.vertices[data_bone[1]][2])
+ arm_data.bones["Bone_"+str(num)] = bone
+ num = num + 1
+
+
+ num = 0
+ for bone_parent in data.bone_parents:
+ name = "Bone_"+str(num);
+ parent_name = "Bone_"+str(bone_parent)
+ if bone_parent != -1:
+ arm_data.bones[name].parent = arm_data.bones[parent_name]
+ num = num + 1
+
+ arm_data.update()
+ arm_obj.link(arm_data)
+
+ vertgroup_created=[]
+ for bone in data.bones:
+ vertgroup_created.append(0)
+
+ mesh = mesh_obj.getData()
+ index = 0
+ vert_index = 0
+ for vert in mesh.verts:
+ for bone_num in range(0,4):
+ bone_id = int(data.bone_ids[index])
+ bone_weight = data.bone_weights[index]
+ name = "Bone_"+str(bone_id);
+ if vertgroup_created[bone_id]==0:
+ vertgroup_created[bone_id]=1
+ mesh.addVertGroup(name)
+ #assign the weight for this vertex
+ mesh.assignVertsToGroup(name, [vert_index], bone_weight, 'replace')
+ index = index + 1
+ vert_index = vert_index + 1
+ mesh.update()
+
+ arm_obj.makeParentDeform([mesh_obj], 0, 0)
+ arm_obj.drawMode = Blender.Object.DrawModes.XRAY
+
+def ReadObj(path):
+ name = path.split('\\')[-1].split('/')[-1]
+ mesh = Blender.NMesh.New( name )
+ file = open(path, 'r')
+ num_verts = 0;
+ for line in file:
+ words = line.split()
+ if len(words) == 0 or words[0].startswith('#'):
+ pass
+ elif words[0] == 'v':
+ x, y, z = float(words[1]), float(words[2]), float(words[3])
+ mesh.verts.append(Blender.NMesh.Vert(x, y, z))
+ num_verts = num_verts + 1
+ elif words[0] == 'f':
+ is_good = True;
+ faceVertList = []
+ for index_group in words[1:]:
+ index = index_group.split('/')[0]
+ if(int(index) > num_verts):
+ is_good = False
+ else:
+ faceVert = mesh.verts[int(index)-1]
+ faceVertList.append(faceVert)
+ if is_good:
+ newFace = Blender.NMesh.Face(faceVertList)
+ mesh.addFace(newFace)
+ ob = Blender.Object.New('Mesh', name)
+ ob.link(mesh) # tell the object to use the mesh we just made
+ scn = Blender.Scene.GetCurrent()
+ scn.link(ob)
+
+ return ob
+
+def main():
+ mesh_obj = ReadObj("C:\\Users\\David\\Desktop\\Wolfire SVN\\Project\\Data\\Models\\Characters\\IGF_Guard\\IGF_Guard.obj");
+ phxbn_data = ReadPHXBN("C:\\Users\\David\\Desktop\\Wolfire SVN\\Project\\Data\\Skeletons\\test.phxbn", mesh_obj);
+ AddArmature(phxbn_data, mesh_obj)
+
+ Blender.Redraw()
+ #Blender.Window.FileSelector(UI, 'Import PHXBN', '*.phxbn')
+
+if __name__ == '__main__':
+ #def foo():
+ main() \ No newline at end of file
diff --git a/Data/BlenderScript/2.71/test.py b/Data/BlenderScript/2.71/test.py
new file mode 100644
index 00000000..2d32dfe6
--- /dev/null
+++ b/Data/BlenderScript/2.71/test.py
@@ -0,0 +1,101 @@
+import bpy
+from xml.etree import cElementTree as ET
+
+print("\nRunning character loader:")
+
+working_dir = "C:/Users/David/Desktop/WolfireSVN/"
+
+actor_xml_path = working_dir + 'Data/Objects/IGF_Characters/IGF_GuardActor.xml'
+print("Loading actor file: "+actor_xml_path)
+xml_root = ET.parse(actor_xml_path).getroot()
+
+character_xml_path = None
+for element in xml_root:
+ if element.tag == "Character":
+ character_xml_path = element.text
+
+object_xml_path = None
+skeleton_xml_path = None
+if character_xml_path:
+ print("Loading character file: "+working_dir+character_xml_path)
+ xml_root = ET.parse(working_dir+character_xml_path).getroot()
+ for element in xml_root:
+ if(element.tag == "appearance"):
+ object_xml_path = element.get("obj_path")
+ skeleton_xml_path = element.get("skeleton")
+
+model_path = None
+color_path = None
+normal_path = None
+palette_map_path = None
+shader_name = None
+if object_xml_path:
+ print("Loading object file: "+working_dir+object_xml_path)
+ xml_root = ET.parse(working_dir+object_xml_path).getroot()
+ for element in xml_root:
+ if(element.tag == "Model"):
+ model_path = element.text
+ if(element.tag == "ColorMap"):
+ color_path = element.text
+ if(element.tag == "NormalMap"):
+ normal_path = element.text
+ if(element.tag == "PaletteMap"):
+ palette_map_path = element.text
+ if(element.tag == "ShaderName"):
+ shader_name = element.text
+
+bone_path = None
+if skeleton_xml_path:
+ print("Loading skeleton file: "+working_dir+skeleton_xml_path)
+ xml_root = ET.parse(working_dir+skeleton_xml_path).getroot()
+ print(xml_root)
+ model_path = xml_root.get("model_path")
+ bone_path = xml_root.get("bone_path")
+
+'''if model_path:
+ print("Model path: "+working_dir+model_path)
+ bpy.ops.import_scene.obj(filepath=(working_dir+model_path))
+'''
+
+_min_skeleton_version = 6
+
+import struct
+if bone_path:
+ print("Bone path: "+working_dir+bone_path)
+ with open(working_dir+bone_path, mode='rb') as file:
+ contents = file.read()
+ file.close()
+ cursor = 0;
+ temp_read = struct.unpack("i", contents[cursor:cursor+4])
+ cursor += 4
+ version = 5
+ if temp_read[0] >= _min_skeleton_version:
+ version = temp_read[0];
+ temp_read = struct.unpack("i", contents[cursor:cursor+4])
+ cursor += 4
+ print("Version: "+str(version))
+ print("Rigging stage: "+str(temp_read[0]))
+ num_points = struct.unpack("i", contents[cursor:cursor+4])[0]
+ cursor += 4
+ print("Num points: "+str(num_points))
+ points = []
+ for i in range(0, num_points):
+ points += struct.unpack("fff", contents[cursor:cursor+12])
+ cursor += 12
+
+ point_parents = []
+ if version >= 8:
+ for i in range(0, num_points):
+ point_parents += struct.unpack("i", contents[cursor:cursor+4])
+ cursor += 4
+
+ num_bones = struct.unpack("i", contents[cursor:cursor+4])[0]
+ cursor += 4
+ print("Num bones: "+str(num_bones))
+ bone_ends = []
+ bone_mats = []
+ for i in range(0, num_bones):
+ bone_ends += struct.unpack("ii", contents[cursor:cursor+8])
+ cursor += 8
+
+ print(bone_ends) \ No newline at end of file
diff --git a/Data/BlenderScript/addons/export_physics.py b/Data/BlenderScript/addons/export_physics.py
new file mode 100644
index 00000000..efb8fa16
--- /dev/null
+++ b/Data/BlenderScript/addons/export_physics.py
@@ -0,0 +1,85 @@
+import bpy
+import struct
+from bpy.props import *
+
+def GetWriteStream():
+ file_bytes = []
+ for obj in bpy.data.objects:
+ if obj.select:
+ obj_bytes = bytes()
+ obj_bytes += struct.pack("i",len(obj.name))
+ for c in obj.name:
+ obj_bytes += struct.pack("c", c)
+ obj.rotation_mode = 'QUATERNION'
+ q = obj.rotation_quaternion
+ obj_bytes += struct.pack("ffff", q[0], q[1], q[2], q[3])
+ l = obj.location
+ obj_bytes += struct.pack("fff", l[0], l[1], l[2])
+ s = obj.scale
+ obj_bytes += struct.pack("fff", s[0], s[1], s[2])
+ file_bytes.append(obj_bytes)
+
+ write_file_bytes = bytes()
+ write_file_bytes += struct.pack("BBBBBBBB", 211, ord('F'), ord('Z'), ord('X'), ord('\r'), ord('\n'), 32, ord('\n')) #Add file identification
+ write_file_bytes += struct.pack("i", 1) #Add version
+ write_file_bytes += struct.pack("i", len(file_bytes)) #Add number of objects
+ for obj_bytes in file_bytes:
+ write_file_bytes += obj_bytes #Add contents
+ write_file_bytes += struct.pack("i", len(write_file_bytes)) #Add size of file
+ write_file_bytes += struct.pack("BBB", ord('F'), ord('Z'), ord('X')) #Add file end marker
+ return write_file_bytes
+
+def WriteFZX(filepath, data):
+ file = open(filepath, "wb")
+ file.write(data)
+ file.close()
+
+def Save(filepath):
+ data = GetWriteStream()
+ if not data:
+ return
+ WriteFZX(filepath, data)
+
+class WolfireFZXExporter(bpy.types.Operator):
+ '''Save Phoenix bone physics primitives'''
+ bl_idname = "export_physics.fzx"
+ bl_label = "Export FZX"
+
+ filepath = StringProperty(name="File Path", description="Filepath used for exporting the FZX file", maxlen= 1024, default= "")
+
+ check_existing = BoolProperty(name="Check Existing", description="Check and warn on overwriting existing files", default=True, options={'HIDDEN'})
+
+ def execute(self, context):
+ Save(self.properties.filepath)
+ return {'FINISHED'}
+
+ def invoke(self, context, event):
+ wm = context.window_manager
+ wm.add_fileselect(self)
+ return {'RUNNING_MODAL'}
+
+bl_addon_info = {
+ 'name': 'Import/Export: Phoenix collision physics (.fzx)...',
+ 'author': 'David Rosen',
+ 'version': '0.1',
+ 'blender': (2, 5, 5),
+ 'location': 'File > Import/Export > FZX',
+ 'description': 'Export Phoenix collision physics (.fzx format)',
+ 'warning': '', # used for warning icon and text in addons panel
+ 'category': 'Import/Export'}
+
+import bpy
+
+def menu_export(self, context):
+ import os
+ default_path = os.path.splitext(bpy.data.filepath)[0] + ".fzx"
+ self.layout.operator(WolfireFZXExporter.bl_idname, text="Phoenix Collision Physics (.fzx)").filepath = default_path
+
+def register():
+ bpy.types.INFO_MT_file_export.append(menu_export)
+
+def unregister():
+ bpy.types.INFO_MT_file_export.remove(menu_export)
+
+if __name__ == "__main__":
+ register()
diff --git a/Data/BlenderScript/addons/fixedconstraint.py b/Data/BlenderScript/addons/fixedconstraint.py
new file mode 100644
index 00000000..dbbb50cb
--- /dev/null
+++ b/Data/BlenderScript/addons/fixedconstraint.py
@@ -0,0 +1,72 @@
+from mathutils import Matrix, Vector
+import math
+
+"""
+This script adds a Phoenix fixed constraint
+"""
+
+bl_addon_info = {
+ 'name': 'Armature: Add fixed constraint',
+ 'author': 'David Rosen',
+ 'version': '1',
+ 'blender': (2, 5, 4),
+ 'location': 'Armature > Fixed constraint',
+ 'description': 'This script adds a Phoenix fixed constraint',
+ 'warning': '', # used for warning icon and text in addons panel
+ 'wiki_url': '',
+ 'tracker_url': '',
+ 'category': 'Armature'}
+
+import bpy
+
+def FixBoneRoll(context):
+ obj = bpy.context.scene.objects.active
+ bone = obj.data.edit_bones.active
+ if not bone:
+ print("Need to select a bone first...")
+ return
+
+ name = bone.name
+
+ bone = obj.data.edit_bones[name]
+ pose_bone = obj.pose.bones[bone.name]
+
+ constraint = pose_bone.constraints.new('LIMIT_ROTATION')
+ constraint.min_x = 0
+ constraint.max_x = 0
+ constraint.min_y = 0
+ constraint.max_y = 0
+ constraint.min_z = 0
+ constraint.max_z = 0
+ constraint.use_limit_x = True
+ constraint.use_limit_y = True
+ constraint.use_limit_z = True
+ constraint.name = "RGDL_Limit Rotation"
+ constraint.influence = 0.0
+ constraint.owner_space = 'LOCAL'
+
+class FixedConstraintOp(bpy.types.Operator):
+ '''Add fixed constraint'''
+ bl_idname = 'armature.fixedconstraint'
+ bl_label = 'Add fixed constraint'
+ bl_options = {'REGISTER', 'UNDO'}
+
+ @classmethod
+ def poll(self, context):
+ obj = context.active_object
+ return (obj and obj.type == 'ARMATURE')
+
+ def execute(self, context):
+ FixBoneRoll(context)
+ return {'FINISHED'}
+
+menu_func = (lambda self, context: self.layout.operator(FixedConstraintOp.bl_idname, text="Fixed constraint"))
+
+def register():
+ bpy.types.VIEW3D_MT_edit_armature.append(menu_func)
+
+def unregister():
+ bpy.types.VIEW3D_MT_edit_armature.remove(menu_func)
+
+if __name__ == "__main__":
+ register()
diff --git a/Data/BlenderScript/addons/genericconstraint.py b/Data/BlenderScript/addons/genericconstraint.py
new file mode 100644
index 00000000..afe39153
--- /dev/null
+++ b/Data/BlenderScript/addons/genericconstraint.py
@@ -0,0 +1,102 @@
+from mathutils import Matrix, Vector
+import math
+
+"""
+This script adds a Phoenix generic rotation constraint
+"""
+
+bl_addon_info = {
+ 'name': 'Armature: Add generic constraint',
+ 'author': 'David Rosen',
+ 'version': '1',
+ 'blender': (2, 5, 4),
+ 'location': 'Armature > Generic constraint',
+ 'description': 'This script adds a Phoenix generic rotation constraint',
+ 'warning': '', # used for warning icon and text in addons panel
+ 'wiki_url': '',
+ 'tracker_url': '',
+ 'category': 'Armature'}
+
+import bpy
+
+def GetD6Mat(edit_bone):
+ vec = (edit_bone.tail - edit_bone.head).normalize()
+ vec = Vector((vec[0], vec[2], -vec[1]))
+ right = Vector((0.0001,1.0001,0.000003))
+ up = vec.cross(right).normalize()
+ right = up.cross(vec).normalize()
+
+ return [right[0], right[1], right[2], 0,
+ up[0], up[1], up[2], 0,
+ vec[0],vec[1], vec[2], 0,
+ 0,0,0,1]
+
+def FixBoneRoll(context):
+ obj = bpy.context.scene.objects.active
+ bone = obj.data.edit_bones.active
+ if not bone:
+ print("Need to select a bone first...")
+ return
+
+ name = bone.name
+
+ bpy.ops.object.mode_set(mode='POSE')
+ bpy.ops.object.mode_set(mode='EDIT')
+
+ bone = obj.data.edit_bones[name]
+
+ matrix = GetD6Mat(bone)
+ joint_axis = Vector((matrix[0], -matrix[2], matrix[1]))
+
+ pose_bone = obj.pose.bones[bone.name]
+
+ mat = obj.matrix_world * pose_bone.matrix
+ x_axis = Vector((mat[0][0], mat[0][1], mat[0][2])).normalize()
+ y_axis = Vector((mat[2][0], mat[2][1], mat[2][2])).normalize()
+ joint_conv = Vector((-x_axis.dot(joint_axis), -y_axis.dot(joint_axis), 0)).normalize()
+ roll = math.atan2(joint_conv[0], joint_conv[1])
+
+ print("Changing "+bone.name+" roll from "+str(bone.roll)+" to "+str(bone.roll+roll))
+ bone.roll += roll
+
+ pose_bone.rotation_mode = 'YXZ'
+
+ constraint = pose_bone.constraints.new('LIMIT_ROTATION')
+ constraint.min_x = -0.01745
+ constraint.max_x = 0.01745
+ constraint.min_y = -0.01745
+ constraint.max_y = 0.01745
+ constraint.min_z = -0.01745
+ constraint.max_z = 0.01745
+ constraint.name = "RGDL_Limit Rotation"
+ constraint.use_limit_x = True
+ constraint.use_limit_y = True
+ constraint.use_limit_z = True
+ constraint.influence = 0.0
+ constraint.owner_space = 'LOCAL'
+
+class BoneRollOp(bpy.types.Operator):
+ '''Add generic rotation constraint'''
+ bl_idname = 'armature.genericconstraint'
+ bl_label = 'Add generic rotation constraint'
+ bl_options = {'REGISTER', 'UNDO'}
+
+ @classmethod
+ def poll(self, context):
+ obj = context.active_object
+ return (obj and obj.type == 'ARMATURE')
+
+ def execute(self, context):
+ FixBoneRoll(context)
+ return {'FINISHED'}
+
+menu_func = (lambda self, context: self.layout.operator(BoneRollOp.bl_idname, text="Rotation constraint"))
+
+def register():
+ bpy.types.VIEW3D_MT_edit_armature.append(menu_func)
+
+def unregister():
+ bpy.types.VIEW3D_MT_edit_armature.remove(menu_func)
+
+if __name__ == "__main__":
+ register()
diff --git a/Data/BlenderScript/addons/hingeconstraint.py b/Data/BlenderScript/addons/hingeconstraint.py
new file mode 100644
index 00000000..979ef88e
--- /dev/null
+++ b/Data/BlenderScript/addons/hingeconstraint.py
@@ -0,0 +1,74 @@
+from mathutils import Matrix, Vector
+import math
+
+"""
+This script adds a Phoenix hinge constraint
+"""
+
+bl_addon_info = {
+ 'name': 'Armature: Add hinge constraint',
+ 'author': 'David Rosen',
+ 'version': '1',
+ 'blender': (2, 5, 4),
+ 'location': 'Armature > Hinge constraint',
+ 'description': 'This script adds a Phoenix hinge constraint',
+ 'warning': '', # used for warning icon and text in addons panel
+ 'wiki_url': '',
+ 'tracker_url': '',
+ 'category': 'Armature'}
+
+import bpy
+
+def FixBoneRoll(context):
+ obj = bpy.context.scene.objects.active
+ bone = obj.data.edit_bones.active
+ if not bone:
+ print("Need to select a bone first...")
+ return
+
+ name = bone.name
+
+ bone = obj.data.edit_bones[name]
+ pose_bone = obj.pose.bones[bone.name]
+
+ pose_bone.rotation_mode = 'XYZ'
+
+ constraint = pose_bone.constraints.new('LIMIT_ROTATION')
+ constraint.min_x = -0.01745
+ constraint.max_x = 0.01745
+ constraint.min_y = 0
+ constraint.max_y = 0
+ constraint.min_z = 0
+ constraint.max_z = 0
+ constraint.name = "RGDL_Limit Rotation"
+ constraint.use_limit_x = True
+ constraint.use_limit_y = True
+ constraint.use_limit_z = True
+ constraint.influence = 0.0
+ constraint.owner_space = 'LOCAL'
+
+class HingeConstraintOp(bpy.types.Operator):
+ '''Add hinge constraint'''
+ bl_idname = 'armature.hingeconstraint'
+ bl_label = 'Add hinge constraint'
+ bl_options = {'REGISTER', 'UNDO'}
+
+ @classmethod
+ def poll(self, context):
+ obj = context.active_object
+ return (obj and obj.type == 'ARMATURE')
+
+ def execute(self, context):
+ FixBoneRoll(context)
+ return {'FINISHED'}
+
+menu_func = (lambda self, context: self.layout.operator(HingeConstraintOp.bl_idname, text="Hinge constraint"))
+
+def register():
+ bpy.types.VIEW3D_MT_edit_armature.append(menu_func)
+
+def unregister():
+ bpy.types.VIEW3D_MT_edit_armature.remove(menu_func)
+
+if __name__ == "__main__":
+ register()
diff --git a/Data/BlenderScript/addons/io_anm/__init__.py b/Data/BlenderScript/addons/io_anm/__init__.py
new file mode 100644
index 00000000..5fd0af04
--- /dev/null
+++ b/Data/BlenderScript/addons/io_anm/__init__.py
@@ -0,0 +1,38 @@
+bl_addon_info = {
+ 'name': 'Import/Export: Phoenix animation (.anm)...',
+ 'author': 'David Rosen',
+ 'version': '0.1',
+ 'blender': (2, 5, 4),
+ 'location': 'File > Import/Export > ANM',
+ 'description': 'Import Phoenix animation (.anm format)',
+ 'warning': '', # used for warning icon and text in addons panel
+ 'category': 'Import/Export'}
+
+import bpy
+
+def menu_import(self, context):
+ from io_anm import import_anm
+ self.layout.operator(import_anm.ANMImporter.bl_idname, text="Phoenix Animation (.anm)").filepath = "*.anm"
+
+def menu_export(self, context):
+ from io_anm import export_anm
+ import os
+ default_path = os.path.splitext(bpy.data.filepath)[0] + ".anm"
+ self.layout.operator(export_anm.ANMExporter.bl_idname, text="Phoenix Animation (.anm)").filepath = default_path
+
+def register():
+ from io_anm import import_anm, export_anm
+ bpy.types.register(import_anm.ANMImporter)
+ bpy.types.register(export_anm.ANMExporter)
+ bpy.types.INFO_MT_file_import.append(menu_import)
+ bpy.types.INFO_MT_file_export.append(menu_export)
+
+def unregister():
+ from io_anm import import_anm, export_anm
+ #bpy.types.unregister(import_anm.ANMImporter)
+ #bpy.types.unregister(export_anm.ANMExporter)
+ bpy.types.INFO_MT_file_import.remove(menu_import)
+ bpy.types.INFO_MT_file_export.remove(menu_export)
+
+if __name__ == "__main__":
+ register()
diff --git a/Data/BlenderScript/addons/io_anm/anm_types.py b/Data/BlenderScript/addons/io_anm/anm_types.py
new file mode 100644
index 00000000..50abdc6e
--- /dev/null
+++ b/Data/BlenderScript/addons/io_anm/anm_types.py
@@ -0,0 +1,64 @@
+import array
+
+def Get4ByteIntArray():
+ var = array.array('l')
+ if var.itemsize != 4:
+ var = array.array('i')
+ return var
+
+class ShapeKey:
+ def __init__(self):
+ self.weight = 0.0
+ self.string = "null"
+ def __repr__(self):
+ return "(\""+self.string+"\", "+str(self.weight)+")"
+
+class StatusKey:
+ def __init__(self):
+ self.weight = 0.0
+ self.string = "null"
+ def __repr__(self):
+ return "(\""+self.string+"\", "+str(self.weight)+")"
+
+class IKBone:
+ def __init__(self):
+ self.start = array.array('f')
+ self.end = array.array('f')
+ self.bone_path = Get4ByteIntArray()
+ self.string = "null"
+ def __repr__(self):
+ return "(\""+self.string+"\", "+str(self.bone_path)+")"
+
+class Event:
+ def __init__(self):
+ self.which_bone = 0
+ self.string = ""
+ def __repr__(self):
+ return "(\""+self.string+"\", "+str(self.which_bone)+")"
+
+
+class Keyframe:
+ def __init__(self):
+ self.time = 0
+ self.weights = array.array('f')
+ self.mobility_mat = array.array('f')
+ self.mats = []
+ self.weapon_mats = []
+ self.weap_relative_ids = []
+ self.weap_relative_weights = []
+ self.events = []
+ self.ik_bones = []
+ self.shape_keys = []
+ self.status_keys = []
+ def __repr__(self):
+ return "("+str(self.time)+", "+str(len(self.mats))+")"
+
+class ANMdata:
+ def __init__(self):
+ self.version = 0
+ self.looping = 0
+ self.start = 0
+ self.end = 0
+ self.keyframes = []
+ self.name = ""
+
diff --git a/Data/BlenderScript/addons/io_anm/export_anm.py b/Data/BlenderScript/addons/io_anm/export_anm.py
new file mode 100644
index 00000000..493a7ff0
--- /dev/null
+++ b/Data/BlenderScript/addons/io_anm/export_anm.py
@@ -0,0 +1,822 @@
+import array
+import bpy
+from bpy.props import *
+from io_anm import anm_types
+import operator
+from mathutils import Matrix, Euler, Vector, Quaternion
+
# Armature bone-layer indices used to classify bones for export.
BONE_DEF_LAYER = 29    # layer marking skeleton (deformation) bones
MOBILITY_LAYER = 25    # presumably the layer of the "mobility" helper bone -- TODO confirm (unused below)
WEAPON_DEF_LAYER = 24  # layer marking weapon bones
WEAPON_VIEW_LAYER = 16 # armature layer toggled to show weapons; hidden => weapons not exported
NUM_LAYERS = 32        # Blender armatures expose 32 bone layers
+
def Get4ByteIntArray(): # Creates an array with 4-byte components,
    """Return an empty integer array whose items are exactly 4 bytes,
    regardless of whether that means 'l' or 'i' on this platform."""
    probe = array.array('l')
    return probe if probe.itemsize == 4 else array.array('i')
+
def AddConnectingBones(arm_data): # Add a bone between disconnected parent-child pairs,
    """Insert "connector" bones so every deform parent-child pair is
    physically joined (parent tail touches child head), e.g. head and ears.
    Must be called in armature EDIT mode."""
    for bone in arm_data.edit_bones: # such as the head and the ears.
        # Skip bones with no parent, bones touching the root, bones outside
        # the deform layer, and pairs that are already connected.
        if not bone.parent or \
           bone.name == "root" or \
           bone.parent.name == "root" or \
           bone.layers[BONE_DEF_LAYER] == False or \
           bone.parent.layers[BONE_DEF_LAYER] == False or \
           bone.head == bone.parent.tail:
            continue
        bpy.ops.armature.bone_primitive_add(name="connector")
        new_bone = arm_data.edit_bones[-1]  # the bone just added is last
        # The connector spans the gap from the parent's tail to this bone's head.
        new_bone.head = bone.parent.tail
        new_bone.tail = bone.head
        new_bone.parent = bone.parent
        # Put the connector on the deform layer only.
        for i in range(NUM_LAYERS):
            new_bone.layers[i] = False
        new_bone.layers[BONE_DEF_LAYER] = True
        bone.parent = new_bone

    bpy.context.scene.update()
    # Toggle modes so Blender rebuilds bone data after the edits.
    bpy.ops.object.mode_set(mode='OBJECT')
    bpy.ops.object.mode_set(mode='EDIT')
+
class BoneNameTranslator():
    """Renames armature bones to canonical "Bone_<id>" / "Weap_<id>" labels
    and keeps two-way maps between descriptive and canonical names.
    Counts weapon bones in self.num_weapons."""
    def __init__(self, arm_data):
        bone_labels = {} # {int bone_id, bool exists}
        self.to_bone_map = {} # {string desc_label, string bone_id_label}
        self.from_bone_map = {} # {string bone_id_label, string desc_label}
        self.num_weapons = 0

        for bone in arm_data.bones: # Handle bones that are already named "Bone_#"
            if bone.name == "root" or bone.layers[BONE_DEF_LAYER] == False:
                continue
            if bone.name.split("_")[0] == "Bone":
                bone_id = int(bone.name.split("_")[-1])
                bone_labels[bone_id] = True  # reserve the explicit id
                self.to_bone_map[bone.name] = bone.name
                self.from_bone_map[bone.name] = bone.name

        unused_bone_id = 0
        for bone in arm_data.bones: # Rename and record all skeleton bones
            if bone.name == "root" or bone.layers[BONE_DEF_LAYER] == False:
                continue
            if bone.name.split("_")[0] != "Bone":
                # Find the lowest id not reserved by a literally-named bone.
                while unused_bone_id in bone_labels:
                    unused_bone_id = unused_bone_id + 1
                self.from_bone_map["Bone_"+str(unused_bone_id)] = bone.name
                self.to_bone_map[bone.name] = "Bone_"+str(unused_bone_id)
                bone.name = "Bone_"+str(unused_bone_id)
                unused_bone_id = unused_bone_id+1

        unused_weapon_id = 0
        for bone in arm_data.bones: # Rename and record all weapon bones
            if bone.name == "root" or \
               bone.layers[WEAPON_DEF_LAYER] == False or \
               bone.name.split('.')[0] == "connector":
                continue
            if bone.name.split("_")[0] != "Weap":
                # NOTE(review): weapon ids are checked against bone_labels,
                # which only records "Bone_#" ids, and pre-named "Weap_#"
                # bones are never reserved -- confirm duplicate weapon ids
                # cannot occur.
                while unused_weapon_id in bone_labels:
                    unused_weapon_id = unused_weapon_id + 1
                self.from_bone_map["Weap_"+str(unused_weapon_id)] = bone.name
                self.to_bone_map[bone.name] = "Weap_"+str(unused_weapon_id)
                bone.name = "Weap_"+str(unused_weapon_id)
                unused_weapon_id += 1
                self.num_weapons += 1
    def ToBoneName(self,name):
        # Descriptive name -> canonical label, or None if unmapped.
        return self.to_bone_map.get(name)
    def FromBoneName(self,name):
        # Canonical label -> descriptive name, or None if unmapped.
        return self.from_bone_map.get(name)
+
def to_array(array, idarray): #Populate an array with elements from a list or dictionary
    """Append every element of *idarray* (any iterable; a dict contributes
    its keys) to *array*, in iteration order.

    Note: the first parameter shadows the stdlib ``array`` module inside
    this function; the name is kept for interface compatibility.
    """
    array.extend(idarray)
+
def GetInitialBoneMatZ(edit_bone): #Get initial bone matrix that is facing the z axis
    """Build a 16-float rest matrix for a bone whose direction is near the
    y axis, using world z as the reference up vector instead.

    NOTE(review): relies on the Blender 2.5x mathutils API where
    Vector.normalize() returned the normalized vector; modern mathutils
    normalizes in place and returns None -- confirm the target Blender version.
    """
    z_vec = (edit_bone.tail - edit_bone.head).normalize()
    x_vec = Vector((0,0,1))
    y_vec = x_vec.cross(z_vec).normalize()
    x_vec = y_vec.cross(z_vec).normalize()

    # Components are reordered (x, z, -y) -- appears to convert Blender's
    # z-up axes to the engine's convention; confirm.
    return [x_vec[0], x_vec[2], -x_vec[1], 0,
            y_vec[0], y_vec[2], -y_vec[1], 0,
            z_vec[0], z_vec[2], -z_vec[1], 0,
            0,0,0,1]
+
+
def GetInitialBoneMat(edit_bone): #Get initial bone matrix that is facing the y axis (or z if needed)
    """Build a 16-float rest matrix for a bone from its head->tail direction,
    using world y as the reference vector; falls back to GetInitialBoneMatZ
    when the bone is nearly parallel to y (|dot| > 0.95).

    NOTE(review): uses the Blender 2.5x mathutils API where normalize()
    returned the vector; confirm target Blender version.
    """
    z_vec = (edit_bone.tail - edit_bone.head).normalize()
    y_vec = Vector((0,1,0))
    if abs(z_vec.dot(y_vec)) > 0.95:
        return GetInitialBoneMatZ(edit_bone)
    x_vec = z_vec.cross(y_vec).normalize()
    y_vec = z_vec.cross(x_vec).normalize()

    # Same (x, z, -y) component reordering as GetInitialBoneMatZ.
    return [x_vec[0], x_vec[2], -x_vec[1], 0,
            y_vec[0], y_vec[2], -y_vec[1], 0,
            z_vec[0], z_vec[2], -z_vec[1], 0,
            0,0,0,1]
+
initial_bone_mats = {} # { string name, float mat[16] }
def CalcInitialBoneMats(data): # Calculate initial bone matrices if they are not already provided
    """Fill the module-level initial_bone_mats cache for every edit bone.

    A bone may carry a custom "mat" property (its rest matrix, e.g. set by
    the PHXBN importer); otherwise a matrix is derived from head/tail.
    """
    for bone in data.edit_bones:
        if not bone.get("mat"):
            initial_bone_mats[bone.name] = GetInitialBoneMat(bone)
        else:
            initial_bone_mats[bone.name] = bone["mat"]
+
def GetBoneMatrixRotation(arm_data, num, rotations, rotation_modes):
    """Return the world-space rotation matrix for bone "Bone_<num>" by
    converting its sampled local rotation through bone space, applying the
    cached initial (rest) matrix, and recursing up the parent chain.

    NOTE(review): uses Blender 2.5x mathutils semantics throughout --
    normalize()/invert() returning values and Matrix(row, row, row, row)
    constructor; modern mathutils differs. Confirm target API version.
    """
    bone = arm_data.bones["Bone_"+str(num)]
    if rotation_modes[num] == 'QUATERNION':
        matrix = Quaternion(rotations[num]).normalize().to_matrix()
    else:
        matrix = Euler(rotations[num][0:3],rotation_modes[num]).to_matrix()

    # Convert matrix to bone space
    matrix = bone.matrix_local.to_3x3() * matrix * bone.matrix_local.copy().to_3x3().invert()

    # Convert matrix to parent space by applying initial bone rotation
    if initial_bone_mats[bone.name]:
        mat = array.array('f')
        to_array(mat, initial_bone_mats[bone.name])
        # Undo the (x, z, -y) component reordering stored in the cache.
        initial_matrix = Matrix([mat[0], -mat[2], mat[1], mat[3]],
                                [mat[4], -mat[6], mat[5], mat[7]],
                                [mat[8], -mat[10], mat[9], mat[11]],
                                [mat[12], -mat[14], mat[13], mat[15]])
        matrix = matrix.to_4x4() * initial_matrix
    else:
        print("Could not find: " + bone.name)

    # Convert matrix to world space by recursively applying all parent rotations
    parent = bone.parent
    if not parent or parent.name == "root" or parent.layers[BONE_DEF_LAYER] == False or bone.layers[BONE_DEF_LAYER] == False:
        return matrix
    if initial_bone_mats.get(parent.name):
        mat = array.array('f')
        to_array(mat, initial_bone_mats[parent.name])
        parent_initial_matrix = Matrix([mat[0], -mat[2], mat[1], mat[3]],
                                       [mat[4], -mat[6], mat[5], mat[7]],
                                       [mat[8], -mat[10], mat[9], mat[11]],
                                       [mat[12], -mat[14], mat[13], mat[15]])

    # NOTE(review): if the parent has no initial_bone_mats entry,
    # parent_initial_matrix is unbound below -> NameError risk.
    parent_num = int(parent.name.split("_")[-1])
    parent_matrix = GetBoneMatrixRotation(arm_data, parent_num, rotations, rotation_modes)

    matrix = parent_matrix * parent_initial_matrix.copy().invert() * matrix
    return matrix
+
def GetBoneMatrixTranslation(arm_data, num, rotations, rotation_modes):
    """Return the world-space transform for bone "Bone_<num>" built from its
    sampled local rotation and rest matrix, recursing up the parent chain.
    Used to obtain bone positions (the rotation part comes from
    GetBoneMatrixRotation instead).

    NOTE(review): 2.5x mathutils semantics (normalize()/invert() return
    values, Matrix.resize4x4) -- confirm target API version.
    """
    bone = arm_data.bones["Bone_"+str(num)]

    if rotation_modes[num] == 'QUATERNION':
        matrix = Quaternion(rotations[num]).normalize().to_matrix()
    else:
        matrix = Euler(rotations[num][0:3],rotation_modes[num]).to_matrix()
    matrix = bone.matrix_local * matrix.resize4x4()

    # Convert matrix to world space by recursively applying all parent rotations
    parent = bone.parent
    if not parent or parent.name == "root" or parent.layers[BONE_DEF_LAYER] == False or bone.layers[BONE_DEF_LAYER] == False:
        return matrix

    parent_num = int(parent.name.split("_")[-1])
    parent_matrix = GetBoneMatrixTranslation(arm_data, parent_num, rotations, rotation_modes) * parent.matrix_local.copy().invert()

    matrix = parent_matrix * matrix

    return matrix
+
def GetNumJoints(arm_obj):
    """Count the physics joints on an armature object.

    A joint is any pose-bone constraint of type LIMIT_ROTATION that
    operates in LOCAL space.
    """
    return sum(
        1
        for pose_bone in arm_obj.pose.bones
        for con in pose_bone.constraints
        if con.type == 'LIMIT_ROTATION' and con.owner_space == 'LOCAL'
    )
+
def KeyframeFromPose(arm_obj, rotations, rotation_modes, translation, num_joints):
    """Convert sampled per-bone rotations plus a root translation into an
    anm_types.Keyframe of 16-float bone matrices.

    num_joints is accepted but not used in this function.
    """
    arm_data = arm_obj.data
    matrices = []  # NOTE(review): never used

    keyframe = anm_types.Keyframe()
    for num in range(len(rotations)):
        # Get rotation matrix, then set translation
        matrix = GetBoneMatrixRotation(arm_data, num, rotations, rotation_modes)
        trans_matrix = GetBoneMatrixTranslation(arm_data, num, rotations, rotation_modes)

        bone = arm_data.bones["Bone_"+str(num)]
        # Position the matrix at the bone's midpoint, then add the root motion.
        matrix[3] = trans_matrix * Vector((0,bone.length*0.5,0,1));
        #matrix[3] = matrix[3] + Vector((translation[1], -translation[0], translation[2], 0))
        matrix[3] = matrix[3] + Vector((translation[0], translation[1], translation[2], 0))
        '''bpy.ops.object.add(type='EMPTY', location=(matrix[3][0]+3,matrix[3][1],matrix[3][2]))
        ob = bpy.context.scene.objects.active
        ob.empty_draw_type = 'ARROWS'
        ob.empty_draw_size = 0.1
        ob.rotation_mode = 'QUATERNION'
        ob.rotation_quaternion = matrix.rotation_part().to_quat()'''

        # Reorder components (x, z, -y) into the .anm matrix layout.
        mat = array.array('f')
        mat.fromlist([matrix[0][0], matrix[0][2], -matrix[0][1], matrix[0][3],
                      matrix[1][0], matrix[1][2], -matrix[1][1], matrix[1][3],
                      matrix[2][0], matrix[2][2], -matrix[2][1], matrix[2][3],
                      matrix[3][0], matrix[3][2], -matrix[3][1], matrix[3][3]])
        keyframe.mats.append(mat)

    return keyframe
+
+
class KeyframeInfo:
    """Accumulates raw fcurve samples for one keyframe time before they are
    converted into an anm_types.Keyframe."""
    def __init__(self):
        self.weap_relative_weight = {}  # {weapon id: CHILD_OF influence}
        self.weap_relative_id = {}      # {weapon id: parent bone id}
        self.weap_translations = {}     # {weapon id: [x, y, z]}
        self.weap_rotations = {}        # {weapon id: [w, x, y, z]}
        self.mobility_translation = {}  # {axis index: value}
        self.mobility_rotation = {}     # {component index: value}
        self.rotations = {}             # {bone id: rotation components}
        self.rotation_modes = {}        # {bone id: 'QUATERNION' or euler mode}
        self.weights = {}               # {bone id: blend weight}
        self.translation = array.array('f', [0.0, 0.0, 0.0])         # root translation
        self.translation_offset = array.array('f', [0.0, 0.0, 0.0])  # root-child offset
        self.ik_bones = []
        self.shape_keys = []
        self.status_keys = []
        self.events = []
    def __repr__(self):
        parts = []
        for bone_id in self.rotations:
            parts.append("Bone " + str(bone_id) + ": \n")
            parts.append(str(self.rotations[bone_id]))
            parts.append("\n")
        return "".join(parts)
+
old_trans = {} # {string, Vector()} Initial translation of each bone

# We are at a specific frame, so read back the bone matrix info
def BakeBoneConstraints(bone,obj,old_obj,parent_matrix):
    """Recursively copy the constraint-evaluated pose of *bone* (read from
    old_obj at the current frame) onto obj as plain loc/rot keyframes.

    parent_matrix is the accumulated transform of the already-processed
    ancestors; the global old_trans map must hold each bone's rest offset.
    NOTE(review): 2.5x mathutils semantics (invert()/normalize()/to_quat()
    returning values) -- confirm target API version.
    """
    pose_bone = old_obj.pose.bones[bone.name]

    inv_bone_matrix = bone.matrix_local.copy().invert()
    inv_parent_matrix = parent_matrix.copy().invert()

    # Express the evaluated pose in this bone's local space.
    matrix_local = inv_bone_matrix * inv_parent_matrix * pose_bone.matrix

    # Set keyframes for local translation and rotation values
    new_pose_bone = obj.pose.bones[bone.name]
    new_pose_bone.location = matrix_local.translation_part()
    new_pose_bone.keyframe_insert("location")

    if new_pose_bone.rotation_mode == 'QUATERNION':
        new_pose_bone.rotation_quaternion = matrix_local.to_quat().normalize()
        new_pose_bone.keyframe_insert("rotation_quaternion")
    else:
        new_pose_bone.rotation_euler = matrix_local.to_euler(pose_bone.rotation_mode)
        new_pose_bone.keyframe_insert("rotation_euler")

    # Accumulated transform passed down to the children.
    matrix = parent_matrix * bone.matrix_local * new_pose_bone.matrix_local * inv_bone_matrix

    if bone.name != "root" and bone.layers[BONE_DEF_LAYER] == True:
        mat = pose_bone.matrix
        new_trans = mat.translation_part()
        # NOTE(review): the *2 factor on the rest offset is unexplained --
        # confirm it is intentional.
        new_pose_bone.location = new_trans - old_trans[bone.name]*2
        new_pose_bone.keyframe_insert("location")

    if bone.layers[WEAPON_DEF_LAYER] == True or bone.name == "mobility":
        # Weapon and mobility bones keep their evaluated armature-space
        # transform directly.
        old_pose_bone = old_obj.pose.bones[bone.name]
        new_pose_bone = obj.pose.bones[bone.name]
        new_pose_bone.location = (old_pose_bone.matrix[3][0],old_pose_bone.matrix[3][1],old_pose_bone.matrix[3][2])
        new_pose_bone.keyframe_insert("location")
        new_pose_bone.rotation_quaternion = old_pose_bone.matrix.to_quat().normalize()
        new_pose_bone.keyframe_insert("rotation_quaternion")

    for child in bone.children:
        BakeBoneConstraints(child,obj,old_obj,matrix)
+
#stretch_amount = 600
stretch_amount = 1  # time-scale factor applied while baking (1 = no scaling)

# Walk through each keyframe and bake all constraints
def BakeConstraints(obj):
    """Bake constraint-driven motion on armature *obj* into plain keyframes.

    Keyframe times are scaled by stretch_amount and snapped to whole frames,
    each keyed frame is visited so Blender evaluates the constraints, the
    results are written back via BakeBoneConstraints, and the time scaling
    is then undone.
    """
    action = obj.animation_data.action

    times = {}  # set of (scaled) frame numbers that carry a key

    # Scale and integer-snap every key so each lands on a whole frame.
    for fcurve in action.fcurves:
        for key in fcurve.keyframe_points:
            key.co[0] = int(key.co[0]*stretch_amount)
            key.handle_left[0] = int(key.handle_left[0]*stretch_amount)
            key.handle_right[0] = int(key.handle_right[0]*stretch_amount)
            times[key.co[0]] = True

    sorted_times = []
    for time in times:
        sorted_times.append(time)
    sorted_times.sort()

    # Duplicate the armature: evaluated poses are read from the duplicate
    # while baked keys are written onto the original.
    bpy.ops.object.duplicate()
    new_obj = bpy.context.scene.objects.active

    # Go to each frame in sequence so Blender calculates the bone matrices
    frame = bpy.context.scene.frame_current
    for time in sorted_times:
        num = int(time)
        bpy.context.scene.frame_set(num)
        BakeBoneConstraints(obj.data.bones["root"], obj, new_obj, Matrix())
    bpy.context.scene.frame_set(frame)  # restore the user's current frame
    '''
    action = new_obj.animation_data.action

    bpy.ops.object.mode_set(mode='OBJECT')
    bpy.ops.object.select_all(action = 'DESELECT')
    bpy.context.scene.objects.active = new_obj
    bpy.context.scene.objects.active.select = True'''
    bpy.ops.object.delete()  # delete the duplicate (still the active object)
    bpy.context.scene.objects.active = obj

    action.user_clear()

    # Undo the time scaling applied above.
    for fcurve in action.fcurves:
        for key in fcurve.keyframe_points:
            key.co[0] /= stretch_amount
            key.handle_left[0] /= stretch_amount
            key.handle_right[0] /= stretch_amount

    # Re-parent deform bones past any non-deform ancestors.
    bpy.ops.object.mode_set(mode='EDIT')
    for bone in obj.data.edit_bones:
        if bone.layers[BONE_DEF_LAYER] == False:
            continue
        while bone.parent and bone.parent.layers[BONE_DEF_LAYER] == False and bone.parent.parent:
            bone.parent = bone.parent.parent
    bpy.context.scene.update()
    bpy.ops.object.mode_set(mode='OBJECT')
    bpy.ops.object.mode_set(mode='EDIT')
+
def GetAnimation(filepath):
    """Extract the active armature's action as an anm_types.ANMdata.

    Operates on a duplicate of the selected armature: adds connector bones,
    renames bones to canonical "Bone_<id>"/"Weap_<id>" labels, bakes
    constraints, samples every fcurve at every keyframe time (times in ms),
    and converts the samples into engine-space matrices. Returns None when
    no armature is selected or the armature has no action.
    """
    old_arm_obj = bpy.context.scene.objects.active

    if old_arm_obj.type != 'ARMATURE':
        print("PHXBN armature must be selected")
        return

    #Create a copy of the armature object so we don't mess up the original
    bpy.ops.object.mode_set(mode='OBJECT')
    bpy.ops.object.duplicate()
    arm_obj = bpy.context.scene.objects.active

    #Add connecting bones between parent-child pairs that are not connected
    bpy.ops.object.mode_set(mode='EDIT')
    AddConnectingBones(arm_obj.data)

    #Rename relevant bones to Bone_0, Bone_1, etc
    tbr = BoneNameTranslator(arm_obj.data)

    #txt_file = open(filepath+".txt", "w")
    #for num in range(100):
    #    name = "Bone_"+str(num)
    #    if tbr.from_bone_map.get(name):
    #        txt_file.write(name+": "+tbr.from_bone_map.get(name)+'\n')

    #Get initial root translation of each bone
    for bone in arm_obj.data.edit_bones:
        old_trans[bone.name] = bone.head - arm_obj.data.edit_bones["root"].head

    #Get initial bone rotation of each bone (using OG Bullet physics orientations)
    CalcInitialBoneMats(arm_obj.data)

    #Bake the constraints into the bone matrices
    bpy.ops.object.mode_set(mode='OBJECT')
    BakeConstraints(arm_obj)

    action = arm_obj.animation_data.action
    if not action:
        print("Armature must have an action")
        return

    #Create animation data structure and start extracting the easy data
    data = anm_types.ANMdata()

    # Custom action properties override the defaults; start/end fall back
    # to the scene frame range converted to milliseconds.
    if action.get("Looping"):
        data.looping = action["Looping"]
    else:
        data.looping = False

    if action.get("Start"):
        data.start = action["Start"]
    else:
        data.start = int(bpy.context.scene.frame_start*1000/bpy.context.scene.render.fps)

    if action.get("End"):
        data.end = action["End"]
    else:
        data.end = int(bpy.context.scene.frame_end*1000/bpy.context.scene.render.fps)

    if action.get("Version"):
        data.version = action["Version"]
    else:
        data.version = 4

    keyframes = {}  # {int time in ms: KeyframeInfo}

    # Record all of the times that have keyframes on them
    pose_bones = arm_obj.pose.bones
    for fcurve in action.fcurves:
        for keyframe in fcurve.keyframe_points:
            time = round(keyframe.co[0]*1000/bpy.context.scene.render.fps)
            if not time in keyframes:
                keyframes[time] = KeyframeInfo()

    # Get an ordered list of all the keyframe times
    keyframe_times = []
    for time in keyframes:
        keyframe_times.append(time)
    keyframe_times.sort()

    # Sample each fcurve at each keyframe time
    for fcurve in action.fcurves:
        quote_split = fcurve.data_path.split("\"")
        if len(quote_split) < 2:
            continue
        # "event_<name>" custom properties become animation events on a bone.
        event_split = fcurve.data_path.split("event_")
        if len(event_split) == 2:
            event_name = event_split[1].split("\"")[0]
            for keyframe in fcurve.keyframe_points:
                time = round(keyframe.co[0]*1000/bpy.context.scene.render.fps)
                key_info = keyframes[time]
                event = []
                event.append(event_name)
                bone_name = quote_split[1]
                bone_id = int(bone_name.split("_")[-1])
                event.append(bone_id)
                key_info.events.append(event)
            continue
        # "weight" custom properties are per-bone blend weights.
        weight_split = fcurve.data_path.split("weight")
        if len(weight_split) == 2:
            for time in keyframes:
                key_info = keyframes[time]
                bone_name = quote_split[1]
                bone_id = int(bone_name.split("_")[-1])
                key_info.weights[bone_id] = fcurve.evaluate(time/1000*bpy.context.scene.render.fps)
            continue
        # "shape_<name>" custom properties are shape-key weights.
        shape_split = fcurve.data_path.split("shape_")
        if len(shape_split) == 2:
            shape_name = shape_split[1].split("\"")[0]
            for time in keyframes:
                key_info = keyframes[time]
                shape_key = []
                shape_key.append(shape_name)
                shape_key.append(fcurve.evaluate(time/1000*bpy.context.scene.render.fps))
                key_info.shape_keys.append(shape_key)
            continue
        # "status_<name>" custom properties are status-key weights.
        status_split = fcurve.data_path.split("status_")
        if len(status_split) == 2:
            status_name = status_split[1].split("\"")[0]
            for time in keyframes:
                key_info = keyframes[time]
                status = []
                status.append(status_name)
                status.append(fcurve.evaluate(time/1000*bpy.context.scene.render.fps))
                key_info.status_keys.append(status)
            continue
        # Root bone: only its location channel is sampled.
        if quote_split[1] == "root":
            if fcurve.data_path.split(".")[-1] == "location":
                array_index = fcurve.array_index
                for time in keyframes:
                    key_info = keyframes[time]
                    key_info.translation[array_index] = fcurve.evaluate(time/1000*bpy.context.scene.render.fps)
            continue
        bone_name = quote_split[1]
        # Weapon bones: location, CHILD_OF influence, quaternion rotation.
        if bone_name.split("_")[0] == "Weap":
            weap_id = int(bone_name.split("_")[-1])
            array_index = fcurve.array_index
            path_end = fcurve.data_path.split(".")[-1]
            if path_end == "location":
                print("Handling weap location "+str(weap_id))
                for time in keyframe_times:
                    val = fcurve.evaluate(time/1000*bpy.context.scene.render.fps)
                    key_info = keyframes[time]
                    if not weap_id in key_info.weap_translations:
                        key_info.weap_translations[weap_id] = [0,0,0]
                    key_info.weap_translations[weap_id][array_index] = val
                continue
            if path_end == "influence":
                # CHILD_OF influence records which bone the weapon follows.
                name = quote_split[3]
                constraint = pose_bones[bone_name].constraints[name]
                if constraint.type == 'CHILD_OF':
                    #print("Child of detected")
                    #print(fcurve.data_path)
                    #print(constraint.target)
                    #print(constraint.subtarget)

                    for time in keyframe_times:
                        val = fcurve.evaluate(time/1000*bpy.context.scene.render.fps)
                        key_info = keyframes[time]
                        key_info.weap_relative_id[weap_id] = int(constraint.subtarget.split("_")[-1])
                        key_info.weap_relative_weight[weap_id] = val
                        #print(str(time) + " " + str(key_info.weap_relative_id[weap_id]) + " " + str(key_info.weap_relative_weight[weap_id]))
                continue
            if path_end != "rotation_quaternion":
                continue
            for time in keyframe_times:
                val = fcurve.evaluate(time/1000*bpy.context.scene.render.fps)
                key_info = keyframes[time]
                if not weap_id in key_info.weap_rotations:
                    key_info.weap_rotations[weap_id] = [0,0,0,0]
                key_info.weap_rotations[weap_id][array_index] = val
        # The "mobility" helper bone supplies an extra transform per keyframe.
        if bone_name == "mobility":
            array_index = fcurve.array_index
            path_end = fcurve.data_path.split(".")[-1]
            if path_end == "location":
                print("Handling mobility location "+str(array_index))
                for time in keyframe_times:
                    val = fcurve.evaluate(time/1000*bpy.context.scene.render.fps)
                    key_info = keyframes[time]
                    key_info.mobility_translation[array_index] = val
                continue
            if path_end == "rotation_quaternion":
                print("Handling mobility rotation "+str(array_index))
                for time in keyframe_times:
                    val = fcurve.evaluate(time/1000*bpy.context.scene.render.fps)
                    key_info = keyframes[time]
                    key_info.mobility_rotation[array_index] = val
        # Everything else must be a skeleton bone ("Bone_<id>").
        if not bone_name.split("_")[0] == "Bone":
            continue
        bone_id = int(bone_name.split("_")[-1])
        array_index = fcurve.array_index
        path_end = fcurve.data_path.split(".")[-1]
        # Targetless IK constraints with influence > 0.5 become IK records.
        if path_end == "influence":
            name = quote_split[3]
            constraint = pose_bones["Bone_"+str(bone_id)].constraints[name]
            if constraint.type != 'IK' or constraint.target:
                continue
            ik_bone_data = []
            ik_bone_data.append(name)
            ik_bone_data.append(bone_id)
            for time in keyframe_times:
                if fcurve.evaluate(time/1000*bpy.context.scene.render.fps)>0.5 :
                    key_info = keyframes[time]
                    key_info.ik_bones.append(ik_bone_data)
        # Location of a direct child of root feeds the translation offset.
        if path_end == "location":
            if pose_bones["Bone_"+str(bone_id)].parent.name == "root":
                for time in keyframe_times:
                    key_info = keyframes[time]
                    key_info.translation_offset[array_index] = fcurve.evaluate(time/1000*bpy.context.scene.render.fps)
            continue
        if path_end != "rotation_euler" and path_end != "rotation_quaternion":
            continue
        # Per-bone rotation channels, stored component-by-component.
        for time in keyframe_times:
            key_info = keyframes[time]
            key_info.rotation_modes[bone_id] = pose_bones["Bone_"+str(bone_id)].rotation_mode
            if not bone_id in key_info.rotations:
                key_info.rotations[bone_id] = []
            while array_index >= len(key_info.rotations[bone_id]):
                key_info.rotations[bone_id].append(0)
            key_info.rotations[bone_id][array_index] = fcurve.evaluate(time/1000*bpy.context.scene.render.fps)

    num_joints = GetNumJoints(arm_obj)

    if not arm_obj.data.layers[WEAPON_VIEW_LAYER]:
        print("Not exporting weapons because weapon layer is hidden")
        tbr.num_weapons = 0
    print("Num weapons: "+str(tbr.num_weapons))
    for time in keyframe_times:
        rotations = keyframes[time].rotations
        rotation_modes = keyframes[time].rotation_modes
        # NOTE(review): the sampled root translation is overwritten by the
        # root-child offset here -- confirm this is intended.
        keyframes[time].translation[0] = keyframes[time].translation_offset[0]
        keyframes[time].translation[1] = keyframes[time].translation_offset[1]
        keyframes[time].translation[2] = keyframes[time].translation_offset[2]
        translation = keyframes[time].translation
        # Get bone matrices from joint rotations and root translation
        keyframe = KeyframeFromPose(arm_obj, rotations, rotation_modes, translation, num_joints)
        keyframe.time = time

        # Mobility transform is only written if all 7 components were keyed.
        if len(keyframes[time].mobility_translation) == 3 and \
           len(keyframes[time].mobility_rotation) == 4:
            m_translation = keyframes[time].mobility_translation
            m_rotation = keyframes[time].mobility_rotation
            quat = Quaternion()
            quat[0] = m_rotation[0]
            quat[1] = m_rotation[1]
            quat[2] = m_rotation[2]
            quat[3] = m_rotation[3]
            matrix = quat.to_matrix().to_4x4()
            matrix[3][0] = m_translation[0]
            matrix[3][1] = m_translation[1]
            matrix[3][2] = m_translation[2]
            mat = array.array('f')
            mat.fromlist([matrix[0][0], matrix[0][2], -matrix[0][1], matrix[0][3],
                          matrix[1][0], matrix[1][2], -matrix[1][1], matrix[1][3],
                          matrix[2][0], matrix[2][2], -matrix[2][1], matrix[2][3],
                          matrix[3][0], matrix[3][2], -matrix[3][1], matrix[3][3]])
            keyframe.mobility_mat = mat

        # Bones without a sampled weight default to 0.0.
        for i in range(len(rotations)):
            if i in keyframes[time].weights:
                keyframe.weights.append(keyframes[time].weights[i])
            else:
                keyframe.weights.append(0.0)
        for i in range(tbr.num_weapons):
            w_translation = keyframes[time].weap_translations[i]
            rotation = keyframes[time].weap_rotations[i]
            quat = Quaternion()
            quat[0] = rotation[0]
            quat[1] = rotation[1]
            quat[2] = rotation[2]
            quat[3] = rotation[3]
            matrix = quat.to_matrix().to_4x4()
            matrix[3][0] = w_translation[0]
            # Hard-coded offsets -- presumably a grip alignment for the rig;
            # confirm before changing.
            matrix[3][1] = w_translation[1] - 0.03
            matrix[3][2] = w_translation[2] - 0.69
            mat = array.array('f')
            mat.fromlist([matrix[0][0], matrix[0][2], -matrix[0][1], matrix[0][3],
                          matrix[1][0], matrix[1][2], -matrix[1][1], matrix[1][3],
                          matrix[2][0], matrix[2][2], -matrix[2][1], matrix[2][3],
                          matrix[3][0], matrix[3][2], -matrix[3][1], matrix[3][3]])
            keyframe.weapon_mats.append(mat)
            if i in keyframes[time].weap_relative_id:
                keyframe.weap_relative_ids.append(keyframes[time].weap_relative_id[i])
            else:
                keyframe.weap_relative_ids.append(-1)
            if i in keyframes[time].weap_relative_weight:
                keyframe.weap_relative_weights.append(keyframes[time].weap_relative_weight[i])
            else:
                keyframe.weap_relative_weights.append(0.0)
        for shape_key_data in keyframes[time].shape_keys:
            shape_key = anm_types.ShapeKey()
            shape_key.string = shape_key_data[0]
            shape_key.weight = shape_key_data[1]
            keyframe.shape_keys.append(shape_key)
        for status_key_data in keyframes[time].status_keys:
            status_key = anm_types.StatusKey()
            status_key.string = status_key_data[0]
            status_key.weight = status_key_data[1]
            keyframe.status_keys.append(status_key)
        for event_data in keyframes[time].events:
            event = anm_types.Event()
            event.string = event_data[0]
            event.which_bone = event_data[1]
            keyframe.events.append(event)
        for ik_bone_data in keyframes[time].ik_bones:
            #print("Adding an IK bone")
            ik_bone = anm_types.IKBone()
            ik_bone.string = ik_bone_data[0]
            bone_id = ik_bone_data[1]
            constraint = pose_bones["Bone_"+str(bone_id)].constraints[ik_bone.string]
            path_length = constraint.chain_count
            ik_bone.bone_path.append(bone_id)
            index = bone_id
            # Walk up the parent chain to collect the IK path, then reverse
            # so the path runs root-to-tip.
            for i in range(path_length-1):
                index = int(pose_bones["Bone_"+str(index)].parent.name.split("_")[-1])
                ik_bone.bone_path.append(index)
            ik_bone.bone_path.reverse()
            vec = pose_bones["Bone_"+str(ik_bone.bone_path[0])].head
            ik_bone.start.fromlist([vec[0],vec[1],vec[2]])
            vec = pose_bones["Bone_"+str(ik_bone.bone_path[-1])].tail
            ik_bone.end.fromlist([vec[0],vec[1],vec[2]])
            keyframe.ik_bones.append(ik_bone)

        data.keyframes.append(keyframe)

    # Delete the working duplicate and restore the original active object.
    bpy.ops.object.mode_set(mode='OBJECT')
    bpy.ops.object.select_all(action = 'DESELECT')
    bpy.context.scene.objects.active = arm_obj
    bpy.context.scene.objects.active.select = True
    bpy.ops.object.delete()
    bpy.context.scene.objects.active = old_arm_obj

    action.user_clear()

    #data.keyframes.sort(key=operator.attrgetter('time'))
    #print(data.keyframes)

    return data
+
def WriteFloat(file, val):
    """Write val to the binary file as one 4-byte native-endian float."""
    array.array('f', [val]).tofile(file)
+
+def WriteInt(file, val):
+ int_loader = Get4ByteIntArray()
+ int_loader.append(val)
+ int_loader.tofile(file)
+
def WriteBool(file, val):
    """Write val to the binary file as a single unsigned byte (0 or 1)."""
    array.array('B', [val]).tofile(file)
+
def WriteANM(filepath, data):
    """Serialize an anm_types.ANMdata structure to a binary .anm file.

    Layout (ints/floats are 4-byte native-endian): version, reserved bool,
    looping bool, start, end, keyframe count; then per keyframe: time,
    weights, bone matrices, weapon matrices (each followed by its relative
    bone id and weight), optional mobility matrix, events, IK bones,
    shape keys, and status keys. Strings are length-prefixed UTF-8.

    Fixes vs. original: the file handle is now closed via a context manager
    (it was leaked), and the removed-in-Python-3.9 array.fromstring is
    replaced by frombytes.
    """
    data.version = 10  # this exporter always emits format version 10
    num_keyframes = len(data.keyframes)

    def _write_string(file, text):
        # Strings are stored as a 4-byte byte-count plus raw UTF-8 bytes.
        raw = array.array("B")
        raw.frombytes(text.encode("utf-8"))
        WriteInt(file, len(raw))
        raw.tofile(file)

    with open(filepath, "wb") as file:
        WriteInt(file, data.version)
        WriteBool(file, False)  # reserved/unused flag
        WriteBool(file, data.looping)
        WriteInt(file, int(data.start))
        WriteInt(file, int(data.end))
        WriteInt(file, num_keyframes)

        for keyframe in data.keyframes:
            WriteInt(file, keyframe.time)
            WriteInt(file, len(keyframe.weights))
            keyframe.weights.tofile(file)
            WriteInt(file, len(keyframe.mats))
            for mat in keyframe.mats:
                mat.tofile(file)
            WriteInt(file, len(keyframe.weapon_mats))
            for j, weapon_mat in enumerate(keyframe.weapon_mats):
                weapon_mat.tofile(file)
                WriteInt(file, keyframe.weap_relative_ids[j])
                WriteFloat(file, keyframe.weap_relative_weights[j])
            # Mobility matrix is optional; a bool flags its presence.
            if len(keyframe.mobility_mat) > 0:
                WriteBool(file, True)
                keyframe.mobility_mat.tofile(file)
            else:
                WriteBool(file, False)
            WriteInt(file, len(keyframe.events))
            for event in keyframe.events:
                WriteInt(file, event.which_bone)
                _write_string(file, event.string)
            WriteInt(file, len(keyframe.ik_bones))
            for ik_bone in keyframe.ik_bones:
                ik_bone.start.tofile(file)
                ik_bone.end.tofile(file)
                WriteInt(file, len(ik_bone.bone_path))
                ik_bone.bone_path.tofile(file)
                _write_string(file, ik_bone.string)
            WriteInt(file, len(keyframe.shape_keys))
            for shape_key in keyframe.shape_keys:
                WriteFloat(file, shape_key.weight)
                _write_string(file, shape_key.string)
            WriteInt(file, len(keyframe.status_keys))
            for status_key in keyframe.status_keys:
                WriteFloat(file, status_key.weight)
                _write_string(file, status_key.string)
+
def Save(filepath):
    """Extract the active armature's animation and write it to filepath.

    Does nothing when GetAnimation reports an error (returns None).
    """
    data = GetAnimation(filepath)
    if data:
        WriteANM(filepath, data)
+
class ANMExporter(bpy.types.Operator):
    '''Save Phoenix Bones armature'''
    bl_idname = "export_mesh.anm"
    bl_label = "Export ANM"

    # Destination path, filled in by the file-select dialog.
    filepath = StringProperty(name="File Path", description="Filepath used for exporting the ANM file", maxlen= 1024, default= "")

    # Standard Blender option: warn before overwriting an existing file.
    check_existing = BoolProperty(name="Check Existing", description="Check and warn on overwriting existing files", default=True, options={'HIDDEN'})

    def execute(self, context):
        # Export the active armature's action to the chosen path.
        Save(self.properties.filepath)
        return {'FINISHED'}

    def invoke(self, context, event):
        # Open the file-select dialog; execute() runs when it is confirmed.
        wm = context.window_manager
        wm.add_fileselect(self)
        return {'RUNNING_MODAL'}
+
+#Save("C:\\Users\\David\\Desktop\\export.anm")
+#Save("Desktop/export.anm")
+#BakeConstraints(bpy.context.scene.objects.active)
+#Save("C:\\Users\\David\\Desktop\\WolfireSVN\\Project\\Data\\Animations\\r_bigdogswordattackover.anm")
diff --git a/Data/BlenderScript/addons/io_anm/import_anm.py b/Data/BlenderScript/addons/io_anm/import_anm.py
new file mode 100644
index 00000000..391dc13e
--- /dev/null
+++ b/Data/BlenderScript/addons/io_anm/import_anm.py
@@ -0,0 +1,433 @@
+import bpy
+from bpy.props import *
+import array
+import mathutils
+from mathutils import Vector, Matrix
+from io_anm import anm_types
+
def Get4ByteIntArray():
    """Return an empty array whose integer items occupy exactly 4 bytes.

    The ANM format stores 32-bit ints; typecode 'l' is 8 bytes on LP64
    platforms, so fall back to 'i' when 'l' is too wide.
    """
    candidate = array.array('l')
    return candidate if candidate.itemsize == 4 else array.array('i')
+
class BoneNameTranslator():
    """Two-way map between armature bone names and canonical "Bone_<n>" names.

    Bones already named "Bone_<n>" keep their name and reserve index <n>;
    every other bone is assigned the lowest free index in iteration order.
    The "root" bone and bones not on layer 0 are ignored.
    """

    def __init__(self, arm_data):
        taken = {}
        self.to_bone_map = {}
        self.from_bone_map = {}
        num = 0
        for bone in arm_data.bones:
            if bone.name == "root" or bone.layers[0] == False:
                continue
            if bone.name.split("_")[0] == "Bone":
                # Canonically named already: identity mapping, index reserved.
                taken[int(bone.name.split("_")[-1])] = True
                self.to_bone_map[bone.name] = bone.name
                self.from_bone_map[bone.name] = bone.name
            num = num + 1

        next_free = 0
        for bone in arm_data.bones:
            if bone.name == "root" or bone.layers[0] == False:
                continue
            if bone.name.split("_")[0] != "Bone":
                while next_free in taken:
                    next_free = next_free + 1
                canonical = "Bone_" + str(next_free)
                self.from_bone_map[canonical] = bone.name
                self.to_bone_map[bone.name] = canonical
                next_free = next_free + 1

    def ToBoneName(self, name):
        # Original name -> canonical name (None if unknown).
        return self.to_bone_map.get(name)

    def FromBoneName(self, name):
        # Canonical name -> original name (None if unknown).
        return self.from_bone_map.get(name)
+
def LoadInt(file):
    """Read one 4-byte machine-order int from the binary file object."""
    # Same 4-byte selection logic as Get4ByteIntArray, inlined.
    buf = array.array('l')
    if buf.itemsize != 4:
        buf = array.array('i')
    buf.fromfile(file, 1)
    return buf[0]
+
def LoadFloat(file):
    """Read one 4-byte machine-order float from the binary file object."""
    reader = array.array('f')
    reader.fromfile(file, 1)
    return reader[0]
+
def LoadBool(file):
    """Read one unsigned byte (0/1 flag) from the binary file object."""
    reader = array.array('B')
    reader.fromfile(file, 1)
    return reader[0]
+
def GetBoneMatZ(edit_bone):
    # Fallback for GetBoneMat when the bone is nearly parallel to world Y:
    # build an orthonormal frame around the bone axis using world Z as the
    # reference vector instead.
    # NOTE(review): relies on the Blender 2.5x mathutils API where
    # normalize()/cross() return the resulting vector; in modern Blender
    # normalize() mutates in place and returns None -- confirm target version.
    z_vec = (edit_bone.tail - edit_bone.head).normalize()
    x_vec = Vector((0,0,1))
    y_vec = x_vec.cross(z_vec).normalize()
    x_vec = y_vec.cross(z_vec).normalize()

    # Flat row-major 4x4; axes swizzled from Blender (Z-up) to engine
    # (Y-up) convention: engine (x, y, z) = blender (x, z, -y).
    return [x_vec[0], x_vec[2], -x_vec[1], 0,
            y_vec[0], y_vec[2], -y_vec[1], 0,
            z_vec[0], z_vec[2], -z_vec[1], 0,
            0,0,0,1]
+
+
def GetBoneMat(edit_bone):
    # Build a rest-pose basis for an edit bone as a flat row-major 4x4
    # (Y-up engine convention, see GetBoneMatZ for the axis swizzle).
    # Legacy 2.5x mathutils API: normalize()/cross() return vectors.
    z_vec = (edit_bone.tail - edit_bone.head).normalize()
    y_vec = Vector((0,1,0))
    if abs(z_vec.dot(y_vec)) > 0.95:
        # Bone is nearly parallel to world Y; Y cannot serve as the
        # reference axis, so use the world-Z variant instead.
        return GetBoneMatZ(edit_bone)
    x_vec = z_vec.cross(y_vec).normalize()
    y_vec = z_vec.cross(x_vec).normalize()

    return [x_vec[0], x_vec[2], -x_vec[1], 0,
            y_vec[0], y_vec[2], -y_vec[1], 0,
            z_vec[0], z_vec[2], -z_vec[1], 0,
            0,0,0,1]
+
def CalcBoneMats(data):
    # Cache a rest-pose matrix on every edit bone as the custom property
    # "mat", unless one is already present (e.g. set by the PHXBN importer).
    for bone in data.edit_bones:
        if not bone.get("mat"):
            bone["mat"] = GetBoneMat(bone)
+
def ReadANM(filepath):
    """Parse a Phoenix .anm file into an anm_types.ANMdata structure.

    Format versions are strict supersets of one another, hence the cascade
    of `data.version >= N` sections per keyframe.
    NOTE(review): the file handle is never closed explicitly; consider a
    `with` block.  All multi-byte values are machine byte order.
    """
    file = open(filepath, "rb")

    data = anm_types.ANMdata()
    # Display name = last path component; assumes Windows-style
    # backslash separators -- TODO confirm on other platforms.
    data.name = filepath.split("\\")[-1]
    data.version = LoadInt(file)
    data.looping = LoadBool(file)
    if data.version > 0:
        data.start = LoadInt(file)
        data.end = LoadInt(file)
    num_keyframes = LoadInt(file)

    for i in range(num_keyframes):
        keyframe = anm_types.Keyframe()
        keyframe.time = LoadInt(file)
        if data.version >= 4:
            num_weights = LoadInt(file)
            keyframe.weights.fromfile(file, num_weights)
        num_bone_mats = LoadInt(file)
        for j in range(num_bone_mats):
            # One flat 4x4 float matrix per bone.
            mat = array.array('f')
            mat.fromfile(file, 16)
            keyframe.mats.append(mat)
        if data.version >= 2:
            # Events: (bone index, UTF-8 string) pairs.
            # NOTE(review): array.tostring() is the legacy alias of
            # tobytes(); removed in Python 3.9 -- fine for Blender 2.5x.
            num_events = LoadInt(file)
            for j in range(num_events):
                event = anm_types.Event()
                event.which_bone = LoadInt(file)
                string_size = LoadInt(file)
                string_array = array.array("B")
                string_array.fromfile(file, string_size)
                event.string = string_array.tostring().decode("utf-8")
                keyframe.events.append(event)
        if data.version >= 3:
            # IK bones: start/end points, bone path indices, and a name.
            num_ik_bones = LoadInt(file)
            for j in range(num_ik_bones):
                ik_bone = anm_types.IKBone()
                ik_bone.start.fromfile(file, 3)
                ik_bone.end.fromfile(file, 3)
                path_length = LoadInt(file)
                ik_bone.bone_path.fromfile(file, path_length)
                string_size = LoadInt(file)
                string_array = array.array("B")
                string_array.fromfile(file, string_size)
                ik_bone.string = string_array.tostring().decode("utf-8")
                keyframe.ik_bones.append(ik_bone)
                #print(ik_bone)
        if data.version >= 5:
            # Shape keys: (weight, name) pairs.
            num_shape_keys = LoadInt(file)
            for j in range(num_shape_keys):
                shape_key = anm_types.ShapeKey()
                shape_key.weight = LoadFloat(file)
                string_size = LoadInt(file)
                string_array = array.array("B")
                string_array.fromfile(file, string_size)
                shape_key.string = string_array.tostring().decode("utf-8")
                keyframe.shape_keys.append(shape_key)
        if data.version >= 6:
            # Status keys: same layout as shape keys.
            num_status_keys = LoadInt(file)
            for j in range(num_status_keys):
                status_key = anm_types.StatusKey()
                status_key.weight = LoadFloat(file)
                string_size = LoadInt(file)
                string_array = array.array("B")
                string_array.fromfile(file, string_size)
                status_key.string = string_array.tostring().decode("utf-8")
                keyframe.status_keys.append(status_key)
        data.keyframes.append(keyframe)


    # Disabled debug dump of the parsed structure.
    '''
    print("Version: ", data.version)
    print("Looping: ", data.looping)
    print("Start: ", data.start)
    print("End: ", data.end)
    print("Num keyframes: ", len(data.keyframes))
    id = 0
    for keyframe in data.keyframes:
        print("Keyframe ", id, ":")
        print("  Time: ", keyframe.time)
        print("  Weights: ", keyframe.weights)
        print("  Mats: ", len(keyframe.mats))
        print("  Events: ", keyframe.events)
        print("  IKBones: ", keyframe.ik_bones)
    '''
    return data
+
def to_array(array, idarray):
    """Append every element of `idarray` onto `array` (in-place extend)."""
    # Note: the first parameter intentionally shadows the `array` module
    # inside this function; kept for interface compatibility.
    for element in idarray:
        array.append(element)
+
+
def PoseFromKeyframe(data, key_id, arm_obj):
    """Apply keyframe `key_id` of `data` to armature object `arm_obj`'s pose.

    Pipeline: decode the keyframe's world-space bone matrices (engine Y-up
    -> Blender Z-up swizzle), temporarily rename bones to canonical
    "Bone_<n>" names, derive per-bone local rotations relative to the rest
    pose stored in each bone's "mat" custom property, convert those into
    bone space, and write quaternions/eulers plus the root translation onto
    the pose.  Statement order is significant throughout.
    Uses the legacy 2.5x mathutils API (rotation_part/translation_part,
    invert() returning the matrix) -- confirm target Blender version.
    """
    arm_data = arm_obj.data
    translation = []
    matrices = []
    for mat in data.keyframes[key_id].mats:
        # Swizzle engine (x, y, z) -> Blender (x, -z, y) while unpacking
        # the flat 16-float row-major matrix.
        matrix = Matrix([mat[0], -mat[2], mat[1], mat[3]],
                        [mat[4], -mat[6], mat[5], mat[7]],
                        [mat[8], -mat[10], mat[9], mat[11]],
                        [mat[12], -mat[14], mat[13], mat[15]])

        '''bpy.ops.object.add(type='EMPTY', location=(mat[12]+2, -mat[14], mat[13]))
        ob = bpy.context.scene.objects.active
        ob.empty_draw_type = 'ARROWS'
        ob.empty_draw_size = 0.1
        ob.rotation_mode = 'QUATERNION'
        ob.rotation_quaternion = matrix.rotation_part().to_quat()'''
        translation.append(matrix.translation_part())
        matrices.append(matrix.rotation_part())

    bpy.context.scene.objects.active = arm_obj

    # Temporarily rename bones to canonical "Bone_<n>" so keyframe matrix
    # index n can be looked up by name; renamed back at the end.
    bnt = BoneNameTranslator(arm_data)
    for bone in arm_data.bones:
        if bnt.ToBoneName(bone.name):
            bone.name = bnt.ToBoneName(bone.name)

    # Collect rest-pose data per exported bone, in "Bone_<n>" index order.
    initial_translation = []
    initial_matrices = []
    bone_matrices = []
    inv_bone_matrices = []
    num = 0
    for iter_bone in arm_data.bones:
        if iter_bone.name == "root" or iter_bone.layers[0] == False:
            continue
        name = "Bone_"+str(num)
        bone = arm_data.bones[name]
        bone.use_hinge  # NOTE(review): no-op attribute read; likely leftover.
        mat = array.array('f')
        # Rest pose cached by CalcBoneMats() as custom property "mat".
        to_array(mat, bone["mat"])
        matrix = Matrix([mat[0], -mat[2], mat[1], mat[3]],
                        [mat[4], -mat[6], mat[5], mat[7]],
                        [mat[8], -mat[10], mat[9], mat[11]],
                        [mat[12], -mat[14], mat[13], mat[15]])

        initial_translation.append(matrix.translation_part())
        initial_matrices.append(matrix.rotation_part())
        bone_matrices.append(bone.matrix_local.rotation_part())
        inv_bone_matrices.append(bone_matrices[-1].copy().invert())
        '''
        bpy.ops.object.add(type='EMPTY', location=(mat[12]+4, -mat[14], mat[13]))
        ob = bpy.context.scene.objects.active
        ob.empty_draw_type = 'ARROWS'
        ob.empty_draw_size = 0.1
        ob.rotation_mode = 'QUATERNION'
        ob.rotation_quaternion = matrix.rotation_part().to_quat()

        bpy.ops.object.add(type='EMPTY', location=(mat[12]+6, -mat[14], mat[13]))
        ob = bpy.context.scene.objects.active
        ob.empty_draw_type = 'ARROWS'
        ob.empty_draw_size = 0.1
        ob.rotation_mode = 'QUATERNION'
        ob.rotation_quaternion = (matrices[num]*initial_matrices[num].copy().invert()).rotation_part().to_quat()'''
        num = num + 1
    bpy.context.scene.objects.active = arm_obj

    #for num in range(len(matrices)):
        #matrices[num] = inv_bone_matrices[num] * matrices[num] * bone_matrices[num]
        #initial_matrices[num] = inv_abone_matrices[num] * initial_matrices[num] * bone_matrices[num]

    initial_inv_matrices = []
    for mat in initial_matrices:
        matrix = mat.copy()
        matrix.invert()
        initial_inv_matrices.append(matrix)

    ident = Matrix([1,0,0],[0,1,0],[0,0,1])  # NOTE(review): unused.
    local_matrices = []
    num = 0
    root_translation = Vector((0,0,0))
    for matrix in matrices:
        name = "Bone_"+str(num)
        bone = arm_data.bones[name]
        parent_bone = bone.parent
        if not parent_bone or parent_bone.name == "root":
            # Root-level bone: delta from rest pose also drives the root
            # translation (axis flips/swaps convert back to pose space).
            local_matrices.append(matrix*initial_inv_matrices[num])
            curr_translation = translation[num] - initial_translation[num]
            #offset = matrix * initial_inv_matrices[num] * inv_bone_matrices[num] * Vector((0,bone.length*0.5,0));
            #curr_translation = curr_translation + Vector((offset[0], offset[1], offset[2]))
            root_translation = curr_translation
            root_translation[1] = root_translation[1]*-1
            root_translation[0] = root_translation[0]*-1
            # Compensate for the rotated bone's midpoint offset.
            offset = inv_bone_matrices[num] * local_matrices[num] * bone_matrices[num] * Vector((0,bone.length*0.5,0));
            offset[1],offset[2] = offset[2], offset[1]
            offset[2] *= -1
            offset[1] *= -1
            root_translation += offset
        else:
            # Child bone: rotation relative to the parent's delta rotation.
            parent_id = int(parent_bone.name.split("_")[-1])
            inv_mat = (matrices[parent_id]*initial_inv_matrices[parent_id]).invert()
            local_matrices.append(inv_mat*matrix*initial_inv_matrices[num])
        num = num + 1

    #straight up = [1,0,0][0,0,-1],[0,1,0]
    #straight right = [0,-1,0][1,0,0][0,0,1]
    #straight left = [0,1,0][-1,0,0][0,0,1]
    #local_matrices now contains local rotation matrices in world space
    #time to convert them to bone space
    num = 0
    for matrix in local_matrices:
        local_matrices[num] = inv_bone_matrices[num] * local_matrices[num] * bone_matrices[num]
        num = num + 1

    local_quats = []
    for matrix in local_matrices:
        local_quats.append(matrix.to_quat())

    # Mode toggle flushes pending edit-mode data before posing.
    bpy.ops.object.mode_set(mode='OBJECT', toggle=False)
    bpy.ops.object.mode_set(mode='POSE', toggle=False)

    pose = arm_obj.pose
    pose_bones = pose.bones

    num = 0
    for quat in local_quats:
        name = "Bone_"+str(num)
        bone = pose_bones[name]
        if bone.rotation_mode == 'QUATERNION':
            bone.rotation_quaternion = quat
        else:
            # Respect the bone's configured euler order.
            mode = bone.rotation_mode
            euler = quat.to_euler(mode)
            bone.rotation_euler = euler
        num = num + 1

    pose_bones["root"].location = root_translation

    # Restore the user's original bone names.
    for bone in arm_data.bones:
        if bnt.FromBoneName(bone.name):
            bone.name = bnt.FromBoneName(bone.name)
+
def ApplyAnimation(data):
    """Bake a parsed ANMdata onto the active armature as a new action.

    Creates keyframes for bone rotations, root location, per-keyframe IK
    constraint influence, and shape-key custom properties.  Keyframe times
    are milliseconds, converted to frames via the scene FPS.
    """
    arm_obj = bpy.context.scene.objects.active
    if arm_obj.type != 'ARMATURE':
        print("PHXBN armature must be selected")
        return

    # Ensure every bone has its rest-pose "mat" property before posing.
    bpy.ops.object.mode_set(mode='EDIT')
    CalcBoneMats(arm_obj.data)
    bpy.ops.object.mode_set(mode='OBJECT')

    action = bpy.data.actions.new(data.name)
    if not arm_obj.animation_data:
        arm_obj.animation_data_create()
    arm_obj.animation_data.action = action

    '''pose_bones = arm_obj.pose.bones
    for bone in pose_bones:
        group = action.groups.add(bone.name)
        for j in range(3):
            action.fcurves.new("rotation_euler",j,bone.name)'''

    # Preserve file-level metadata on the action as custom properties so
    # the exporter can round-trip it.
    action["Version"] = data.version
    action["Looping"] = data.looping
    action["Start"] = data.start
    action["End"] = data.end

    # Collect each named IK bone once, plus the set of keyframe times at
    # which it is active (drives the constraint influence curve below).
    ik_bones = {}
    ik_bone_keys = {}
    for keyframe in data.keyframes:
        for bone in keyframe.ik_bones:
            if not bone.string in ik_bones:
                ik_bones[bone.string] = bone
                ik_bone_keys[bone.string] = {}
            ik_bone_keys[bone.string][keyframe.time] = True

    #print("IK Bones:\n",ik_bones)

    # Remove stale targetless IK constraints from a previous import.
    pose_bones = arm_obj.pose.bones
    index = 0
    for bone in pose_bones:
        for constraint in bone.constraints:
            if constraint.type == 'IK' and not constraint.target:
                bone.constraints.remove(constraint)
            else:
                index = index + 1

    # Recreate one IK constraint per named IK chain, anchored at the last
    # bone of the chain's path.
    ik_constraints = []
    for bone_string in ik_bones:
        bone = ik_bones[bone_string]
        constraint = pose_bones["Bone_"+str(bone.bone_path[-1])].constraints.new('IK')
        constraint.chain_count = len(bone.bone_path)
        constraint.name = bone_string
        ik_constraints.append(constraint)

    bone_quat = {}
    for i in range(len(data.keyframes)):
        # Keyframe time is in milliseconds.
        the_frame = data.keyframes[i].time/1000*bpy.context.scene.render.fps
        PoseFromKeyframe(data, i, arm_obj)
        for bone in pose_bones:
            if bone.rotation_mode == 'QUATERNION':
                if bone.name in bone_quat:
                    # Keep quaternion continuity: flip sign when the new
                    # quat is on the opposite hemisphere from the last key.
                    if bone.rotation_quaternion.dot(bone_quat[bone.name]) < 0:
                        bone.rotation_quaternion.negate()
                bone.keyframe_insert("rotation_quaternion", frame = the_frame)
                bone_quat[bone.name] = bone.rotation_quaternion.copy()
            else:
                bone.keyframe_insert("rotation_euler", frame = the_frame)
        for constraint in ik_constraints:
            if data.keyframes[i].time in ik_bone_keys[constraint.name]:
                constraint.influence = 1
            else:
                constraint.influence = 0
            constraint.keyframe_insert("influence", frame = the_frame)
        pose_bones["root"].keyframe_insert("location", frame = the_frame)
        for shape_key in data.keyframes[i].shape_keys:
            # Shape keys are stored as custom properties on the object and
            # keyframed via their property data path.
            arm_obj[shape_key.string] = shape_key.weight
            arm_obj.keyframe_insert("[\""+shape_key.string+"\"]", frame = the_frame)
+
def Load(filepath):
    """Read a Phoenix .anm file and apply it to the active armature."""
    data = ReadANM(filepath)
    if data:
        ApplyAnimation(data)
+
class ANMImporter(bpy.types.Operator):
    '''Load Phoenix Animation'''
    bl_idname = "import_armature.anm"
    bl_label = "Import ANM"

    # Source path; filled in by the file-select dialog started in invoke().
    filepath = StringProperty(name="File Path", description="Filepath used for importing the ANM file", maxlen=1024, default="")

    def execute(self, context):
        print("Filepath:",self.properties.filepath)
        Load(self.properties.filepath)

        # Derive a display name from the file name (assumes Windows-style
        # backslash separators -- TODO confirm).
        filename = self.properties.filepath.split("\\")[-1]
        #convert the filename to an object name
        objName = bpy.path.display_name(filename)
        print("Filename:",filename)

        #mesh = readMesh(self.properties.filepath, objName)
        #addMeshObj(mesh, objName)

        return {'FINISHED'}

    def invoke(self, context, event):
        # Pop the file selector; Blender calls execute() once a path is chosen.
        wm = context.window_manager
        wm.add_fileselect(self)
        return {'RUNNING_MODAL'}
+
+#Load("C:\\Users\\David\\Desktop\\Wolfire SVN\\Project\\Data\\Animations\\run.anm")
+#Load("C:\\Users\\David\\Desktop\\export.anm")
diff --git a/Data/BlenderScript/addons/io_canm/__init__.py b/Data/BlenderScript/addons/io_canm/__init__.py
new file mode 100644
index 00000000..989abb02
--- /dev/null
+++ b/Data/BlenderScript/addons/io_canm/__init__.py
@@ -0,0 +1,29 @@
# Legacy Blender 2.5.x add-on metadata (predecessor of bl_info).
# NOTE(review): the name says "Import/Export" but only an exporter is
# registered by this package.
bl_addon_info = {
    'name': 'Import/Export: Phoenix camera animation (.canm)...',
    'author': 'David Rosen',
    'version': '0.1',
    'blender': (2, 5, 4),
    'location': 'File > Import/Export > CANM',
    'description': 'Export Phoenix camera animation (.canm format)',
    'warning': '', # used for warning icon and text in addons panel
    'category': 'Import/Export'}
+
+import bpy
+
def menu_export(self, context):
    # File > Export menu entry; defaults the target path to the .blend
    # file's name with a .canm extension.
    from io_canm import export_canm
    import os
    default_path = os.path.splitext(bpy.data.filepath)[0] + ".canm"
    self.layout.operator(export_canm.CANMExporter.bl_idname, text="Phoenix Camera Animation (.canm)").filepath = default_path
+
def register():
    # Uses the legacy 2.5.x bpy.types.register() API (later replaced by
    # bpy.utils.register_class).
    from io_canm import export_canm
    bpy.types.register(export_canm.CANMExporter)
    bpy.types.INFO_MT_file_export.append(menu_export)
+
def unregister():
    # NOTE(review): only the menu entry is removed; the operator class is
    # never unregistered (matches the commented-out pattern in the sibling
    # add-ons) -- confirm this was intentional for the 2.5.x API.
    from io_canm import export_canm
    bpy.types.INFO_MT_file_export.remove(menu_export)
+
+if __name__ == "__main__":
+ register()
diff --git a/Data/BlenderScript/addons/io_canm/export_canm.py b/Data/BlenderScript/addons/io_canm/export_canm.py
new file mode 100644
index 00000000..0cf5b68e
--- /dev/null
+++ b/Data/BlenderScript/addons/io_canm/export_canm.py
@@ -0,0 +1,136 @@
+import bpy
+import array
+from bpy.props import *
+from mathutils import Matrix, Euler, Vector, Quaternion
+
class dfcurvekeyclass:
    """One F-curve keyframe: (frame, value) co-ordinate plus left/right
    Bezier handle endpoints, each a flat float array of [x, y]."""

    def __init__(self):
        self.co = array.array('f')            # keyframe (frame, value)
        self.handle_left = array.array('f')   # left handle (x, y)
        self.handle_right = array.array('f')  # right handle (x, y)
+
class fcurveclass:
    """Snapshot of one Blender F-curve: its RNA data path, array index
    (channel), and the list of dfcurvekeyclass keyframes."""

    def __init__(self):
        self.data_path = ""    # e.g. "location", "rotation_euler"
        self.fcurvekey = []    # list of dfcurvekeyclass
        self.array_index = 0   # channel within the data path
+
class CANMdata:
    """In-memory form of a .canm file: format version, the camera's
    rotation mode, all captured F-curves, and the scene frame rate."""

    def __init__(self):
        self.version = 0
        self.rotation_mode = ""
        self.fcurves = []   # list of fcurveclass
        self.fps = 0
+
def WriteFloat(file, val):
    """Append `val` to the binary file as one 4-byte machine-order float."""
    # (The original used the misleading name `int_loader` for this buffer.)
    buf = array.array('f')
    buf.append(val)
    buf.tofile(file)
+
def Get4ByteIntArray():
    """Return an empty int array with a guaranteed 4-byte item size
    ('l' on ILP32 platforms, otherwise 'i')."""
    result = array.array('l')
    if result.itemsize != 4:
        result = array.array('i')
    return result
+
def WriteInt(file, val):
    """Append `val` to the binary file as one 4-byte machine-order int."""
    # Same 4-byte typecode selection as Get4ByteIntArray, inlined.
    buf = array.array('l')
    if buf.itemsize != 4:
        buf = array.array('i')
    buf.append(val)
    buf.tofile(file)
+
def WriteBool(file, val):
    """Append `val` to the binary file as a single unsigned byte."""
    buf = array.array('B')
    buf.append(val)
    buf.tofile(file)
+
def WriteString(file, str):
    # Serialize `str` as UTF-8: a 4-byte length prefix, then the raw bytes.
    # Note: the parameter name `str` shadows the builtin; kept for
    # interface compatibility.
    string_array = array.array("B")
    # fromstring() is the Blender-2.5x-era spelling of frombytes()
    # (removed in Python 3.9) -- confirm target interpreter.
    string_array.fromstring(str.encode("utf-8"))
    # BUG FIX: the prefix must be the encoded *byte* count.  The original
    # wrote len(str) (character count), which understates the payload for
    # any non-ASCII text and desyncs readers that consume `length` bytes.
    WriteInt(file, len(string_array))
    string_array.tofile(file)
+
def WriteCANM(filepath, data):
    """Serialize a CANMdata to `filepath` (version 1 layout).

    Layout: version, fps, rotation mode string, F-curve count, then per
    curve its data path, array index, key count, and per key the co /
    handle_left / handle_right float pairs.
    NOTE(review): the file handle is never closed explicitly.
    """
    file = open(filepath, "wb")

    data.version = 1  # this writer always emits version 1
    num_fcurves = len(data.fcurves)
    WriteInt(file, data.version)
    WriteInt(file, data.fps)
    WriteString(file, data.rotation_mode)
    WriteInt(file, num_fcurves)

    for fcurve in data.fcurves:
        WriteString(file, fcurve.data_path)
        WriteInt(file, fcurve.array_index)
        WriteInt(file, len(fcurve.fcurvekey))
        for key in fcurve.fcurvekey:
            # Each is a 2-element float array: (x, y).
            key.co.tofile(file)
            key.handle_left.tofile(file)
            key.handle_right.tofile(file)
+
def GetAnimation():
    """Collect the active camera's object- and data-level F-curves into a
    CANMdata snapshot.

    Returns 0 (falsy) when no camera is active so Save() can bail out.
    """
    cam_obj = bpy.context.scene.objects.active
    # BUG FIX: the original condition (`cam_obj and cam_obj.type != ...`)
    # let a None active object fall through and crash on the attribute
    # accesses below; reject both "nothing active" and "not a camera".
    if not cam_obj or cam_obj.type != 'CAMERA':
        print("Camera must be selected")
        return 0
    fcurves = []
    # Object-level animation (location/rotation/scale) ...
    # (animation_data itself may be None; guard before reading .action.)
    if cam_obj.animation_data and cam_obj.animation_data.action:
        for fcurve in cam_obj.animation_data.action.fcurves:
            fcurves.append(fcurve)
    # ... plus camera-data animation (e.g. lens).
    if cam_obj.data.animation_data and cam_obj.data.animation_data.action:
        for fcurve in cam_obj.data.animation_data.action.fcurves:
            fcurves.append(fcurve)

    # Deep-copy the curves into plain containers so the exporter does not
    # depend on live bpy data.
    fcurves_c = []
    for fcurve in fcurves:
        #print(fcurve.data_path)
        fcurve_c = fcurveclass()
        fcurve_c.data_path = fcurve.data_path
        fcurve_c.array_index = fcurve.array_index
        for keyframe in fcurve.keyframe_points:
            keyframe_c = dfcurvekeyclass()
            keyframe_c.co.append(keyframe.co[0])
            keyframe_c.co.append(keyframe.co[1])
            keyframe_c.handle_left.append(keyframe.handle_left[0])
            keyframe_c.handle_left.append(keyframe.handle_left[1])
            keyframe_c.handle_right.append(keyframe.handle_right[0])
            keyframe_c.handle_right.append(keyframe.handle_right[1])
            fcurve_c.fcurvekey.append(keyframe_c)
        fcurves_c.append(fcurve_c)

    data = CANMdata()
    data.fcurves = fcurves_c
    data.rotation_mode = cam_obj.rotation_mode
    data.fps = bpy.context.scene.render.fps

    return data
+
def Save(filepath):
    """Capture the active camera's animation and write it to `filepath`.

    Does nothing when GetAnimation() reports no usable camera.
    """
    data = GetAnimation()
    if data:
        WriteCANM(filepath, data)
+
class CANMExporter(bpy.types.Operator):
    # Fix: the docstring was copy-pasted from the PHXBN exporter; this
    # operator exports a camera animation, not an armature.
    '''Export the active camera's animation as Phoenix .canm'''
    bl_idname = "export_mesh.canm"
    bl_label = "Export CANM"

    # Destination path; filled in by the file-select dialog.
    filepath = StringProperty(name="File Path", description="Filepath used for exporting the CANM file", maxlen= 1024, default= "")

    check_existing = BoolProperty(name="Check Existing", description="Check and warn on overwriting existing files", default=True, options={'HIDDEN'})

    def execute(self, context):
        Save(self.properties.filepath)
        return {'FINISHED'}

    def invoke(self, context, event):
        # Pop the file selector; Blender calls execute() once a path is chosen.
        wm = context.window_manager
        wm.add_fileselect(self)
        return {'RUNNING_MODAL'}
+
#Save("C:\\Users\\David\\Desktop\\export.anm")
#Save("Desktop/export.anm")
#BakeConstraints(bpy.context.scene.objects.active)
# Fix: the two lines below were live leftover debug code.  They ran a full
# export to a hardcoded user path at *import time*, so merely registering
# the add-on crashed on any other machine (and with no camera selected).
# Disabled to match the commented-out examples in the sibling modules.
#data = GetAnimation()
#WriteCANM("C:\\Users\\David\\Desktop\\test.canm", data)
diff --git a/Data/BlenderScript/addons/io_obj_shape/__init__.py b/Data/BlenderScript/addons/io_obj_shape/__init__.py
new file mode 100644
index 00000000..b0b38c38
--- /dev/null
+++ b/Data/BlenderScript/addons/io_obj_shape/__init__.py
@@ -0,0 +1,38 @@
# Legacy Blender 2.5.x add-on metadata (predecessor of bl_info).
bl_addon_info = {
    'name': 'Import/Export: Objects as shape keys(.obj)...',
    'author': 'David Rosen',
    'version': '0.1',
    'blender': (2, 5, 4),
    'location': 'File > Import/Export > OBJ shape key',
    'description': 'Import shape key (.obj format)',
    'warning': '', # used for warning icon and text in addons panel
    'category': 'Import/Export'}
+
+import bpy
+
def menu_import(self, context):
    # File > Import menu entry; "*.obj" seeds the file-browser filter.
    from io_obj_shape import import_obj_shape
    self.layout.operator(import_obj_shape.ObjShapeImporter.bl_idname, text="Shape key (.obj)").filepath = "*.obj"
+
def menu_export(self, context):
    # File > Export menu entry.  NOTE(review): currently unused -- the
    # append() for it in register() is commented out.
    from io_obj_shape import export_obj_shape
    import os
    default_path = os.path.splitext(bpy.data.filepath)[0] + ".obj"
    self.layout.operator(export_obj_shape.ObjShapeExporter.bl_idname, text="Shape key (.obj)").filepath = default_path
+
def register():
    # Legacy 2.5.x bpy.types.register() API.  Only the importer is wired
    # up; the exporter registration is deliberately commented out (its
    # Save() is still a stub).
    from io_obj_shape import import_obj_shape, export_obj_shape
    bpy.types.register(import_obj_shape.ObjShapeImporter)
    #bpy.types.register(export_obj_shape.ObjShapeExporter)
    bpy.types.INFO_MT_file_import.append(menu_import)
    #bpy.types.INFO_MT_file_export.append(menu_export)
+
def unregister():
    # Mirrors register(): only the import menu entry is removed; class
    # unregistration is commented out (2.5.x API) -- confirm intentional.
    from io_obj_shape import import_obj_shape, export_obj_shape
    #bpy.types.unregister(import_obj_shape.ObjShapeImporter)
    #bpy.types.unregister(export_obj_shape.ObjShapeExporter)
    bpy.types.INFO_MT_file_import.remove(menu_import)
    #bpy.types.INFO_MT_file_export.remove(menu_export)
+
+if __name__ == "__main__":
+ register()
diff --git a/Data/BlenderScript/addons/io_obj_shape/export_obj_shape.py b/Data/BlenderScript/addons/io_obj_shape/export_obj_shape.py
new file mode 100644
index 00000000..d1d77f9a
--- /dev/null
+++ b/Data/BlenderScript/addons/io_obj_shape/export_obj_shape.py
@@ -0,0 +1,26 @@
+import bpy
+from bpy.props import *
+
def Save(filepath):
    """Stub: shape-key export is not implemented yet.

    Kept so the operator and menu wiring stay in place.
    """
    return
+
class ObjShapeExporter(bpy.types.Operator):
    '''Save shape key as OBJ'''
    # NOTE(review): Save() is currently a stub, and this operator's
    # registration is commented out in __init__.py.
    bl_idname = "export_obj_shape.obj"
    bl_label = "Export shape key as OBJ"

    # Destination path; filled in by the file-select dialog.
    filepath = StringProperty(name="File Path", description="Filepath used for exporting the OBJ file", maxlen= 1024, default= "")

    check_existing = BoolProperty(name="Check Existing", description="Check and warn on overwriting existing files", default=True, options={'HIDDEN'})

    def execute(self, context):
        Save(self.properties.filepath)
        return {'FINISHED'}

    def invoke(self, context, event):
        # Pop the file selector; Blender calls execute() once a path is chosen.
        wm = context.window_manager
        wm.add_fileselect(self)
        return {'RUNNING_MODAL'}
+
+#Save("C:\\Users\\David\\Desktop\\export.anm")
+#BakeConstraints(bpy.context.scene.objects.active)
diff --git a/Data/BlenderScript/addons/io_obj_shape/import_obj_shape.py b/Data/BlenderScript/addons/io_obj_shape/import_obj_shape.py
new file mode 100644
index 00000000..5e8df89d
--- /dev/null
+++ b/Data/BlenderScript/addons/io_obj_shape/import_obj_shape.py
@@ -0,0 +1,47 @@
+import bpy
+from bpy.props import *
+from io_scene_obj.import_obj import load as load_obj
+
def Load(filepath):
    # Import `filepath` as a regular OBJ, merge the imported geometry into
    # the active object as a new shape key, then delete the temporaries.
    obj = bpy.context.scene.objects.active

    # Snapshot existing object names so the importer's new objects can be
    # identified afterwards.
    obj_dict = {}
    for object in bpy.context.scene.objects:
        obj_dict[object.name] = True

    load_obj(None, bpy.context, filepath)

    new_obj = []
    for object in bpy.context.scene.objects:
        if not object.name in obj_dict:
            new_obj.append(object)

    # The shape key is named after the file; assumes Windows-style path
    # separators -- TODO confirm.  join_shapes() copies the selected
    # object's geometry onto the active object as a shape key.
    new_obj[0].name = filepath.split('\\')[-1].split('.')[0]
    new_obj[0].select = True
    bpy.ops.object.join_shapes()

    # Remove all temporary imported objects.
    bpy.ops.object.select_all(action = 'DESELECT')
    for object in new_obj:
        object.select = True
    bpy.ops.object.delete()
+
class ObjShapeImporter(bpy.types.Operator):
    '''Load a Wavefront OBJ file as a shape key'''
    bl_idname = "import_obj_shape.obj"
    bl_label = "Import OBJ shape key"

    # Source path; filled in by the file-select dialog.
    filepath = StringProperty(name="File Path", description="Filepath used for importing the OBJ file", maxlen=1024, default="")

    def execute(self, context):
        print("Filepath:",self.properties.filepath)
        Load(self.properties.filepath)

        return {'FINISHED'}

    def invoke(self, context, event):
        # Pop the file selector; Blender calls execute() once a path is chosen.
        wm = context.window_manager
        wm.add_fileselect(self)
        return {'RUNNING_MODAL'}
+
+#Load("C:\\Users\\David\\Desktop\\Wolfire SVN\\Project\\Data\\Animations\\run.anm")
+#Load("C:\\Users\\David\\Desktop\\export.anm")
diff --git a/Data/BlenderScript/addons/io_phxbn/__init__.py b/Data/BlenderScript/addons/io_phxbn/__init__.py
new file mode 100644
index 00000000..6f40a4ae
--- /dev/null
+++ b/Data/BlenderScript/addons/io_phxbn/__init__.py
@@ -0,0 +1,38 @@
# Legacy Blender 2.5.x add-on metadata (predecessor of bl_info).
bl_addon_info = {
    'name': 'Import/Export: Phoenix bones (.phxbn)...',
    'author': 'David Rosen',
    'version': '0.1',
    'blender': (2, 5, 4),
    'location': 'File > Import/Export > PHXBN',
    'description': 'Import Phoenix Bones (.phxbn format)',
    'warning': '', # used for warning icon and text in addons panel
    'category': 'Import/Export'}
+
+import bpy
+
def menu_import(self, context):
    # File > Import menu entry; "*.phxbn" seeds the file-browser filter.
    from io_phxbn import import_phxbn
    self.layout.operator(import_phxbn.PHXBNImporter.bl_idname, text="Phoenix Bones (.phxbn)").filepath = "*.phxbn"
+
def menu_export(self, context):
    # File > Export menu entry; defaults to the .blend name with .phxbn.
    from io_phxbn import export_phxbn
    import os
    default_path = os.path.splitext(bpy.data.filepath)[0] + ".phxbn"
    self.layout.operator(export_phxbn.PHXBNExporter.bl_idname, text="Phoenix Bones (.phxbn)").filepath = default_path
+
def register():
    # Legacy 2.5.x bpy.types.register() API; wires both importer and
    # exporter into the File menus.
    from io_phxbn import import_phxbn, export_phxbn
    bpy.types.register(import_phxbn.PHXBNImporter)
    bpy.types.register(export_phxbn.PHXBNExporter)
    bpy.types.INFO_MT_file_import.append(menu_import)
    bpy.types.INFO_MT_file_export.append(menu_export)
+
def unregister():
    # Removes the menu entries; class unregistration is commented out
    # (2.5.x API) -- confirm intentional.
    from io_phxbn import import_phxbn, export_phxbn
    #bpy.types.unregister(import_phxbn.PHXBNImporter)
    #bpy.types.unregister(export_phxbn.PHXBNExporter)
    bpy.types.INFO_MT_file_import.remove(menu_import)
    bpy.types.INFO_MT_file_export.remove(menu_export)
+
+if __name__ == "__main__":
+ register()
diff --git a/Data/BlenderScript/addons/io_phxbn/export_phxbn.py b/Data/BlenderScript/addons/io_phxbn/export_phxbn.py
new file mode 100644
index 00000000..1b758f58
--- /dev/null
+++ b/Data/BlenderScript/addons/io_phxbn/export_phxbn.py
@@ -0,0 +1,608 @@
+import array
+import bpy
+from bpy.props import *
+from io_phxbn import phxbn_types
+from mathutils import Vector, Matrix
+import operator
+
def Get4ByteIntArray():
    """Return an empty array that stores exactly-4-byte integers.

    Tries 'l' first and falls back to 'i' where 'l' is 8 bytes wide.
    """
    ints = array.array('l')
    if ints.itemsize != 4:
        ints = array.array('i')
    return ints
+
def UnCenterArmatureInMesh(data, mesh_obj):
    # Shift the exported skeleton's vertices so they are expressed relative
    # to the mesh's world-space bounding-box centre (undoes the centring
    # the engine applies).
    mesh = mesh_obj.data

    min_point = array.array('f')
    max_point = array.array('f')

    # Seed the bounds with the first vertex, then expand per axis.
    for i in range(3):
        min_point.append(mesh.vertices[0].co[i])
        max_point.append(mesh.vertices[0].co[i])

    for vert in mesh.vertices:
        for i in range(3):
            min_point[i] = min(min_point[i], vert.co[i])
            max_point[i] = max(max_point[i], vert.co[i])

    mid_point = Vector();
    for i in range(3):
        mid_point[i] = ((min_point[i] + max_point[i]) * 0.5)

    # Bounds were computed in mesh-local space; transform the midpoint to
    # world space before subtracting.
    mid_point = mesh_obj.matrix_world * mid_point;

    #print("Midpoint: ",mid_point)

    for vert in data.vertices:
        for i in range(3):
            vert[i] = vert[i] - mid_point[i]
+
def to_array(array, idarray):
    """Append each value of `idarray` to `array` (in-place extend)."""
    # First parameter intentionally shadows the `array` module here.
    for value in idarray:
        array.append(value)
+
class BoneWeight:
    """A (bone id, influence weight) pair used for vertex skinning."""

    def __init__(self, id, weight):
        self.id = id          # bone index
        self.weight = weight  # influence in [0, 1]
+
def AddConnectingBones(arm_obj):
    # The PHXBN format expects a connected skeleton: wherever an export
    # bone's head does not touch its parent's tail, insert a low-mass
    # "connector" bone to bridge the gap.  Export bones live on layer 29.
    new_bones = []
    arm_data = arm_obj.data
    for bone in arm_data.edit_bones:
        # Only bridge pairs where both bone and parent are real export bones.
        if bone.name == "root" or bone.layers[29] == False or not bone.parent or bone.parent.name == "root" or bone.parent.layers[29] == False:
            continue
        if bone.head != bone.parent.tail:
            bpy.ops.armature.bone_primitive_add(name="connector")
            new_bone = arm_data.edit_bones[-1]
            new_bone.head = bone.parent.tail
            new_bone.tail = bone.head
            new_bone.parent = bone.parent
            new_bone["Mass"] = 0.01  # near-massless filler for the ragdoll
            # Place the connector on the export layer only.
            for i in range(32):
                new_bone.layers[i] = False
            new_bone.layers[29] = True
            bone.parent = new_bone
            new_bones.append(new_bone.name)

    # Mode toggle forces Blender to flush the edit-bone changes.
    bpy.context.scene.update()
    bpy.ops.object.mode_set(mode='OBJECT')
    bpy.ops.object.mode_set(mode='EDIT')
+
def EnforceBoneNaming(arm_data):
    # Rename every export bone (layer 29, excluding "root") to the
    # canonical "Bone_<n>" scheme the exporter indexes by.  Bones already
    # named that way keep their index; others get the lowest free indices.
    # Returns {old name: new name} so constraints can be re-resolved.
    name_shift = {}
    bone_labels = {}
    for bone in arm_data.edit_bones:
        if bone.name == "root" or bone.layers[29] == False:
            continue
        if bone.name.split("_")[0] == "Bone":
            # Already canonical: reserve its index, identity mapping.
            bone_id = int(bone.name.split("_")[-1])
            bone_labels[bone_id] = True
            name_shift[bone.name] = bone.name

    index = 0
    for bone in arm_data.edit_bones:
        if bone.name == "root" or bone.layers[29] == False:
            continue
        if bone.name.split("_")[0] != "Bone":
            # Skip indices reserved above.
            while index in bone_labels:
                index = index + 1
            name_shift[bone.name] = "Bone_"+str(index)
            bone.name = name_shift[bone.name]
            index = index+1
    return name_shift
+
+
def GetClosestRealChildren(bone):
    """Return the nearest descendants of `bone` that are export bones
    (layer 29), recursing straight through helper bones on other layers.
    """
    # (Original used the builtin-shadowing local name `list`.)
    found = []
    for child in bone.children:
        if not child.layers[29]:
            # Helper bone: substitute its own real descendants.
            found.extend(GetClosestRealChildren(child))
            continue
        found.append(child)
    return found
+
class Point():
    """A skeleton joint location: records which bones start here
    (head_of), which bone ends here (tail_of), and the point's assigned
    index (-1 until numbered)."""

    def __init__(self):
        self.head_of = []
        self.tail_of = []
        self.id = -1
+
# Module-level registry: bone name -> {"Point IDs": array([head_id, tail_id])}.
# Filled by CreateBonePointDictionary() and consumed by GetParents().
bone_dict = {}
+
#Create a dictionary like this: bone_dict["Bone_0"] = {"Point IDs",[point, point]}
def CreateBonePointDictionary(bones):
    # Walk the export skeleton breadth-first from "root", creating one
    # Point per joint, then record each bone's (head point id, tail point
    # id) pair in the module-level bone_dict.
    point_head = {}
    point_tail = {}

    # The shared origin point: head of every top-level export bone.
    first_point = Point()
    first_point.head_of = GetClosestRealChildren(bones["root"])

    points = [first_point]

    to_process = GetClosestRealChildren(bones["root"])

    # BFS: each bone ends at its own tail point, which is also the head
    # of its nearest real children.
    while len(to_process) > 0:
        bone = to_process.pop(0)
        point = Point()
        point.tail_of.append(bone)
        new_list = GetClosestRealChildren(bone)
        for line in new_list:
            point.head_of.append(line)
            to_process.append(line)
        points.append(point)

    # Invert: bone -> its head/tail point.
    for point in points:
        for bone in point.head_of:
            point_head[bone] = point
        for bone in point.tail_of:
            point_tail[bone] = point

    # Number the points in creation (BFS) order.
    index = 0
    for point in points:
        if point.id != -1:
            continue
        point.id = index
        index += 1

    for bone in bones:
        if not bone in point_head or not bone in point_tail:
            continue
        point_ids = Get4ByteIntArray()
        point_ids.append(point_head[bone].id)
        point_ids.append(point_tail[bone].id)
        if not bone.name in bone_dict:
            bone_dict[bone.name] = {}
        #print("Added ", bone.name)
        bone_dict[bone.name]["Point IDs"] = point_ids

    #print("Created bone dict")
    #print(bone_dict)
+
def GetParentIDs(data):
    # Build parent_ids[n] = parent bone index for each "Bone_<n>" export
    # bone (-1 for top-level bones), stored as the armature custom
    # property "parent_ids".
    parent_ids = Get4ByteIntArray()
    for bone in data.edit_bones:
        if bone.name.split("_")[0] != "Bone":
            continue
        bone_id = int(bone.name.split("_")[-1])
        # Grow the array (default -1 = no parent) up to this index.
        while bone_id >= len(parent_ids):
            parent_ids.append(-1)
        if not bone.parent or bone.parent.name == "root" or bone.parent.layers[29] == False:
            continue
        parent_bone_id = int(bone.parent.name.split("_")[-1])
        parent_ids[bone_id] = parent_bone_id

    data["parent_ids"] = parent_ids
+
def GetParents(data):
    # Build parents[tail point id] = head point id for each bone that has
    # an entry in bone_dict (set by CreateBonePointDictionary), stored as
    # the armature custom property "parents".  -1 means no parent point.
    parents = Get4ByteIntArray()
    for bone in data.edit_bones:
        if not bone_dict.get(bone.name) or not bone_dict[bone.name].get("Point IDs"):
            continue
        point_ids = bone_dict[bone.name]["Point IDs"]
        while len(parents) <= point_ids[1]:
            parents.append(-1)
        parents[point_ids[1]] = point_ids[0]
    data["parents"] = parents
+
def GetBoneMatZ(edit_bone):
    # Fallback basis for bones nearly parallel to world Y (see GetBoneMat):
    # uses world Z as the reference axis.  Duplicated from io_anm's
    # import_anm.py.  Legacy 2.5x mathutils: normalize()/cross() return
    # vectors -- confirm target Blender version.
    z_vec = (edit_bone.tail - edit_bone.head).normalize()
    x_vec = Vector((0,0,1))
    y_vec = x_vec.cross(z_vec).normalize()
    x_vec = y_vec.cross(z_vec).normalize()

    # Flat row-major 4x4, swizzled Blender (Z-up) -> engine (Y-up).
    return [x_vec[0], x_vec[2], -x_vec[1], 0,
            y_vec[0], y_vec[2], -y_vec[1], 0,
            z_vec[0], z_vec[2], -z_vec[1], 0,
            0,0,0,1]
+
+
def GetBoneMat(edit_bone):
    # Rest-pose basis for an edit bone as a flat row-major 4x4 in engine
    # (Y-up) convention.  Duplicated from io_anm's import_anm.py.
    z_vec = (edit_bone.tail - edit_bone.head).normalize()
    y_vec = Vector((0,1,0))
    if abs(z_vec.dot(y_vec)) > 0.95:
        # Nearly parallel to world Y: use the world-Z reference variant.
        return GetBoneMatZ(edit_bone)
    x_vec = z_vec.cross(y_vec).normalize()
    y_vec = z_vec.cross(x_vec).normalize()

    return [x_vec[0], x_vec[2], -x_vec[1], 0,
            y_vec[0], y_vec[2], -y_vec[1], 0,
            z_vec[0], z_vec[2], -z_vec[1], 0,
            0,0,0,1]
+
def GetJoints(arm_obj, name_shift):
    """Derive physics joints from "RGDL_*" LIMIT_ROTATION pose constraints.

    Degrees of freedom decide the joint type: 0 -> fixed, 1 -> hinge
    (axis + min/max x), >1 -> angular motor (all three min/max pairs).
    `name_shift` is EnforceBoneNaming()'s old->canonical name map, used to
    resolve a partner bone named in the constraint.
    """
    joints = []
    for bone in arm_obj.pose.bones:
        for constraint in bone.constraints:
            name_parts = constraint.name.split("_")
            if constraint.type == 'LIMIT_ROTATION' and constraint.owner_space == 'LOCAL' and name_parts[0] == "RGDL":
                # Count rotational degrees of freedom: an axis whose min
                # and max limits differ is free.
                freedom = 0
                if constraint.min_x != constraint.max_x:
                    freedom += 1
                if constraint.min_y != constraint.max_y:
                    freedom += 1
                if constraint.min_z != constraint.max_z:
                    freedom += 1

                joint = phxbn_types.Joint()
                joint.bone_ids.append(int(bone.name.split("_")[1]))
                if bone.parent and len(bone.parent.name.split("_")) > 1:
                    joint.bone_ids.append(int(bone.parent.name.split("_")[1]))
                else:
                    joint.bone_ids.append(0)

                # "RGDL_<bonename>" names an explicit partner bone that
                # overrides the parent as the second joint body.
                if name_parts[1] != "Limit Rotation":
                    #print("Trying to find: "+name_parts[1])
                    if name_shift.get(name_parts[1]) and \
                       arm_obj.pose.bones.get(name_shift[name_parts[1]]):
                        #print("Constraint name: "+name_parts[1])
                        #print("Constraint name shift: "+name_shift[name_parts[1]])
                        #print("Constraint name suffix: "+name_shift[name_parts[1]].split("_")[1])
                        joint.bone_ids[1] = int(name_shift[name_parts[1]].split("_")[1])
                    else :
                        print("Could not find: "+name_parts[1])

                if freedom == 0:
                    joint.type.append(phxbn_types._fixed_joint)
                    joints.append(joint)
                if freedom == 1:
                    joint.type.append(phxbn_types._hinge_joint)
                    # Hinge axis: the bone's world-space X axis, swizzled
                    # to the engine's Y-up convention.
                    mat = arm_obj.matrix_world * bone.matrix
                    axis = Vector((mat[0][0], mat[0][1], mat[0][2])).normalize()
                    joint.axis.append(axis[0])
                    joint.axis.append(axis[2])
                    joint.axis.append(-axis[1])
                    joint.stop_angles.append(constraint.min_x)
                    joint.stop_angles.append(constraint.max_x)
                    joints.append(joint)
                if freedom > 1:
                    joint.type.append(phxbn_types._amotor_joint)
                    # Stop angle order expected by the engine: y, x, z.
                    joint.stop_angles.append(constraint.min_y)
                    joint.stop_angles.append(constraint.max_y)
                    joint.stop_angles.append(constraint.min_x)
                    joint.stop_angles.append(constraint.max_x)
                    joint.stop_angles.append(constraint.min_z)
                    joint.stop_angles.append(constraint.max_z)
                    joints.append(joint)

    return joints
    # NOTE(review): the string below is unreachable dead code kept from an
    # earlier revision (reads "special joint" custom properties).
    '''if not arm_data.get("num_special_joints"):
        arm_data["num_special_joints"] = 0
    num_joints = arm_data["num_special_joints"]
    for num in range(num_joints):
        joint = phxbn_types.Joint()
        name = "joint_" + str(num)
        joint.type.append(arm_data[name + "_type"])
        if joint.type[0] == phxbn_types._amotor_joint:
            to_array(joint.stop_angles, arm_data[name + "_angles"])
        elif joint.type[0] == phxbn_types._hinge_joint:
            to_array(joint.stop_angles, arm_data[name + "_angles"])
        to_array(joint.bone_ids, arm_data[name + "_bone_ids"])
        if joint.type[0] == phxbn_types._hinge_joint:
            to_array(joint.axis, arm_data[name+"_axis"])
        data.joints.append(joint)'''
+
def GetIKBones(arm_obj, name_shift):
    """Collect targetless IK constraints from the armature's pose bones.

    Only bones on armature layer 29 are considered (same filter the rest of
    the exporter applies). name_shift is accepted for symmetry with
    GetJoints but is not used here.

    Returns a list of phxbn_types.IKBone carrying the constraint name, the
    end bone's numeric id (from its "Bone_N" name), and the chain length.
    """
    ik_bones = []
    for bone in arm_obj.pose.bones:
        if arm_obj.data.edit_bones[bone.name].layers[29] == False:
            continue
        for constraint in bone.constraints:
            # A targetless IK constraint marks an exported IK chain.
            if constraint.type == 'IK' and not constraint.target:
                bone_id = int(bone.name.split("_")[-1])
                ik_length = constraint.chain_count
                #print("IK bone "+constraint.name+": bone_"+str(bone_id)+" with chain count: "+str(ik_length))
                ik_bone = phxbn_types.IKBone()
                ik_bone.name = constraint.name
                ik_bone.bone = bone_id
                ik_bone.chain = ik_length
                ik_bones.append(ik_bone)
    return ik_bones
+
def PHXBNFromArmature():
    """Build a phxbn_types.PHXBNdata from the active armature object.

    The active object must be an ARMATURE whose first child is a MESH.
    Works on a temporary duplicate (so the original is untouched), renames
    bones to the canonical "root"/"Bone_N" scheme, gathers point/bone/joint
    data plus per-vertex skinning weights from the mesh, then deletes the
    duplicate and restores the original active object.

    Returns the populated PHXBNdata, or None on error.
    """
    scene = bpy.context.scene
    old_armature_object = scene.objects.active

    if not old_armature_object or old_armature_object.type != 'ARMATURE':
        print ("Must select armature before exporting PHXBN")
        return

    if len(old_armature_object.children) == 0 or old_armature_object.children[0].type != 'MESH':
        print("Armature must be the parent of a mesh.")
        return

    #Make a copy of the object so we don't mess up the original
    bpy.ops.object.mode_set(mode='OBJECT')
    bpy.ops.object.duplicate()
    arm_obj = scene.objects.active
    print ("Reading PHXBN from armature: ", arm_obj.name)

    #Set to edit mode so we can add connecting bones
    bpy.ops.object.mode_set(mode='EDIT')
    AddConnectingBones(arm_obj)

    #Extract metadata from armature object
    arm_data = arm_obj.data
    data = phxbn_types.PHXBNdata()

    # Fall back to format version 11 / rigging stage 1 when the armature
    # carries no explicit metadata.
    if arm_data.get("version"):
        data.version.append(arm_data["version"])
    else:
        data.version.append(11)

    if arm_data.get("rigging_stage"):
        data.rigging_stage.append(arm_data["rigging_stage"])
    else:
        data.rigging_stage.append(1)

    #Rename all bones to "root" or "Bone_X" where X is bone number
    name_shift = EnforceBoneNaming(arm_data)

    #Make an easy way to look up the points attached to each bone
    CreateBonePointDictionary(arm_data.edit_bones)

    #Count the number of points and bones
    # The root bone and bones not on layer 29 are excluded from export.
    num_points = 0
    num_bones = 0
    for bone in arm_data.edit_bones:
        if bone.name == "root" or bone.layers[29] == False:
            continue
        points = Get4ByteIntArray()
        to_array(points, bone_dict[bone.name]["Point IDs"])
        num_points = max(num_points, points[0]+1)
        num_points = max(num_points, points[1]+1)
        num_bones = num_bones + 1

    #Create storage space for bone information
    for i in range(num_bones):
        points = Get4ByteIntArray()
        data.bones.append(points)
        data.bone_mass.append(0)
        data.bone_com.append(0)
        data.bone_com.append(0)
        data.bone_com.append(0)
        data.bone_parents.append(-1)
        data.bone_swap.append(0)
        for j in range(16):
            data.bone_mat.append(0)

    #Create space for point spatial information
    vertices = array.array('f')
    for i in range(num_points*3):
        vertices.append(0)

    #Fill in the bone information
    num = 0
    for bone in arm_data.edit_bones:
        if bone.name == "root" or bone.layers[29] == False:
            continue
        points = Get4ByteIntArray()
        to_array(points, bone_dict[bone.name]["Point IDs"])
        # Bone slot is taken from the "Bone_N" suffix, not iteration order.
        bone_id = int(bone.name.split('_')[-1])
        data.bones[bone_id].append(points[0])
        data.bones[bone_id].append(points[1])
        vertices[points[0]*3+0] = bone.head[0]
        vertices[points[0]*3+1] = bone.head[1]
        vertices[points[0]*3+2] = bone.head[2]
        vertices[points[1]*3+0] = bone.tail[0]
        vertices[points[1]*3+1] = bone.tail[1]
        vertices[points[1]*3+2] = bone.tail[2]
        # Custom bone properties override the defaults where present.
        mass = 0.1
        if bone.get("Mass"):
            mass = bone["Mass"]
        data.bone_mass[bone_id] = mass
        COM = [0.0,0.0,0.0]
        if bone.get("COM"):
            COM = bone["COM"]
        data.bone_com[bone_id*3+0] = COM[0]
        data.bone_com[bone_id*3+1] = COM[1]
        data.bone_com[bone_id*3+2] = COM[2]
        mat = GetBoneMat(bone)
        if bone.get("mat"):
            mat = bone["mat"]
        for j in range(16):
            data.bone_mat[bone_id*16+j] = mat[j]
        # "Swap" restores the head/tail point order of the original file
        # (see the matching swap on import).
        swap = False
        if bone.get("Swap"):
            swap = bone["Swap"]
        data.bone_swap[bone_id] = swap
        if(data.bone_swap[bone_id]):
            data.bones[bone_id][0], data.bones[bone_id][1] = \
                data.bones[bone_id][1], data.bones[bone_id][0]
        if not bone.parent or bone.parent.name == "root" or bone.parent.layers[29] == False:
            data.bone_parents[bone_id] = -1
        else:
            parent = int(bone.parent.name.split('_')[-1])
            data.bone_parents[bone_id] = parent
        num = num + 1

    #Store the position of each point
    for i in range(num_points):
        vertex = array.array('f')
        vertex.append(vertices[i*3+0])
        vertex.append(vertices[i*3+1])
        vertex.append(vertices[i*3+2])
        data.vertices.append(vertex)

    #Extract joint and IK bone info
    data.joints = GetJoints(arm_obj, name_shift)
    data.ik_bones = GetIKBones(arm_obj, name_shift)

    #Get the mesh object and subtract its midpoint from each skeleton point position
    mesh_obj = old_armature_object.children[0]
    print("Uncentering armature in mesh")
    UnCenterArmatureInMesh(data, mesh_obj)

    #Convert Blender coordinates back to Phoenix coordinates
    for vertex in data.vertices:
        vertex[1], vertex[2] = vertex[2], -vertex[1]

    #Calculate the parent hierarchies for each bone and point
    # Recompute only when the cached arrays are missing or stale.
    if not arm_data.get("parent_ids") or len(arm_data["parent_ids"]) != num_bones:
        GetParentIDs(arm_data)
    to_array(data.parent_ids, arm_data["parent_ids"])
    if not arm_data.get("parents") or len(arm_data["parents"]) != num_points:
        GetParents(arm_data)
    to_array(data.parents, arm_data["parents"])

    #Extract the bone ids and weights for each mesh vertex
    mesh = mesh_obj.data
    num_verts = len(mesh.vertices)
    bone_ids = []
    bone_weights = []
    count = 0;
    for vert in mesh.vertices:
        new_bone_weights = []
        #For each vertex group, check if that bone is a real deformation bone.
        #If so, record bone id and weight for this vertex
        for i in range(len(vert.groups)):
            temp_name = mesh_obj.vertex_groups[vert.groups[i].group].name
            if not temp_name in name_shift:
                continue
            group_name = name_shift[temp_name]
            new_bone_weights.append(BoneWeight(int(group_name.split('_')[-1]), \
                                               vert.groups[i].weight))
        #Sort bone weights, and keep the four greatest
        new_bone_weights.sort(key=operator.attrgetter('weight'), reverse=True)
        new_bone_weights = new_bone_weights[0:4]
        while len(new_bone_weights)<4:
            new_bone_weights.append(BoneWeight(0,0.0))
        #Normalize the bone weights
        total = 0.0
        for i in range(4):
            total = total + new_bone_weights[i].weight
        for i in range(4):
            if total != 0:
                new_bone_weights[i].weight = new_bone_weights[i].weight / total
            else:
                print("Error: vertex ",count," has no weights!")
        #Record weights and id in list
        for bone_weight in new_bone_weights:
            bone_ids.append(bone_weight.id)
            bone_weights.append(bone_weight.weight)
        #if count > 100 and count < 130:
        #    print("Vertex ",count,": ",new_bone_weights[0].weight,", ",new_bone_weights[1].weight,", ",new_bone_weights[2].weight,", ",new_bone_weights[3].weight)
        count = count + 1

    #Store the vertex weights in the form of an array of face vertices
    # (the file stores 4 weights/ids per face corner, 3 corners per face).
    for face in mesh.faces:
        for j in range(3):
            for i in range(4):
                index = face.vertices[j]*4+i
                data.bone_ids.append(bone_ids[index])
                data.bone_weights.append(bone_weights[index])

    print("Num faces: ", len(mesh.faces))
    print("Num bone weights: ", len(data.bone_weights))

    # Delete the temporary duplicate and restore the original selection.
    bpy.ops.object.mode_set(mode='OBJECT')
    bpy.ops.object.delete()
    scene.objects.active = old_armature_object

    return data
+
def WriteInt(file, val):
    """Append a single value to *file* as one 4-byte binary integer."""
    buf = Get4ByteIntArray()
    buf.append(val)
    buf.tofile(file)
+
def WritePHXBN(file_path, data):
    """Serialize a phxbn_types.PHXBNdata to a binary file.

    Layout (all ints 4 bytes, floats 4 bytes, mirroring ReadPHXBN):
    version, rigging stage, point count + points, point parents,
    bone count + bones, bone parents/mass/COM/matrices, vertex count,
    bone weights + ids, parent ids, joint count + joints, IK bone count
    + IK bone records (bone, chain, name length, utf-8 name bytes).
    """
    file = open(file_path, 'wb')

    print ("Saving phxbn file: ", file_path)
    data.version.tofile(file)
    print ('Version: ', data.version[0])

    data.rigging_stage.tofile(file)
    print ('Rigging stage: ', data.rigging_stage[0])

    num_points = Get4ByteIntArray()
    num_points.append(len(data.vertices))
    num_points.tofile(file)
    print ('Num points: ', num_points[0])

    for vertex in data.vertices:
        #print(vertex)
        vertex.tofile(file)

    data.parents.tofile(file)

    num_bones = Get4ByteIntArray()
    num_bones.append(len(data.bones))
    num_bones.tofile(file)
    print ('Num bones: ', num_bones[0])

    for bone in data.bones:
        bone.tofile(file)

    data.bone_parents.tofile(file)

    data.bone_mass.tofile(file)
    data.bone_com.tofile(file)
    data.bone_mat.tofile(file)

    # Four weight/id pairs are stored per face corner, so the corner count
    # is len(bone_weights) / 4.
    num_vertices = Get4ByteIntArray()
    num_vertices.append(len(data.bone_weights)//4)
    num_vertices.tofile(file)

    data.bone_weights.tofile(file)
    data.bone_ids.tofile(file)

    data.parent_ids.tofile(file)

    num_joints = Get4ByteIntArray()
    num_joints.append(len(data.joints))
    num_joints.tofile(file)

    print("Saving ", num_joints[0], " joints")

    # Stop angles are written only for amotor (6) and hinge (2) joints;
    # the hinge additionally stores its 3-float axis after the bone ids.
    for joint in data.joints:
        joint.type.tofile(file)
        if joint.type[0] == phxbn_types._amotor_joint:
            joint.stop_angles.tofile(file)
        elif joint.type[0] == phxbn_types._hinge_joint:
            joint.stop_angles.tofile(file)
        joint.bone_ids.tofile(file)
        if joint.type[0] == phxbn_types._hinge_joint:
            joint.axis.tofile(file)

    num_ik_bones = len(data.ik_bones)
    WriteInt(file, num_ik_bones)
    for ik_bone in data.ik_bones:
        WriteInt(file, ik_bone.bone)
        WriteInt(file, ik_bone.chain)
        # Name is stored as a length-prefixed utf-8 byte string.
        string_array = array.array("B")
        string_array.fromstring(ik_bone.name.encode("utf-8"))
        WriteInt(file, len(string_array))
        string_array.tofile(file)

    file.close()
+
def Save(filepath):
    """Export the active armature as a PHXBN file at *filepath*.

    Does nothing when no exportable armature/mesh pair is selected.
    """
    phxbn_data = PHXBNFromArmature()
    if phxbn_data:
        WritePHXBN(filepath, phxbn_data)
+
class PHXBNExporter(bpy.types.Operator):
    '''Save Phoenix Bones armature'''
    bl_idname = "export_mesh.phxbn"
    bl_label = "Export PHXBN"

    # Operator properties; filepath is filled in by the file-select dialog.
    filepath = StringProperty(name="File Path", description="Filepath used for exporting the PHXBN file", maxlen= 1024, default= "")

    check_existing = BoolProperty(name="Check Existing", description="Check and warn on overwriting existing files", default=True, options={'HIDDEN'})

    def execute(self, context):
        # Export the active armature to the chosen path.
        Save(self.properties.filepath)
        return {'FINISHED'}

    def invoke(self, context, event):
        # Open the file selector; Blender calls execute() once a path is chosen.
        wm = context.window_manager
        wm.add_fileselect(self)
        return {'RUNNING_MODAL'}
+
+#Save("C:\\Users\\David\\Desktop\\export.phxbn")
diff --git a/Data/BlenderScript/addons/io_phxbn/import_phxbn.py b/Data/BlenderScript/addons/io_phxbn/import_phxbn.py
new file mode 100644
index 00000000..42cf757a
--- /dev/null
+++ b/Data/BlenderScript/addons/io_phxbn/import_phxbn.py
@@ -0,0 +1,560 @@
+import bpy
+from bpy.props import *
+import array
+import mathutils
+import math
+from mathutils import Vector
+from io_phxbn import phxbn_types
+
def Get4ByteIntArray():
    """Return an empty signed-integer array guaranteed to use 4-byte items.

    Typecode 'l' is 8 bytes wide on LP64 platforms, so fall back to 'i'
    whenever 'l' does not give the 4-byte items the file format requires.
    """
    ints = array.array('l')
    if ints.itemsize != 4:
        ints = array.array('i')
    return ints
+
def WriteToTextFile(file_path, data):
    """Dump a phxbn_types.PHXBNdata to *file_path* as readable text.

    Debug aid only: writes each section (vertices, parents, bones, masses,
    COMs, parent ids, joints) in the order it appears in the binary format.
    """
    file = open(file_path, 'w')
    file.write('Version: '+str(data.version[0])+'\n')
    file.write('Rigging stage: '+str(data.rigging_stage[0])+'\n')
    file.write('Num points: '+str(len(data.vertices))+'\n')

    file.write('\nVertices:\n')
    for vertex in data.vertices:
        file.write(str(vertex)+'\n')

    file.write('\nParents:\n')
    for parent in data.parents:
        file.write(str(parent)+'\n')

    file.write('\nNum bones: '+str(len(data.bones))+'\n')

    file.write('\nBones:\n')
    for bone in data.bones:
        file.write(str(bone)+'\n')

    file.write('\nBone parents:\n')
    for bone_parent in data.bone_parents:
        file.write(str(bone_parent)+'\n')

    file.write('\nBone mass:\n')
    for bone_mass in data.bone_mass:
        file.write(str(bone_mass)+'\n')

    file.write('\nBone COM:\n')
    for bone_com in data.bone_com:
        file.write(str(bone_com)+'\n')

    file.write('Parent IDs:\n')
    for parent_id in data.parent_ids:
        file.write(str(parent_id)+'\n')

    file.write('\nNum joints: '+str(len(data.joints))+'\n')

    i = 0
    for joint in data.joints:
        file.write('\nJoint '+str(i)+':\n')
        file.write('Joint type: '+str(joint.type)+'\n')
        file.write('Stop angles: '+str(joint.stop_angles)+'\n')
        file.write('Bone ids: '+str(joint.bone_ids)+'\n')
        file.write('Axis: '+str(joint.axis)+'\n')
        i = i + 1

    file.close()
+
def ReadPHXBN(file_path, mesh_obj):
    """Parse a binary phxbn file into a phxbn_types.PHXBNdata.

    mesh_obj is the mesh the skeleton will be attached to; its face and
    vertex counts determine how many skinning weights to read and how the
    per-face-corner weights are folded back into per-vertex weights.

    Returns the populated PHXBNdata, or None when the file's version or
    rigging stage is unsupported.
    """
    # File loading stuff
    # Open the file for importing
    file = open(file_path, 'rb')

    data = phxbn_types.PHXBNdata();

    print ("\nLoading phxbn file: ", file_path)
    data.version.fromfile(file, 1)
    print ('Version: ', data.version[0], '\n')

    if data.version[0] < 8:
        print('PHXBN must be at least version 8.')
        return

    data.rigging_stage.fromfile(file, 1)
    print ('Rigging stage: ', data.rigging_stage[0], '\n')

    if data.rigging_stage[0] != 1:
        print('Rigging stage should be 1')
        return

    num_points = Get4ByteIntArray()
    num_points.fromfile(file, 1)
    print ('Num points: ', num_points[0], '\n')

    for i in range(0,num_points[0]):
        vertex = array.array('f')
        vertex.fromfile(file, 3)
        #print(vertex)
        #Convert Phoenix coordinates to Blender coordinates
        vertex[1], vertex[2] = -vertex[2], vertex[1]
        data.vertices.append(vertex)

    for i in range(0,num_points[0]):
        parent = Get4ByteIntArray()
        parent.fromfile(file, 1)
        data.parents.append(parent[0])

    num_bones = Get4ByteIntArray()
    num_bones.fromfile(file, 1)
    print ('Num bones: ', num_bones[0], '\n')

    for i in range(0,num_bones[0]):
        bone = Get4ByteIntArray()
        bone.fromfile(file, 2)
        # If the bone's first point is parented to its second, the points
        # are stored tail-first; swap them and remember the swap so the
        # exporter can restore the original order.
        if data.parents[bone[0]]==bone[1]:
            temp = bone[0]
            bone[0] = bone[1]
            bone[1] = temp
            data.bone_swap.append(1)
        else:
            data.bone_swap.append(0)
        data.bones.append(bone)

    for i in range(0,num_bones[0]):
        bone_parent = Get4ByteIntArray()
        bone_parent.fromfile(file, 1)
        data.bone_parents.append(bone_parent[0])

    data.bone_mass.fromfile(file, num_bones[0])
    data.bone_com.fromfile(file, num_bones[0]*3)
    data.bone_mat.fromfile(file, num_bones[0]*16)

    mesh = mesh_obj.data

    # Weights/ids are stored per face corner (4 each, 3 corners per face).
    num_faces = len(mesh.faces)
    num_verts = num_faces * 3
    #print ("Loading ", num_verts, " bone weights and ids")
    file_bone_weights = array.array('f')
    file_bone_ids = array.array('f')
    file_bone_weights.fromfile(file, num_verts*4)
    file_bone_ids.fromfile(file, num_verts*4)

    # Fold face-corner weights back into per-vertex weights via the mesh's
    # face->vertex indices (later corners overwrite earlier ones).
    num_verts = len(mesh.vertices)
    data.bone_weights = [0 for i in range(num_verts*4)]
    data.bone_ids = [0 for i in range(num_verts*4)]
    for face_id in range(num_faces):
        for face_vert_num in range(3):
            for i in range(4):
                data.bone_weights[mesh.faces[face_id].vertices[face_vert_num]*4+i] =file_bone_weights[face_id*12 + face_vert_num * 4 + i]
                data.bone_ids[mesh.faces[face_id].vertices[face_vert_num]*4+i] =file_bone_ids[face_id*12 + face_vert_num * 4 + i]

    data.parent_ids.fromfile(file, num_bones[0])

    num_joints = Get4ByteIntArray()
    num_joints.fromfile(file, 1)

    #print("Loading ", num_joints, " joints")

    # Stop angles exist only for amotor (6) and hinge (2) joints; the
    # hinge also stores a 3-float axis after the bone ids.
    for i in range(num_joints[0]):
        joint = phxbn_types.Joint()
        joint.type.fromfile(file, 1)
        if joint.type[0] == phxbn_types._amotor_joint:
            joint.stop_angles.fromfile(file, 6)
        elif joint.type[0] == phxbn_types._hinge_joint:
            joint.stop_angles.fromfile(file, 2)
        joint.bone_ids.fromfile(file, 2)
        if joint.type[0] == phxbn_types._hinge_joint:
            joint.axis.fromfile(file, 3)
        data.joints.append(joint)

    file.close()

    #WriteToTextFile(file_path+"_text.txt", data)
    return data
+
def GetMeshObj():
    """Return the first selected object of type 'MESH', or None."""
    for candidate in bpy.context.selected_objects:
        if candidate.type == 'MESH':
            return candidate
    return None
+
def GetMeshMidpoint(data, mesh_obj):
    """Return the center of mesh_obj's axis-aligned bounding box.

    The result is an array.array('f') of three floats. The *data* argument
    is unused; it is kept so existing call sites keep working.
    """
    verts = mesh_obj.data.vertices

    min_point = array.array('f')
    max_point = array.array('f')
    for axis in range(3):
        min_point.append(verts[0].co[axis])
        max_point.append(verts[0].co[axis])

    for vert in verts:
        for axis in range(3):
            if vert.co[axis] < min_point[axis]:
                min_point[axis] = vert.co[axis]
            if vert.co[axis] > max_point[axis]:
                max_point[axis] = vert.co[axis]

    mid_point = array.array('f')
    for axis in range(3):
        mid_point.append((min_point[axis] + max_point[axis]) * 0.5)

    return mid_point
+
def GetD6Mat(edit_bone):
    """Build a 4x4 row-major matrix (flat 16-list) whose third row is the
    bone direction in Phoenix coordinates (x, z, -y).

    NOTE(review): assumes the Blender 2.5x mathutils API where
    Vector.normalize() returns the vector (later versions return None) --
    confirm against the target Blender version.
    """
    vec = (edit_bone.tail - edit_bone.head).normalize()
    vec = Vector((vec[0], vec[2], -vec[1]))  # Blender -> Phoenix axes
    # Slightly off-axis reference avoids a zero cross product for bones
    # aligned exactly with the Y axis.
    right = Vector((0.0001,1.0001,0.000003))
    up = vec.cross(right).normalize()
    right = up.cross(vec).normalize()

    return [right[0], right[1], right[2], 0,
            up[0], up[1], up[2], 0,
            vec[0],vec[1], vec[2], 0,
            0,0,0,1]
+
def AddFixedJoint(arm_obj,joint):
    """Represent a phxbn fixed joint as a fully-limited LIMIT_ROTATION
    constraint (influence 0, local space) on the joint's child bone.

    When the two bones are not parented to each other, the constraint is
    renamed after the first bone -- presumably so the pairing can be
    recovered on export; confirm against GetJoints in export_phxbn.
    """
    joint_bones = []
    joint_bones.append(arm_obj.pose.bones["Bone_"+str(joint.bone_ids[0])])
    joint_bones.append(arm_obj.pose.bones["Bone_"+str(joint.bone_ids[1])])

    # Pick whichever bone is the child of the other; default to the second
    # bone when neither is parented to the other.
    if joint_bones[0].parent == joint_bones[1]:
        child = joint_bones[0]
        parented = True
    elif joint_bones[1].parent == joint_bones[0]:
        child = joint_bones[1]
        parented = True
    else:
        child = joint_bones[1]
        parented = False

    constraint = child.constraints.new('LIMIT_ROTATION')
    constraint.use_limit_x = True
    constraint.use_limit_y = True
    constraint.use_limit_z = True
    constraint.influence = 0.0  # marker only; must not affect posing
    constraint.owner_space = 'LOCAL'

    if parented == False:
        constraint.name = joint_bones[0].name
+
def AddAmotorJoint(arm_obj,joint):
    """Represent a phxbn amotor joint as a LIMIT_ROTATION constraint on the
    joint's child bone, and roll the bone so its local axes line up with the
    joint's stored orientation.

    The stop_angles array is ordered (y_min, y_max, x_min, x_max, z_min,
    z_max), matching GetJoints on the export side.
    """
    joint_bones = []
    joint_bones.append(arm_obj.pose.bones["Bone_"+str(joint.bone_ids[0])])
    joint_bones.append(arm_obj.pose.bones["Bone_"+str(joint.bone_ids[1])])

    # Pick whichever bone is the child of the other; default to the second
    # bone when neither is parented to the other.
    if joint_bones[0].parent == joint_bones[1]:
        child = joint_bones[0]
        parented = True
    elif joint_bones[1].parent == joint_bones[0]:
        child = joint_bones[1]
        parented = True
    else:
        child = joint_bones[1]
        parented = False

    constraint = child.constraints.new('LIMIT_ROTATION')
    constraint.min_x = joint.stop_angles[2]
    constraint.max_x = joint.stop_angles[3]
    constraint.min_y = joint.stop_angles[0]
    constraint.max_y = joint.stop_angles[1]
    constraint.min_z = joint.stop_angles[4]
    constraint.max_z = joint.stop_angles[5]
    constraint.use_limit_x = True
    constraint.use_limit_y = True
    constraint.use_limit_z = True
    constraint.influence = 0.0  # marker only; must not affect posing
    constraint.owner_space = 'LOCAL'

    if parented == False:
        constraint.name = joint_bones[0].name

    # Derive the joint axis from the bone's D6 matrix (converted back to
    # Blender coordinates) and roll the edit bone so its local X/Y axes
    # align with it.
    matrix = GetD6Mat(arm_obj.data.edit_bones[child.name])
    joint_axis = Vector((matrix[0], -matrix[2], matrix[1]))

    mat = arm_obj.matrix_world * child.matrix
    x_axis = Vector((mat[0][0], mat[0][1], mat[0][2])).normalize()
    y_axis = Vector((mat[2][0], mat[2][1], mat[2][2])).normalize()
    joint_conv = Vector((-x_axis.dot(joint_axis), -y_axis.dot(joint_axis), 0)).normalize()
    roll = math.atan2(joint_conv[0], joint_conv[1])
    arm_obj.data.edit_bones[child.name].roll += roll

    child.rotation_mode = 'YXZ'


    '''mat = arm_obj.matrix_world * child.matrix
    bpy.ops.object.add(type='EMPTY', location=(mat[3][0], mat[3][1], mat[3][2]))
    ob = bpy.context.scene.objects.active
    ob.empty_draw_type = 'ARROWS'
    ob.empty_draw_size = 0.1

    mat = arm_obj.matrix_world * child.matrix
    bpy.ops.object.add(type='EMPTY', location=(mat[3][0] + y_axis[0]*0.1, mat[3][1] + y_axis[1]*0.1, mat[3][2] + y_axis[2]*0.1))
    ob = bpy.context.scene.objects.active
    ob.empty_draw_type = 'ARROWS'
    ob.empty_draw_size = 0.1

    bpy.context.scene.objects.active = arm_obj
    bpy.ops.object.mode_set(mode='EDIT')'''
+
def AddHingeJoint(arm_obj,joint):
    """Represent a phxbn hinge joint as an X-only LIMIT_ROTATION constraint
    on the joint's child bone, and roll the bone so its local X axis lines
    up with the stored hinge axis.

    joint.axis is in Phoenix coordinates and is converted to Blender
    coordinates here; it is negated when the parent/child order is reversed
    so the hinge turns the same way in both cases.
    """
    joint_bones = []
    joint_bones.append(arm_obj.pose.bones["Bone_"+str(joint.bone_ids[0])])
    joint_bones.append(arm_obj.pose.bones["Bone_"+str(joint.bone_ids[1])])
    joint_axis = Vector((joint.axis[0], -joint.axis[2], joint.axis[1]))

    # Pick whichever bone is the child of the other; default to the second
    # bone when neither is parented to the other.
    if joint_bones[0].parent == joint_bones[1]:
        child = joint_bones[0]
        joint_axis *= -1
        parented = True
    elif joint_bones[1].parent == joint_bones[0]:
        child = joint_bones[1]
        parented = True
    else:
        child = joint_bones[1]
        parented = False

    constraint = child.constraints.new('LIMIT_ROTATION')
    constraint.min_x = joint.stop_angles[0]
    constraint.max_x = joint.stop_angles[1]
    constraint.use_limit_x = True
    constraint.use_limit_y = True
    constraint.use_limit_z = True
    constraint.influence = 0.0  # marker only; must not affect posing
    constraint.owner_space = 'LOCAL'

    if parented == False:
        constraint.name = joint_bones[0].name

    # Roll the edit bone so its local X axis matches the hinge axis.
    mat = arm_obj.matrix_world * child.matrix
    x_axis = Vector((mat[0][0], mat[0][1], mat[0][2])).normalize()
    y_axis = Vector((mat[2][0], mat[2][1], mat[2][2])).normalize()
    joint_conv = Vector((x_axis.dot(joint_axis), y_axis.dot(joint_axis), 0)).normalize()
    roll = math.atan2(joint_conv[0], joint_conv[1]) + math.pi / 2
    arm_obj.data.edit_bones[child.name].roll += roll

    child.rotation_mode = 'XYZ'

    '''mat = arm_obj.matrix_world * child.matrix
    bpy.ops.object.add(type='EMPTY', location=(mat[3][0], mat[3][1], mat[3][2]))
    ob = bpy.context.scene.objects.active
    ob.empty_draw_type = 'ARROWS'
    ob.empty_draw_size = 0.1'''

    '''mat = arm_obj.matrix_world * child.matrix
    bpy.ops.object.add(type='EMPTY', location=(mat[3][0] + y_axis[0]*0.1, mat[3][1] + y_axis[1]*0.1, mat[3][2] + y_axis[2]*0.1))
    ob = bpy.context.scene.objects.active
    ob.empty_draw_type = 'ARROWS'
    ob.empty_draw_size = 0.1

    mat = arm_obj.matrix_world * child.matrix
    x_axis = Vector((mat[0][0], mat[0][1], mat[0][2])) * 0.1
    bpy.ops.object.add(type='EMPTY', location=(mat[3][0]+x_axis[0], mat[3][1]+x_axis[1], mat[3][2]+x_axis[2]))
    ob = bpy.context.scene.objects.active
    ob.empty_draw_type = 'ARROWS'
    ob.empty_draw_size = 0.1'''

    '''mat = arm_obj.matrix_world * child.matrix
    x_axis = Vector((joint.axis[0], -joint.axis[2], joint.axis[1])) * 0.1
    bpy.ops.object.add(type='EMPTY', location=(mat[3][0]+x_axis[0], mat[3][1]+x_axis[1], mat[3][2]+x_axis[2]))
    ob = bpy.context.scene.objects.active
    ob.empty_draw_type = 'ARROWS'
    ob.empty_draw_size = 0.1

    bpy.context.scene.objects.active = arm_obj
    bpy.ops.object.mode_set(mode='EDIT')'''
+
def AddArmature(data, mesh_obj):
    """Create a Blender armature from a PHXBNdata and bind it to mesh_obj.

    Builds one edit bone per phxbn bone (named "Bone_N"), plus a "root"
    bone; applies parenting; creates vertex groups from the skinning
    weights; parents the mesh to the armature; and finally recreates the
    joint constraints in edit mode.
    """
    scn = bpy.context.scene
    for ob in scn.objects:
        ob.select = False

    mid_point = GetMeshMidpoint(data, mesh_obj)

    arm_data = bpy.data.armatures.new("MyPHXBN")
    arm_data.use_auto_ik = True
    arm_obj = bpy.data.objects.new("MyPHXBN",arm_data)
    scn.objects.link(arm_obj)
    arm_obj.select = True
    arm_obj.location = mid_point
    scn.objects.active = arm_obj

    arm_data["version"] = data.version[0]
    arm_data["rigging_stage"] = data.rigging_stage[0]

    bpy.ops.object.mode_set(mode='EDIT')

    # Clear the default bone(s) the new armature starts with.
    bpy.ops.armature.delete()
    bpy.ops.armature.select_all()
    bpy.ops.armature.delete()

    # Mesh bounding box, used below to place the root bone.
    mesh = mesh_obj.data
    min_point = array.array('f')
    max_point = array.array('f')

    for i in range(3):
        min_point.append(mesh.vertices[0].co[i])
        max_point.append(mesh.vertices[0].co[i])

    for vert in mesh.vertices:
        for i in range(3):
            min_point[i] = min(min_point[i], vert.co[i])
            max_point[i] = max(max_point[i], vert.co[i])

    # One edit bone per phxbn bone, carrying its metadata as custom
    # properties so the exporter can round-trip them.
    num = 0
    for data_bone in data.bones:
        bpy.ops.armature.bone_primitive_add(name="Bone_"+str(num))
        bone = arm_data.edit_bones[-1]
        bone.use_connect = True
        bone["Point IDs"] = data_bone
        bone["Swap"] = data.bone_swap[num]
        #print (data_bone)
        bone.head = Vector((data.vertices[data_bone[0]][0],
                            data.vertices[data_bone[0]][1],
                            data.vertices[data_bone[0]][2]))
        bone.tail = Vector((data.vertices[data_bone[1]][0],
                            data.vertices[data_bone[1]][1],
                            data.vertices[data_bone[1]][2]))
        bone["Mass"] = data.bone_mass[num]
        bone["COM"] = [data.bone_com[num*3+0],
                       data.bone_com[num*3+1],
                       data.bone_com[num*3+2]]
        bone["mat"] = data.bone_mat[num*16:num*16+16]
        num = num + 1

    #print("Bones: ", data.bones)


    bpy.ops.armature.bone_primitive_add(name="root")
    bone = arm_data.edit_bones[-1]
    bone.head = Vector(((min_point[0]+max_point[0])*0.5,
                        (min_point[1]+max_point[1])*0.5+0.5,
                        -mid_point[2]))
    bone.tail = Vector(((min_point[0]+max_point[0])*0.5,
                        (min_point[1]+max_point[1])*0.5+0.25,
                        -mid_point[2]))

    # Apply the parent hierarchy; parentless bones attach to "root".
    num = 0
    for bone_parent in data.bone_parents:
        name = "Bone_"+str(num);
        parent_name = "Bone_"+str(bone_parent)
        if bone_parent != -1:
            # NOTE(review): parent_name is computed but unused; indexing
            # edit_bones by the integer bone_parent only works because bones
            # were created above in numeric order before "root" -- consider
            # using parent_name here.
            arm_data.edit_bones[name].parent = arm_data.edit_bones[bone_parent]
        else:
            arm_data.edit_bones[name].use_connect = False
            arm_data.edit_bones[name].parent = arm_data.edit_bones["root"]
        num = num + 1

    bpy.context.scene.update()

    bpy.ops.object.mode_set(mode='OBJECT')
    bpy.context.scene.objects.active = mesh_obj
    bpy.ops.object.vertex_group_remove(all=True)

    # Track which bones already have a vertex group (one flag per bone).
    vertgroup_created=[]
    for bone in data.bones:
        vertgroup_created.append(0)

    # Assign the four (bone id, weight) pairs stored for each vertex.
    mesh = mesh_obj.data
    index = 0
    vert_index = 0
    for vert in mesh.vertices:
        for bone_num in range(0,4):
            bone_id = int(data.bone_ids[index])
            bone_weight = data.bone_weights[index]
            name = "Bone_"+str(bone_id);
            if vertgroup_created[bone_id]==0:
                vertgroup_created[bone_id]=1
                mesh_obj.vertex_groups.new(name)
            #assign the weight for this vertex
            mesh_obj.vertex_groups.assign([vert_index],
                                          mesh_obj.vertex_groups[name],
                                          bone_weight,
                                          'REPLACE')
            index = index + 1
        vert_index = vert_index + 1
    mesh.update()

    mesh_obj.select = True
    scn.objects.active = arm_obj
    bpy.ops.object.parent_set(type='ARMATURE')
    #arm_obj.makeParentDeform([mesh_obj], 0, 0)
    arm_obj.show_x_ray = True

    # Helper: n choose r (used only by CountJoints below).
    def ncr(n, r):
        return math.factorial(n) / \
               (math.factorial(r) * math.factorial(n - r))

    # Helper: count implied joints in the bone tree. NOTE(review): only
    # referenced from the commented-out metadata block below, so currently
    # unused.
    def CountJoints(bone):
        num_children = len(bone.children)
        if num_children == 0:
            return 0
        if bone.name != "root":
            num_children += 1
        count = ncr(num_children, 2)
        for child in bone.children:
            count += CountJoints(child)
        return int(count)

    '''arm_data["num_joints"] = CountJoints(arm_data.bones["root"])
    arm_data["num_special_joints"] = len(data.joints)
    num = 0
    for joint in data.joints:
        name = "joint_" + str(num)
        arm_data[name + "_type"] = joint.type[0]
        if joint.type[0] == phxbn_types._amotor_joint:
            arm_data[name + "_angles"] = joint.stop_angles
        elif joint.type[0] == phxbn_types._hinge_joint:
            arm_data[name + "_angles"] = joint.stop_angles
        arm_data[name + "_bone_ids"] = joint.bone_ids
        if joint.type[0] == phxbn_types._hinge_joint:
            arm_data[name+"_axis"] = joint.axis
        num = num + 1'''
    arm_data["parents"] = data.parents
    arm_data["parent_ids"] = data.parent_ids

    # Recreate joint constraints (must be done in edit mode for the roll
    # adjustments in the Add*Joint helpers).
    bpy.ops.object.mode_set(mode='EDIT')
    for joint in data.joints:
        if joint.type[0] == phxbn_types._hinge_joint:
            AddHingeJoint(arm_obj, joint)
        if joint.type[0] == phxbn_types._amotor_joint:
            AddAmotorJoint(arm_obj, joint)
        if joint.type[0] == phxbn_types._fixed_joint:
            AddFixedJoint(arm_obj, joint)
+
def Load(filepath):
    """Import a PHXBN skeleton from *filepath* onto the selected mesh.

    Does nothing (after printing a message) when no mesh is selected or
    the file cannot be parsed.
    """
    mesh_obj = GetMeshObj()
    if mesh_obj is None:
        print("No mesh is selected")
        return

    phxbn_data = ReadPHXBN(filepath, mesh_obj)
    if phxbn_data:
        AddArmature(phxbn_data, mesh_obj)
+
class PHXBNImporter(bpy.types.Operator):
    '''Load Phoenix Bones armature'''
    bl_idname = "import_armature.phxbn"
    bl_label = "Import PHXBN"

    # Operator property; filled in by Blender's file-select dialog.
    filepath = StringProperty(name="File Path", description="Filepath used for importing the PHXBN file", maxlen=1024, default="")

    def execute(self, context):
        # Import the skeleton onto the currently selected mesh.
        print("Filepath:",self.properties.filepath)
        Load(self.properties.filepath)

        filename = self.properties.filepath.split("\\")[-1]
        #convert the filename to an object name
        objName = bpy.path.display_name(filename)
        print("Filename:",filename)

        #mesh = readMesh(self.properties.filepath, objName)
        #addMeshObj(mesh, objName)

        return {'FINISHED'}

    def invoke(self, context, event):
        # Open the file selector; Blender calls execute() once a path is chosen.
        wm = context.window_manager
        wm.add_fileselect(self)
        return {'RUNNING_MODAL'}
+
+#Load("C:\\Users\\David\\Desktop\\Wolfire SVN\\Project\\Data\\Skeletons\\basic-attached-guard-joints.phxbn")
+#Load("C:\\Users\\David\\Desktop\\export.phxbn")
diff --git a/Data/BlenderScript/addons/io_phxbn/phxbn_types.py b/Data/BlenderScript/addons/io_phxbn/phxbn_types.py
new file mode 100644
index 00000000..ad52b0b5
--- /dev/null
+++ b/Data/BlenderScript/addons/io_phxbn/phxbn_types.py
@@ -0,0 +1,42 @@
+import array
+
+_hinge_joint = 0
+_amotor_joint = 1
+_fixed_joint = 2
+
class IKBone:
    """One IK chain record from a phxbn file."""

    def __init__(self):
        self.name = ""   # label of the IK constraint
        self.bone = 0    # id of the bone the chain ends at
        self.chain = 0   # number of bones in the chain
+
def Get4ByteIntArray():
    """Return an empty signed-integer array with guaranteed 4-byte items.

    Typecode 'l' is 8 bytes on LP64 platforms; fall back to 'i' so the
    binary file format always gets 4-byte integers.
    """
    ints = array.array('l')
    if ints.itemsize != 4:
        ints = array.array('i')
    return ints
+
class Joint:
    """One joint record as stored in a phxbn file."""
    def __init__(self):
        self.type = Get4ByteIntArray()       # one of _hinge/_amotor/_fixed_joint
        self.stop_angles = array.array('f')  # 2 floats (hinge) or 6 (amotor)
        self.bone_ids = Get4ByteIntArray()   # ids of the two connected bones
        self.axis = array.array('f')         # 3-float hinge axis (hinge only)
+
class PHXBNdata:
    """In-memory representation of a complete phxbn skeleton file.

    The array typecodes mirror the binary layout read/written by
    import_phxbn/export_phxbn (4-byte ints and 4-byte floats).
    """
    def __init__(self):
        self.vertices = []                      # list of array('f') xyz points
        self.bones = []                         # list of 2-int point-id pairs
        self.parents = Get4ByteIntArray()       # per-point parent point id
        self.bone_parents = Get4ByteIntArray()  # per-bone parent bone id (-1 = none)
        self.bone_weights = array.array('f')    # 4 skin weights per vertex/corner
        self.bone_ids = array.array('f')        # 4 bone ids per vertex/corner (stored as floats)
        self.bone_swap = Get4ByteIntArray()     # 1 when a bone's points were swapped on load
        self.bone_mass = array.array('f')       # per-bone mass
        self.bone_com = array.array('f')        # per-bone center of mass (3 floats each)
        self.rigging_stage = Get4ByteIntArray() # single value; expected to be 1
        self.version = Get4ByteIntArray()       # single value; file format version
        self.parent_ids = Get4ByteIntArray()    # per-bone parent hierarchy ids
        self.joints = []                        # list of Joint
        self.bone_mat = array.array('f')        # 16 floats (4x4 matrix) per bone
        self.ik_bones = []                      # list of IKBone
diff --git a/Data/BlenderScript/addons/symmetricalize.py b/Data/BlenderScript/addons/symmetricalize.py
new file mode 100644
index 00000000..5c779050
--- /dev/null
+++ b/Data/BlenderScript/addons/symmetricalize.py
@@ -0,0 +1,79 @@
+"""
+This script makes meshes symmetrical if they are nearly symmetrical
+"""
+
# Add-on registration metadata read by Blender's (2.5x-era) add-on manager.
bl_addon_info = {
    'name': 'Mesh: Symmetricalize',
    'author': 'David Rosen',
    'version': '1',
    'blender': (2, 5, 4),
    'location': 'View3D > Specials > Symmetricalize',
    'description': 'This script makes meshes symmetrical if they are nearly symmetrical',
    'warning': '', # used for warning icon and text in addons panel
    'wiki_url': '',
    'tracker_url': '',
    'category': 'Mesh'}
+
+import bpy
+
def SymmetricalizeFunc(context):
    """Snap nearly-mirrored vertices of the active mesh into exact X symmetry.

    Recenters the mesh so the vertex whose X is closest to zero lies on the
    mirror plane, then for each negative-X vertex looks up a positive-X
    vertex at (approximately) the mirrored position and copies its X
    coordinate negated.

    NOTE(review): matching quantizes x*100000 to an int bucket, so mirrored
    pairs that round into different buckets are missed, and when several
    vertices share a bucket only the last one seen is kept.
    """
    print("Symmetricalizing...")
    obj = context.scene.objects.active
    mesh = obj.data

    bpy.ops.object.mode_set(mode='OBJECT')

    # X coordinate with the smallest absolute value (closest to the plane).
    min_coord = mesh.vertices[0].co[0]
    for vert in mesh.vertices:
        if(abs(vert.co[0])<abs(min_coord)):
            min_coord = vert.co[0]

    print("Old center point was: ", min_coord)

    # Shift every vertex so that coordinate becomes the mirror plane x=0.
    for vert in mesh.vertices:
        vert.co[0] = vert.co[0] - min_coord

    bucket_scale = 100000  # quantization: one bucket per 1e-5 units

    vert_buckets = {}
    for vert in mesh.vertices:
        if vert.co[0] > 0:
            vert_buckets[int(round(vert.co[0] * bucket_scale))] = vert.index

    for vert in mesh.vertices:
        if(vert.co[0] < 0):
            index = int(round(-vert.co[0] * bucket_scale))
            if index in vert_buckets:
                vert.co[0] = mesh.vertices[vert_buckets[index]].co[0] * -1

    bpy.ops.object.mode_set(mode='EDIT')
+
class Symmetricalize(bpy.types.Operator):
    '''Makes meshes symmetrical if they are nearly symmetrical'''
    bl_idname = 'mesh.symmetricalize'
    bl_label = 'Symmetricalize'
    bl_options = {'REGISTER', 'UNDO'}

    @classmethod
    def poll(self, context):
        # Operator is available only with an active mesh object.
        # (First argument is the class despite being named "self".)
        obj = context.active_object
        return (obj and obj.type == 'MESH')

    def execute(self, context):
        SymmetricalizeFunc(context)
        return {'FINISHED'}
+
# Menu callback: adds a "Symmetricalize" entry that invokes the operator.
menu_func = (lambda self, context: self.layout.operator(Symmetricalize.bl_idname, text="Symmetricalize"))
+
def register():
    """Add the operator to the mesh Specials and Vertices menus."""
    #bpy.types.register(Symmetricalize)
    bpy.types.VIEW3D_MT_edit_mesh_specials.append(menu_func)
    bpy.types.VIEW3D_MT_edit_mesh_vertices.append(menu_func)

def unregister():
    """Remove the menu entries added by register()."""
    #bpy.types.unregister(Symmetricalize)
    bpy.types.VIEW3D_MT_edit_mesh_specials.remove(menu_func)
    bpy.types.VIEW3D_MT_edit_mesh_vertices.remove(menu_func)

# Allow running the script directly from Blender's text editor.
if __name__ == "__main__":
    register()
diff --git a/Data/BlenderScript/io/export_obj.py b/Data/BlenderScript/io/export_obj.py
new file mode 100644
index 00000000..53e1f1f6
--- /dev/null
+++ b/Data/BlenderScript/io/export_obj.py
@@ -0,0 +1,989 @@
+# ##### BEGIN GPL LICENSE BLOCK #####
+#
+# This program is free software; you can redistribute it and/or
+# modify it under the terms of the GNU General Public License
+# as published by the Free Software Foundation; either version 2
+# of the License, or (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this program; if not, write to the Free Software Foundation,
+# Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
+#
+# ##### END GPL LICENSE BLOCK #####
+
+# <pep8 compliant>
+
+"""
+Name: 'Wavefront (.obj)...'
+Blender: 248
+Group: 'Export'
+Tooltip: 'Save a Wavefront OBJ File'
+"""
+
+__author__ = "Campbell Barton, Jiri Hnidek, Paolo Ciccone"
+__url__ = ['http://wiki.blender.org/index.php/Scripts/Manual/Export/wavefront_obj', 'www.blender.org', 'blenderartists.org']
+__version__ = "1.21"
+
+__bpydoc__ = """\
+This script is an exporter to OBJ file format.
+
+Usage:
+
+Select the objects you wish to export and run this script from "File->Export" menu.
+Selecting the default options from the popup box will be good in most cases.
+All objects that can be represented as a mesh (mesh, curve, metaball, surface, text3d)
+will be exported as mesh data.
+"""
+
+# import math and other in functions that use them for the sake of fast Blender startup
+# import math
+import os
+import time
+import shutil
+
+import bpy
+import mathutils
+
+
+# Returns a tuple - path,extension.
+# 'hello.obj' > ('hello', '.obj')
# Returns a tuple - path,extension.
# 'hello.obj' > ('hello', '.obj')
def splitExt(path):
    """Split *path* into (stem, extension), extension including the dot.

    Uses os.path.splitext instead of the old rfind('.') so a dot inside a
    directory name (e.g. 'render.v2/model') is no longer mistaken for the
    start of the extension.
    """
    return os.path.splitext(path)
+
def fixName(name):
    """Return *name* made safe for use as an OBJ/MTL identifier.

    None becomes the literal string 'None'; spaces are replaced with
    underscores (OBJ names must not contain spaces).
    """
    if name is None:  # identity check is the Python idiom for None
        return 'None'
    return name.replace(' ', '_')
+
def write_mtl(scene, filepath, copy_images, mtl_dict):
    """Write the .mtl material library accompanying an OBJ export.

    scene       -- Blender scene; its world ambient colour feeds the 'Ka' lines.
                   NOTE(review): assumes scene.world is not None -- confirm callers.
    filepath    -- destination path of the .mtl file.
    copy_images -- True: copy each referenced image next to the .mtl and write
                   just the file name; False: write the original resolved path.
    mtl_dict    -- {(mat_name, image_name): (mtl_name, material, image)} as
                   built by write().

    Fixes over the original: the output file is managed by a with-statement
    (it leaked on any exception), the bare 'except:' is narrowed to
    Exception, and the loop variable no longer shadows the 'filepath'
    parameter.
    """
    world = scene.world
    worldAmb = world.ambient_color

    dest_dir = os.path.dirname(filepath)

    def copy_image(image):
        # Resolve Blender's '//'-relative path to an absolute one.
        fn = bpy.utils.expandpath(image.filepath)
        fn_strip = os.path.basename(fn)
        if copy_images:
            rel = fn_strip
            fn_abs_dest = os.path.join(dest_dir, fn_strip)
            if not os.path.exists(fn_abs_dest):
                shutil.copy(fn, fn_abs_dest)
        else:
            rel = fn

        return rel

    with open(filepath, "w") as file:
        # XXX
#       file.write('# Blender MTL File: %s\n' % Blender.Get('filepath').split('\\')[-1].split('/')[-1])
        file.write('# Material Count: %i\n' % len(mtl_dict))
        # Write material/image combinations we have used.
        for key, (mtl_mat_name, mat, img) in mtl_dict.items():

            # Get the Blender data for the material and the image.
            # Having an image named None will make a bug, dont do it :)

            file.write('newmtl %s\n' % mtl_mat_name) # Define a new material: matname_imgname

            if mat:
                file.write('Ns %.6f\n' % ((mat.specular_hardness-1) * 1.9607843137254901) ) # Hardness, convert blenders 1-511 to MTL's
                file.write('Ka %.6f %.6f %.6f\n' % tuple([c*mat.ambient for c in worldAmb]) ) # Ambient, uses mirror colour,
                file.write('Kd %.6f %.6f %.6f\n' % tuple([c*mat.diffuse_intensity for c in mat.diffuse_color]) ) # Diffuse
                file.write('Ks %.6f %.6f %.6f\n' % tuple([c*mat.specular_intensity for c in mat.specular_color]) ) # Specular
                if hasattr(mat, "ior"):
                    file.write('Ni %.6f\n' % mat.ior) # Refraction index
                else:
                    file.write('Ni %.6f\n' % 1.0)
                file.write('d %.6f\n' % mat.alpha) # Alpha (obj uses 'd' for dissolve)

                # 0 to disable lighting, 1 for ambient & diffuse only (specular color set to black), 2 for full lighting.
                if mat.shadeless:
                    file.write('illum 0\n') # ignore lighting
                elif mat.specular_intensity == 0:
                    file.write('illum 1\n') # no specular.
                else:
                    file.write('illum 2\n') # light normally

            else:
                # Write a dummy material for faces with no material assigned.
                file.write('Ns 0\n')
                file.write('Ka %.6f %.6f %.6f\n' % tuple([c for c in worldAmb]) ) # Ambient, uses mirror colour,
                file.write('Kd 0.8 0.8 0.8\n')
                file.write('Ks 0.8 0.8 0.8\n')
                file.write('d 1\n') # No alpha
                file.write('illum 2\n') # light normally

            # Write images!
            if img: # We have an image on the face!
                # write relative image path
                rel = copy_image(img)
                file.write('map_Kd %s\n' % rel) # Diffuse mapping image

            elif mat: # No face image. If we have a material search for MTex image.
                for mtex in mat.texture_slots:
                    if mtex and mtex.texture.type == 'IMAGE':
                        try:
                            # was 'filepath' -- shadowed the function parameter
                            image_path = copy_image(mtex.texture.image)
                            file.write('map_Kd %s\n' % image_path) # Diffuse mapping image
                            break
                        except Exception:
                            # Texture has no image though its an image type, best ignore.
                            pass

            file.write('\n\n')
+
# XXX not used
def copy_file(source, dest):
    """Binary-copy *source* to *dest*.

    Rewritten with context managers so both handles are closed even if a
    read/write raises, and with shutil.copyfileobj so the whole file is not
    loaded into memory at once.
    """
    with open(source, 'rb') as src, open(dest, 'wb') as dst:
        shutil.copyfileobj(src, dst)
+
+
# XXX not used
def copy_images(dest_dir, mtl_dict=None):
    """Collect the set of images referenced by the exported materials.

    dest_dir -- target directory; a trailing path separator is appended if
                missing.
    mtl_dict -- the {(mat, image): (name, material, image)} mapping built by
                write(). The original body read a module-level 'mtl_dict'
                that is never defined (guaranteed NameError); it is now a
                parameter, matching the commented-out call site in
                do_export(): copy_images(dest_dir, mtl_dict).

    The actual copy loop is still commented out, as in the original; only
    the unique-image collection runs.
    """
    if mtl_dict is None:
        mtl_dict = {}

    if dest_dir[-1] != os.sep:
        dest_dir += os.sep
#   if dest_dir[-1] != sys.sep:
#       dest_dir += sys.sep

    # Get unique image names
    uniqueImages = {}
    for matname, mat, image in mtl_dict.values(): # Only use image name
        # Get Texface images
        if image:
            uniqueImages[image] = image # Should use sets here. wait until Python 2.4 is default.

        # Get MTex images
        if mat:
            for mtex in mat.texture_slots:
                if mtex and mtex.texture.type == 'IMAGE':
                    image_tex = mtex.texture.image
                    if image_tex:
                        try:
                            uniqueImages[image_tex] = image_tex
                        except Exception:
                            pass

    # Now copy images
    copyCount = 0

#   for bImage in uniqueImages.values():
#       image_path = bpy.utils.expandpath(bImage.filepath)
#       if bpy.sys.exists(image_path):
#           # Make a name for the target path.
#           dest_image_path = dest_dir + image_path.split('\\')[-1].split('/')[-1]
#           if not bpy.utils.exists(dest_image_path): # Image isnt already there
#               print('\tCopying "%s" > "%s"' % (image_path, dest_image_path))
#               copy_file(image_path, dest_image_path)
#               copyCount+=1

#   paths= bpy.util.copy_images(uniqueImages.values(), dest_dir)

    print('\tCopied %d images' % copyCount)
+
+# XXX not converted
+def test_nurbs_compat(ob):
+ if ob.type != 'Curve':
+ return False
+
+ for nu in ob.data:
+ if (not nu.knotsV) and nu.type != 1: # not a surface and not bezier
+ return True
+
+ return False
+
+
# XXX not converted
def write_nurb(file, ob, ob_mat):
    """Write curve object *ob* (transformed by *ob_mat*) as OBJ free-form
    geometry ('v' points plus a 'curv' bspline definition) into *file*;
    returns the number of vertices written.

    NOTE(review): still written against the Blender 2.4x API
    ('Blender.mathutils', nu.flagU, nu.orderU ...). 'Blender' is not
    imported anywhere in this module, so calling this raises NameError;
    kept as reference only (the call site in write() is commented out).
    """
    tot_verts = 0
    cu = ob.data

    # use negative indices
    Vector = Blender.mathutils.Vector
    for nu in cu:

        # Degree of the curve in U: 1 for poly lines, orderU-1 for nurbs.
        if nu.type==0: DEG_ORDER_U = 1
        else: DEG_ORDER_U = nu.orderU-1 # Tested to be correct

        if nu.type==1:
            print("\tWarning, bezier curve:", ob.name, "only poly and nurbs curves supported")
            continue

        if nu.knotsV:
            print("\tWarning, surface:", ob.name, "only poly and nurbs curves supported")
            continue

        if len(nu) <= DEG_ORDER_U:
            print("\tWarning, orderU is lower then vert count, skipping:", ob.name)
            continue

        pt_num = 0
        # flagU bit 0 = cyclic, bit 1 = clamped endpoints (only when open).
        do_closed = (nu.flagU & 1)
        do_endpoints = (do_closed==0) and (nu.flagU & 2)

        # Emit the control points in object space.
        for pt in nu:
            pt = Vector(pt[0], pt[1], pt[2]) * ob_mat
            file.write('v %.6f %.6f %.6f\n' % (pt[0], pt[1], pt[2]))
            pt_num += 1
        tot_verts += pt_num

        file.write('g %s\n' % (fixName(ob.name))) # fixName(ob.getData(1)) could use the data name too
        file.write('cstype bspline\n') # not ideal, hard coded
        file.write('deg %d\n' % DEG_ORDER_U) # not used for curves but most files have it still

        # Reference the just-written points with negative (relative) indices.
        curve_ls = [-(i+1) for i in range(pt_num)]

        # 'curv' keyword
        if do_closed:
            if DEG_ORDER_U == 1:
                pt_num += 1
                curve_ls.append(-1)
            else:
                pt_num += DEG_ORDER_U
                curve_ls = curve_ls + curve_ls[0:DEG_ORDER_U]

        file.write('curv 0.0 1.0 %s\n' % (' '.join( [str(i) for i in curve_ls] ))) # Blender has no U and V values for the curve

        # 'parm' keyword
        tot_parm = (DEG_ORDER_U + 1) + pt_num
        tot_parm_div = float(tot_parm-1)
        parm_ls = [(i/tot_parm_div) for i in range(tot_parm)]

        if do_endpoints: # end points, force param
            for i in range(DEG_ORDER_U+1):
                parm_ls[i] = 0.0
                parm_ls[-(1+i)] = 1.0

        file.write('parm u %s\n' % ' '.join( [str(i) for i in parm_ls] ))

        file.write('end\n')

    return tot_verts
+
def write(filepath, objects, scene,
          EXPORT_TRI=False,
          EXPORT_EDGES=False,
          EXPORT_NORMALS=False,
          EXPORT_NORMALS_HQ=False,
          EXPORT_UV=True,
          EXPORT_MTL=True,
          EXPORT_COPY_IMAGES=False,
          EXPORT_APPLY_MODIFIERS=True,
          EXPORT_ROTX90=True,
          EXPORT_BLEN_OBS=True,
          EXPORT_GROUP_BY_OB=False,
          EXPORT_GROUP_BY_MAT=False,
          EXPORT_KEEP_VERT_ORDER=False,
          EXPORT_POLYGROUPS=False,
          EXPORT_CURVE_AS_NURBS=True):
    '''
    Basic write function. The context and options must be already set
    This can be accessed externaly
    eg.
    write( 'c:\\test\\foobar.obj', Blender.Object.GetSelected() ) # Using default options.

    Writes *objects* from *scene* into the OBJ file at *filepath*, and (with
    EXPORT_MTL) a sibling .mtl file.  Vertex/uv/normal indices are global
    across all exported objects; duplis are expanded; non-mesh objects are
    skipped (nurbs support is commented out below).
    '''

    # XXX
    import math

    def veckey3d(v):
        # Quantize a vector to 6 decimals so equal normals share one 'vn'.
        return round(v.x, 6), round(v.y, 6), round(v.z, 6)

    def veckey2d(v):
        # Same idea for UV coordinates.
        return round(v[0], 6), round(v[1], 6)
        # return round(v.x, 6), round(v.y, 6)

    def findVertexGroupName(face, vWeightMap):
        """
        Return the vertex-group name that covers most of *face*.

        A vertex can belong to several groups at once, so each group's
        weights are summed over the face's vertices and the group with the
        highest total wins; '(null)' when the face has no weighted groups.
        """
        weightDict = {}
        for vert_index in face.verts:
#       for vert in face:
            vWeights = vWeightMap[vert_index]
#           vWeights = vWeightMap[vert]
            for vGroupName, weight in vWeights:
                weightDict[vGroupName] = weightDict.get(vGroupName, 0) + weight

        if weightDict:
            alist = [(weight,vGroupName) for vGroupName, weight in weightDict.items()] # sort least to greatest amount of weight
            alist.sort()
            return(alist[-1][1]) # highest value last
        else:
            return '(null)'

    # TODO: implement this in C? dunno how it should be called...
    def getVertsFromGroup(me, group_index):
        # Return [(vertex_index, weight), ...] for every vertex of *me*
        # assigned to the vertex group with index *group_index*.
        ret = []

        for i, v in enumerate(me.verts):
            for g in v.groups:
                if g.group == group_index:
                    ret.append((i, g.weight))

        return ret


    print('OBJ Export path: "%s"' % filepath)
    temp_mesh_name = '~tmp-mesh'

    time1 = time.clock()
#   time1 = sys.time()
#   scn = Scene.GetCurrent()

    file = open(filepath, "w")

    # Write Header
    file.write('# Blender v%s OBJ File: %s\n' % (bpy.app.version_string, bpy.data.filepath.split('/')[-1].split('\\')[-1] ))
    file.write('# www.blender.org\n')

    # Tell the obj file what material file to use.
    if EXPORT_MTL:
        mtlfilepath = '%s.mtl' % '.'.join(filepath.split('.')[:-1])
        file.write('mtllib %s\n' % ( mtlfilepath.split('\\')[-1].split('/')[-1] ))

    if EXPORT_ROTX90:
        # OBJ convention is Y-up; Blender is Z-up, hence the -90deg X rotation.
        mat_xrot90= mathutils.RotationMatrix(-math.pi/2, 4, 'X')

    # Initialize totals, these are updated each object
    totverts = totuvco = totno = 1

    face_vert_index = 1

    globalNormals = {}

    # A Dict of Materials
    # (material.name, image.name):matname_imagename # matname_imagename has gaps removed.
    mtl_dict = {}

    # Get all meshes
    for ob_main in objects:

        # ignore dupli children
        if ob_main.parent and ob_main.parent.dupli_type != 'NONE':
            # XXX
            print(ob_main.name, 'is a dupli child - ignoring')
            continue

        obs = []
        if ob_main.dupli_type != 'NONE':
            # XXX
            print('creating dupli_list on', ob_main.name)
            ob_main.create_dupli_list(scene)

            obs = [(dob.object, dob.matrix) for dob in ob_main.dupli_list]

            # XXX debug print
            print(ob_main.name, 'has', len(obs), 'dupli children')
        else:
            obs = [(ob_main, ob_main.matrix_world)]

        for ob, ob_mat in obs:

            # XXX postponed
#           # Nurbs curve support
#           if EXPORT_CURVE_AS_NURBS and test_nurbs_compat(ob):
#               if EXPORT_ROTX90:
#                   ob_mat = ob_mat * mat_xrot90

#               totverts += write_nurb(file, ob, ob_mat)

#               continue
#           end nurbs

            if ob.type != 'MESH':
                continue

            # Evaluated copy of the mesh (modifiers applied when requested);
            # removed again via bpy.data.meshes.remove() below.
            me = ob.create_mesh(scene, EXPORT_APPLY_MODIFIERS, 'PREVIEW')

            if EXPORT_ROTX90:
                me.transform(mat_xrot90 * ob_mat)
            else:
                me.transform(ob_mat)

#           # Will work for non meshes now! :)
#           me= BPyMesh.getMeshFromObject(ob, containerMesh, EXPORT_APPLY_MODIFIERS, EXPORT_POLYGROUPS, scn)
#           if not me:
#               continue

            if EXPORT_UV:
                faceuv = len(me.uv_textures) > 0
            else:
                faceuv = False

            # XXX - todo, find a better way to do triangulation
            # ...removed convert_to_triface because it relies on editmesh
            '''
            # We have a valid mesh
            if EXPORT_TRI and me.faces:
                # Add a dummy object to it.
                has_quads = False
                for f in me.faces:
                    if f.verts[3] != 0:
                        has_quads = True
                        break

                if has_quads:
                    newob = bpy.data.objects.new('temp_object', me)
                    # if we forget to set Object.data - crash
                    scene.objects.link(newob)
                    newob.convert_to_triface(scene)
                    # mesh will still be there
                    scene.objects.unlink(newob)
            '''

            # Make our own list so it can be sorted to reduce context switching
            face_index_pairs = [ (face, index) for index, face in enumerate(me.faces)]
            # faces = [ f for f in me.faces ]

            if EXPORT_EDGES:
                edges = me.edges
            else:
                edges = []

            if not (len(face_index_pairs)+len(edges)+len(me.verts)): # Make sure there is somthing to write

                # clean up
                bpy.data.meshes.remove(me)

                continue # dont bother with this mesh.

            # XXX
            # High Quality Normals
            if EXPORT_NORMALS and face_index_pairs:
                me.calc_normals()
#               if EXPORT_NORMALS_HQ:
#                   BPyMesh.meshCalcNormals(me)
#               else:
#                   # transforming normals is incorrect
#                   # when the matrix is scaled,
#                   # better to recalculate them
#                   me.calcNormals()

            materials = me.materials

            materialNames = []
            materialItems = [m for m in materials]
            if materials:
                for mat in materials:
                    if mat: # !=None
                        materialNames.append(mat.name)
                    else:
                        materialNames.append(None)
                # Cant use LC because some materials are None.
                # materialNames = map(lambda mat: mat.name, materials) # Bug Blender, dosent account for null materials, still broken.

            # Possible there null materials, will mess up indicies
            # but at least it will export, wait until Blender gets fixed.
            # (16 is the legacy per-mesh material slot limit being padded to.)
            materialNames.extend((16-len(materialNames)) * [None])
            materialItems.extend((16-len(materialItems)) * [None])

            # Sort by Material, then images
            # so we dont over context switch in the obj file.
            if EXPORT_KEEP_VERT_ORDER:
                pass
            elif faceuv:
                # XXX update
                tface = me.active_uv_texture.data

                face_index_pairs.sort(key=lambda a: (a[0].material_index, hash(tface[a[1]].image), a[0].smooth))
            elif len(materials) > 1:
                face_index_pairs.sort(key = lambda a: (a[0].material_index, a[0].smooth))
            else:
                # no materials
                face_index_pairs.sort(key = lambda a: a[0].smooth)
#           if EXPORT_KEEP_VERT_ORDER:
#               pass
#           elif faceuv:
#               try:    faces.sort(key = lambda a: (a.mat, a.image, a.smooth))
#               except: faces.sort(lambda a,b: cmp((a.mat, a.image, a.smooth), (b.mat, b.image, b.smooth)))
#           elif len(materials) > 1:
#               try:    faces.sort(key = lambda a: (a.mat, a.smooth))
#               except: faces.sort(lambda a,b: cmp((a.mat, a.smooth), (b.mat, b.smooth)))
#           else:
#               # no materials
#               try:    faces.sort(key = lambda a: a.smooth)
#               except: faces.sort(lambda a,b: cmp(a.smooth, b.smooth))

            faces = [pair[0] for pair in face_index_pairs]

            # Set the default mat to no material and no image.
            contextMat = (0, 0) # Can never be this, so we will label a new material the first chance we get.
            contextSmooth = None # Will either be true or false, set bad to force initialization switch.

            if EXPORT_BLEN_OBS or EXPORT_GROUP_BY_OB:
                name1 = ob.name
                name2 = ob.data.name
                if name1 == name2:
                    obnamestring = fixName(name1)
                else:
                    obnamestring = '%s_%s' % (fixName(name1), fixName(name2))

                if EXPORT_BLEN_OBS:
                    file.write('o %s\n' % obnamestring) # Write Object name
                else: # if EXPORT_GROUP_BY_OB:
                    file.write('g %s\n' % obnamestring)


            # Vert
            for v in me.verts:
                file.write('v %.6f %.6f %.6f\n' % tuple(v.co))

            # UV
            if faceuv:
                uv_face_mapping = [[0,0,0,0] for f in faces] # a bit of a waste for tri's :/

                uv_dict = {} # could use a set() here
                uv_layer = me.active_uv_texture
                for f, f_index in face_index_pairs:

                    tface = uv_layer.data[f_index]

                    # workaround, since tface.uv iteration is wrong atm
                    uvs = tface.uv
                    # uvs = [tface.uv1, tface.uv2, tface.uv3]

                    # # add another UV if it's a quad
                    # if len(f.verts) == 4:
                    #     uvs.append(tface.uv4)

                    for uv_index, uv in enumerate(uvs):
                        uvkey = veckey2d(uv)
                        try:
                            uv_face_mapping[f_index][uv_index] = uv_dict[uvkey]
                        except:
                            # first time this uv is seen: assign it the next
                            # unique index and emit the 'vt' line.
                            uv_face_mapping[f_index][uv_index] = uv_dict[uvkey] = len(uv_dict)
                            file.write('vt %.6f %.6f\n' % tuple(uv))

#               uv_dict = {} # could use a set() here
#               for f_index, f in enumerate(faces):

#                   for uv_index, uv in enumerate(f.uv):
#                       uvkey = veckey2d(uv)
#                       try:
#                           uv_face_mapping[f_index][uv_index] = uv_dict[uvkey]
#                       except:
#                           uv_face_mapping[f_index][uv_index] = uv_dict[uvkey] = len(uv_dict)
#                           file.write('vt %.6f %.6f\n' % tuple(uv))

                uv_unique_count = len(uv_dict)
#               del uv, uvkey, uv_dict, f_index, uv_index
                # Only need uv_unique_count and uv_face_mapping

            # NORMAL, Smooth/Non smoothed.
            if EXPORT_NORMALS:
                for f in faces:
                    if f.smooth:
                        for vIdx in f.verts:
                            v = me.verts[vIdx]
                            noKey = veckey3d(v.normal)
                            if noKey not in globalNormals:
                                globalNormals[noKey] = totno
                                totno +=1
                                file.write('vn %.6f %.6f %.6f\n' % noKey)
                    else:
                        # Hard, 1 normal from the face.
                        noKey = veckey3d(f.normal)
                        if noKey not in globalNormals:
                            globalNormals[noKey] = totno
                            totno +=1
                            file.write('vn %.6f %.6f %.6f\n' % noKey)

            if not faceuv:
                f_image = None

            # XXX
            if EXPORT_POLYGROUPS:
                # Retrieve the list of vertex groups
#               vertGroupNames = me.getVertGroupNames()

                currentVGroup = ''
                # Create a dictionary keyed by face id and listing, for each vertex, the vertex groups it belongs to
                vgroupsMap = [[] for _i in range(len(me.verts))]
#               vgroupsMap = [[] for _i in xrange(len(me.verts))]
                for g in ob.vertex_groups:
#               for vertexGroupName in vertGroupNames:
                    for vIdx, vWeight in getVertsFromGroup(me, g.index):
#                   for vIdx, vWeight in me.getVertsFromGroup(vertexGroupName, 1):
                        vgroupsMap[vIdx].append((g.name, vWeight))

            # NOTE(review): uv_face_mapping above is filled keyed by the
            # face's ORIGINAL mesh index, but it is read below with the
            # sorted-list position 'f_index'.  The two only coincide when
            # the sort left the order unchanged (e.g. KEEP_VERT_ORDER) --
            # confirm against upstream before relying on sorted UV output.
            for f_index, f in enumerate(faces):
                f_v = [{"index": index, "vertex": me.verts[index]} for index in f.verts]

                # if f.verts[3] == 0:
                #     f_v.pop()

#               f_v= f.v
                f_smooth= f.smooth
                f_mat = min(f.material_index, len(materialNames)-1)
#               f_mat = min(f.mat, len(materialNames)-1)
                if faceuv:

                    tface = me.active_uv_texture.data[face_index_pairs[f_index][1]]

                    f_image = tface.image
                    f_uv = tface.uv
                    # f_uv= [tface.uv1, tface.uv2, tface.uv3]
                    # if len(f.verts) == 4:
                    #     f_uv.append(tface.uv4)
#               f_image = f.image
#               f_uv= f.uv

                # MAKE KEY
                if faceuv and f_image: # Object is always true.
                    key = materialNames[f_mat], f_image.name
                else:
                    key = materialNames[f_mat], None # No image, use None instead.

                # Write the vertex group
                if EXPORT_POLYGROUPS:
                    if len(ob.vertex_groups):
                        # find what vertex group the face belongs to
                        theVGroup = findVertexGroupName(f,vgroupsMap)
                        if theVGroup != currentVGroup:
                            currentVGroup = theVGroup
                            file.write('g %s\n' % theVGroup)
#               # Write the vertex group
#               if EXPORT_POLYGROUPS:
#                   if vertGroupNames:
#                       # find what vertex group the face belongs to
#                       theVGroup = findVertexGroupName(f,vgroupsMap)
#                       if theVGroup != currentVGroup:
#                           currentVGroup = theVGroup
#                           file.write('g %s\n' % theVGroup)

                # CHECK FOR CONTEXT SWITCH
                if key == contextMat:
                    pass # Context already switched, dont do anything
                else:
                    if key[0] == None and key[1] == None:
                        # Write a null material, since we know the context has changed.
                        if EXPORT_GROUP_BY_MAT:
                            # can be mat_image or (null)
                            file.write('g %s_%s\n' % (fixName(ob.name), fixName(ob.data.name)) ) # can be mat_image or (null)
                        file.write('usemtl (null)\n') # mat, image

                    else:
                        mat_data= mtl_dict.get(key)
                        if not mat_data:
                            # First add to global dict so we can export to mtl
                            # Then write mtl

                            # Make a new names from the mat and image name,
                            # converting any spaces to underscores with fixName.

                            # If none image dont bother adding it to the name
                            if key[1] == None:
                                mat_data = mtl_dict[key] = ('%s'%fixName(key[0])), materialItems[f_mat], f_image
                            else:
                                mat_data = mtl_dict[key] = ('%s_%s' % (fixName(key[0]), fixName(key[1]))), materialItems[f_mat], f_image

                        if EXPORT_GROUP_BY_MAT:
                            file.write('g %s_%s_%s\n' % (fixName(ob.name), fixName(ob.data.name), mat_data[0]) ) # can be mat_image or (null)

                        file.write('usemtl %s\n' % mat_data[0]) # can be mat_image or (null)

                contextMat = key
                if f_smooth != contextSmooth:
                    if f_smooth: # on now off
                        file.write('s 1\n')
                        contextSmooth = f_smooth
                    else: # was off now on
                        file.write('s off\n')
                        contextSmooth = f_smooth

                file.write('f')
                if faceuv:
                    if EXPORT_NORMALS:
                        if f_smooth: # Smoothed, use vertex normals
                            for vi, v in enumerate(f_v):
                                file.write( ' %d/%d/%d' % \
                                                (v["index"] + totverts,
                                                 totuvco + uv_face_mapping[f_index][vi],
                                                 globalNormals[ veckey3d(v["vertex"].normal) ]) ) # vert, uv, normal

                        else: # No smoothing, face normals
                            no = globalNormals[ veckey3d(f.normal) ]
                            for vi, v in enumerate(f_v):
                                file.write( ' %d/%d/%d' % \
                                                (v["index"] + totverts,
                                                 totuvco + uv_face_mapping[f_index][vi],
                                                 no) ) # vert, uv, normal
                    else: # No Normals
                        for vi, v in enumerate(f_v):
                            file.write( ' %d/%d' % (\
                              v["index"] + totverts,\
                              totuvco + uv_face_mapping[f_index][vi])) # vert, uv

                    face_vert_index += len(f_v)

                else: # No UV's
                    if EXPORT_NORMALS:
                        if f_smooth: # Smoothed, use vertex normals
                            for v in f_v:
                                file.write( ' %d//%d' %
                                            (v["index"] + totverts, globalNormals[ veckey3d(v["vertex"].normal) ]) )
                        else: # No smoothing, face normals
                            no = globalNormals[ veckey3d(f.normal) ]
                            for v in f_v:
                                file.write( ' %d//%d' % (v["index"] + totverts, no) )
                    else: # No Normals
                        for v in f_v:
                            file.write( ' %d' % (v["index"] + totverts) )

                file.write('\n')

            # Write edges.
            if EXPORT_EDGES:
                for ed in edges:
                    if ed.loose:
                        file.write('f %d %d\n' % (ed.verts[0] + totverts, ed.verts[1] + totverts))

            # Make the indicies global rather then per mesh
            totverts += len(me.verts)
            if faceuv:
                totuvco += uv_unique_count

            # clean up
            bpy.data.meshes.remove(me)

        if ob_main.dupli_type != 'NONE':
            ob_main.free_dupli_list()

    file.close()


    # Now we have all our materials, save them
    if EXPORT_MTL:
        write_mtl(scene, mtlfilepath, EXPORT_COPY_IMAGES, mtl_dict)
#   if EXPORT_COPY_IMAGES:
#       dest_dir = os.path.basename(filepath)
#       # dest_dir = filepath
#       # # Remove chars until we are just the path.
#       # while dest_dir and dest_dir[-1] not in '\\/':
#       #   dest_dir = dest_dir[:-1]
#       if dest_dir:
#           copy_images(dest_dir, mtl_dict)
#       else:
#           print('\tError: "%s" could not be used as a base for an image path.' % filepath)

    print("OBJ Export time: %.2f" % (time.clock() - time1))
#   print "OBJ Export time: %.2f" % (sys.time() - time1)
+
def do_export(filepath, context,
              EXPORT_APPLY_MODIFIERS = True, # not used
              EXPORT_ROTX90 = True, # wrong
              EXPORT_TRI = False, # ok
              EXPORT_EDGES = False,
              EXPORT_NORMALS = False, # not yet
              EXPORT_NORMALS_HQ = False, # not yet
              EXPORT_UV = True, # ok
              EXPORT_MTL = True,
              EXPORT_SEL_ONLY = True, # ok
              EXPORT_ALL_SCENES = False, # XXX not working atm
              EXPORT_ANIMATION = False,
              EXPORT_COPY_IMAGES = False,
              EXPORT_BLEN_OBS = True,
              EXPORT_GROUP_BY_OB = False,
              EXPORT_GROUP_BY_MAT = False,
              EXPORT_KEEP_VERT_ORDER = False,
              EXPORT_POLYGROUPS = False,
              EXPORT_CURVE_AS_NURBS = True):
    """Top-level export driver.

    Builds the output file name (inserting scene-name and frame-number
    suffixes when exporting all scenes / an animation), steps through the
    requested frames and calls write() once per exported file.

    Fix over the original: the animation frame range now reads
    scn.frame_end instead of context.frame_end -- the range belongs to the
    scene being exported, and bpy's Context has no 'frame_end' attribute.
    """
    base_name, ext = splitExt(filepath)
    context_name = [base_name, '', '', ext] # Base name, scene name, frame number, extension

    orig_scene = context.scene

    # Exit edit mode before exporting, so current object states are exported properly.
    bpy.ops.object.mode_set(mode='OBJECT')

#   if EXPORT_ALL_SCENES:
#       export_scenes = bpy.data.scenes
#   else:
#       export_scenes = [orig_scene]

    # XXX only exporting one scene atm since changing
    # current scene is not possible.
    # Brecht says that ideally in 2.5 we won't need such a function,
    # allowing multiple scenes open at once.
    export_scenes = [orig_scene]

    # Export all scenes.
    for scn in export_scenes:
        # scn.makeCurrent() # If already current, this is not slow.
        # context = scn.getRenderingContext()
        orig_frame = scn.frame_current

        if EXPORT_ALL_SCENES: # Add scene name into the context_name
            context_name[1] = '_%s' % bpy.utils.clean_name(scn.name) # WARNING, its possible that this could cause a collision. we could fix if were feeling paranoid.

        # Export an animation?
        if EXPORT_ANIMATION:
            # was context.frame_end (see docstring)
            scene_frames = range(scn.frame_start, scn.frame_end + 1) # Up to and including the end frame.
        else:
            scene_frames = [orig_frame] # Dont export an animation.

        # Loop through all frames in the scene and export.
        for frame in scene_frames:
            if EXPORT_ANIMATION: # Add frame to the filepath.
                context_name[2] = '_%.6d' % frame

            scn.frame_current = frame
            if EXPORT_SEL_ONLY:
                export_objects = context.selected_objects
            else:
                export_objects = scn.objects

            full_path= ''.join(context_name)

            # erm... bit of a problem here, this can overwrite files when exporting frames. not too bad.
            # EXPORT THE FILE.
            write(full_path, export_objects, scn,
                  EXPORT_TRI, EXPORT_EDGES, EXPORT_NORMALS,
                  EXPORT_NORMALS_HQ, EXPORT_UV, EXPORT_MTL,
                  EXPORT_COPY_IMAGES, EXPORT_APPLY_MODIFIERS,
                  EXPORT_ROTX90, EXPORT_BLEN_OBS,
                  EXPORT_GROUP_BY_OB, EXPORT_GROUP_BY_MAT, EXPORT_KEEP_VERT_ORDER,
                  EXPORT_POLYGROUPS, EXPORT_CURVE_AS_NURBS)


        # Restore the frame the scene was on before exporting.
        scn.frame_current = orig_frame

    # Restore old active scene.
#   orig_scene.makeCurrent()
#   Window.WaitCursor(0)
+
+
+'''
+Currently the exporter lacks these features:
+* nurbs
+* multiple scene export (only active scene is written)
+* particles
+'''
+
+from bpy.props import *
+
class ExportOBJ(bpy.types.Operator):
    '''Save a Wavefront OBJ File'''

    bl_idname = "export.obj"
    bl_label = 'Export OBJ'

    # List of operator properties, the attributes will be assigned
    # to the class instance from the operator settings before calling.

    filepath = StringProperty(name="File Path", description="Filepath used for exporting the OBJ file", maxlen= 1024, default= "")
    check_existing = BoolProperty(name="Check Existing", description="Check and warn on overwriting existing files", default=True, options={'HIDDEN'})

    # context group
    use_selection = BoolProperty(name="Selection Only", description="", default= True)
    use_all_scenes = BoolProperty(name="All Scenes", description="", default= False)
    use_animation = BoolProperty(name="All Animation", description="", default= False)

    # object group
    use_modifiers = BoolProperty(name="Apply Modifiers", description="", default= True)
    use_rotate90 = BoolProperty(name="Rotate X90", description="", default= True)

    # extra data group
    use_edges = BoolProperty(name="Edges", description="", default= True)
    use_normals = BoolProperty(name="Normals", description="", default= False)
    use_hq_normals = BoolProperty(name="High Quality Normals", description="", default= True)
    use_uvs = BoolProperty(name="UVs", description="", default= True)
    use_materials = BoolProperty(name="Materials", description="", default=False)
    copy_images = BoolProperty(name="Copy Images", description="", default= False)
    use_triangles = BoolProperty(name="Triangulate", description="", default= False)
    use_vertex_groups = BoolProperty(name="Polygroups", description="", default= False)
    use_nurbs = BoolProperty(name="Nurbs", description="", default= False)

    # grouping group
    use_blen_objects = BoolProperty(name="Objects as OBJ Objects", description="", default= True)
    group_by_object = BoolProperty(name="Objects as OBJ Groups ", description="", default= False)
    group_by_material = BoolProperty(name="Material Groups", description="", default= False)
    keep_vertex_order = BoolProperty(name="Keep Vertex Order", description="", default= False)


    def execute(self, context):
        """Run the export with the options collected from the operator UI."""
        filepath = self.properties.filepath
        # Guarantee a .obj extension on the chosen path.
        if not filepath.lower().endswith(".obj"):
            filepath += ".obj"

        do_export(filepath, context,
                  EXPORT_TRI=self.properties.use_triangles,
                  EXPORT_EDGES=self.properties.use_edges,
                  EXPORT_NORMALS=self.properties.use_normals,
                  EXPORT_NORMALS_HQ=self.properties.use_hq_normals,
                  EXPORT_UV=self.properties.use_uvs,
                  EXPORT_MTL=self.properties.use_materials,
                  EXPORT_COPY_IMAGES=self.properties.copy_images,
                  EXPORT_APPLY_MODIFIERS=self.properties.use_modifiers,
                  EXPORT_ROTX90=self.properties.use_rotate90,
                  EXPORT_BLEN_OBS=self.properties.use_blen_objects,
                  EXPORT_GROUP_BY_OB=self.properties.group_by_object,
                  EXPORT_GROUP_BY_MAT=self.properties.group_by_material,
                  EXPORT_KEEP_VERT_ORDER=self.properties.keep_vertex_order,
                  EXPORT_POLYGROUPS=self.properties.use_vertex_groups,
                  EXPORT_CURVE_AS_NURBS=self.properties.use_nurbs,
                  EXPORT_SEL_ONLY=self.properties.use_selection,
                  EXPORT_ALL_SCENES=self.properties.use_all_scenes,
                  # fix: the 'All Animation' option was defined above but
                  # never forwarded, so it silently did nothing.
                  EXPORT_ANIMATION=self.properties.use_animation)

        return {'FINISHED'}

    def invoke(self, context, event):
        # Open the file-select dialog; execute() runs once the user confirms.
        wm = context.manager
        wm.add_fileselect(self)
        return {'RUNNING_MODAL'}
+
+
def menu_func(self, context):
    """File > Export menu entry; pre-fills the path from the .blend name."""
    default_path = os.path.splitext(bpy.data.filepath)[0] + ".obj"
    op = self.layout.operator(ExportOBJ.bl_idname, text="Wavefront (.obj)")
    op.filepath = default_path
+
+
def register():
    # bpy.types.register() is the Blender 2.5x-era class registration API.
    bpy.types.register(ExportOBJ)
    # Add the entry to the File > Export menu.
    bpy.types.INFO_MT_file_export.append(menu_func)
+
def unregister():
    # Mirror of register(): drop the class and the menu entry again.
    bpy.types.unregister(ExportOBJ)
    bpy.types.INFO_MT_file_export.remove(menu_func)
+
+
+# CONVERSION ISSUES
+# - matrix problem
+# - duplis - only tested dupliverts
+# - NURBS - needs API additions
+# - all scenes export
+# + normals calculation
+
# Allow running the exporter directly from Blender's text editor.
if __name__ == "__main__":
    register()
+
diff --git a/Data/BlenderScript/lipsyncbatch.au3 b/Data/BlenderScript/lipsyncbatch.au3
new file mode 100644
index 00000000..874f5667
--- /dev/null
+++ b/Data/BlenderScript/lipsyncbatch.au3
@@ -0,0 +1,40 @@
; Batch driver for the Annosoft Lipsync Tool: launches the tool, then
; repeatedly opens entries from its file list and exports each one.
; The PAUSE key opens a continue/abort dialog (see TogglePause below).
HotKeySet("{PAUSE}", "TogglePause")

Run("C:/Program Files (x86)/Annosoft/Lipsync Tool 4.0/LipsyncTool4.0.exe")
; Dismiss the startup "Reminder" dialog.
WinWaitActive("Reminder")
Send("{ENTER}")
WinWaitActive("The Lipsync Tool!")
; Window position/size; all click coordinates below are offsets from it.
$size = WinGetPos("The Lipsync Tool!")
;MsgBox(0,"Size","Position = " & $size[0] & " " & $size[1])
Dim $click_pos[2]
$click_pos[0] = 746 + $size[0]
$click_pos[1] = 435 + $size[1]
; First pass over the initially visible list rows, bottom to top.
For $i = 10 to 0 Step -1
    ProcessSound($i)
Next
dim $done = False
; Main loop: scroll the list down 3 notches and process the rows that came
; into view.  NOTE(review): $done is never set True anywhere -- the loop
; only ends via the PAUSE hotkey (or closing the script).
While $done == False
    MouseClick("primary", $click_pos[0], $click_pos[1], 1, 0)
    MouseWheel("down", 3)
    For $i = 10 to 2 Step -1
        ProcessSound($i)
    Next
WEnd
+
Func TogglePause()
    ; PAUSE hotkey handler: OK resumes the batch, Cancel (msgbox return
    ; code 2) terminates the whole script.
    $rc = msgbox(1,"Paused","Click Ok to continue or Cancel to stop program")
    if $rc = 2 then exit
EndFunc
+
Func ProcessSound($val)
    ; Process the list row $val slots below $click_pos (rows are 18 px
    ; apart): double-click to open it, wait for the analysis progress
    ; dialog to finish, then export via Ctrl+E and confirm the save dialogs.
    ; NOTE(review): relies on the script-level globals $click_pos and $size.
    MouseClick("primary", $click_pos[0], $click_pos[1]+$val*18, 2, 0)
    WinWaitActive("Progress")
    WinWaitClose("Progress")
    MouseClick("primary", 389 + $size[0],63 + $size[1], 1, 0)
    Send("^e")
    WinWaitActive("Export format")
    Send("{ENTER}")
    WinWaitActive("Save As")
    Send("!s")
    WinWaitClose("Save As")
EndFunc