Welcome to mirror list, hosted at ThFree Co, Russian Federation.

git.blender.org/blender-addons.git - Unnamed repository; edit this file 'description' to name the repository.
summaryrefslogtreecommitdiff
diff options
context:
space:
mode:
-rw-r--r--add_curve_aceous_galore.py7
-rw-r--r--add_curve_torus_knots.py13
-rw-r--r--add_mesh_3d_function_surface.py11
-rw-r--r--add_mesh_BoltFactory/__init__.py13
-rw-r--r--add_mesh_ant_landscape.py11
-rw-r--r--add_mesh_extras.py11
-rw-r--r--add_mesh_gears.py11
-rw-r--r--add_mesh_gemstones.py11
-rw-r--r--add_mesh_pipe_joint.py11
-rw-r--r--add_mesh_solid.py11
-rw-r--r--add_mesh_twisted_torus.py11
-rw-r--r--curve_simplify.py11
-rw-r--r--io_anim_camera.py11
-rw-r--r--io_export_directx_x.py13
-rw-r--r--io_export_unreal_psk_psa.py2703
-rw-r--r--io_import_images_as_planes.py13
-rw-r--r--io_import_scene_mhx.py3607
-rw-r--r--io_import_scene_unreal_psk.py993
-rw-r--r--io_mesh_raw/__init__.py11
-rw-r--r--io_mesh_stl/__init__.py13
-rw-r--r--mesh_relax.py11
-rw-r--r--mesh_surface_sketch.py13
-rw-r--r--object_add_chain.py11
-rw-r--r--object_cloud_gen.py11
-rw-r--r--object_fracture/__init__.py11
-rw-r--r--render_povray/__init__.py11
-rw-r--r--render_renderfarmfi.py13
-rw-r--r--space_view3d_align_tools.py11
-rw-r--r--space_view3d_materials_utils.py11
-rw-r--r--space_view3d_panel_measure.py7
-rw-r--r--space_view3d_property_chart.py11
-rw-r--r--space_view3d_spacebar_menu.py11
-rw-r--r--system_blend_info.py5
33 files changed, 3863 insertions, 3770 deletions
diff --git a/add_curve_aceous_galore.py b/add_curve_aceous_galore.py
index 44f201a9..9de8c015 100644
--- a/add_curve_aceous_galore.py
+++ b/add_curve_aceous_galore.py
@@ -17,10 +17,11 @@
# ##### END GPL LICENSE BLOCK #####
bl_addon_info = {
- 'name': 'Add Curve: Curveaceous Galore!',
+ 'name': 'Curveaceous Galore!',
'author': 'Jimmy Hazevoet, testscreenings',
- 'version': '0.1',
+ 'version': (0,1),
'blender': (2, 5, 3),
+ 'api': 31667,
'location': 'Add Curve menu',
'description': 'adds many types of curves',
'warning': '', # used for warning icon and text in addons panel
@@ -1116,4 +1117,4 @@ def unregister():
bpy.types.INFO_MT_curve_add.remove(Curveaceous_galore_button)
if __name__ == "__main__":
- register() \ No newline at end of file
+ register()
diff --git a/add_curve_torus_knots.py b/add_curve_torus_knots.py
index 362b9377..af0eab51 100644
--- a/add_curve_torus_knots.py
+++ b/add_curve_torus_knots.py
@@ -18,15 +18,18 @@
bl_addon_info = {
- "name": "Add Curve: Torus Knots",
+ "name": "Torus Knots",
"author": "testscreenings",
- "version": "0.1",
+ "version": (0,1),
"blender": (2, 5, 3),
+ "api": 31667,
"location": "View3D > Add > Curve",
"description": "Adds many types of knots",
"warning": "",
- "wiki_url": "http://wiki.blender.org/index.php/Extensions:2.5/Py/Scripts/Curve/Torus_Knot",
- "tracker_url": "https://projects.blender.org/tracker/index.php?func=detail&aid=22403&group_id=153&atid=469",
+ "wiki_url": "http://wiki.blender.org/index.php/Extensions:2.5/Py/"\
+ "Scripts/Curve/Torus_Knot",
+ "tracker_url": "https://projects.blender.org/tracker/index.php?"\
+ "func=detail&aid=22403&group_id=153&atid=469",
"category": "Add Curve"}
@@ -318,4 +321,4 @@ def unregister():
bpy.types.INFO_MT_curve_add.remove(torus_knot_plus_button)
if __name__ == "__main__":
- register() \ No newline at end of file
+ register()
diff --git a/add_mesh_3d_function_surface.py b/add_mesh_3d_function_surface.py
index ea7cbfef..d03f1234 100644
--- a/add_mesh_3d_function_surface.py
+++ b/add_mesh_3d_function_surface.py
@@ -17,15 +17,18 @@
# ##### END GPL LICENSE BLOCK #####
bl_addon_info = {
- "name": "Add Mesh: 3D Function Surfaces",
+ "name": "3D Function Surfaces",
"author": "Buerbaum Martin (Pontiac)",
- "version": "0.3.5",
+ "version": (0,3,5),
"blender": (2, 5, 3),
+ "api": 31667,
"location": "View3D > Add > Mesh > Z Function Surface & XYZ Function Surface",
"description": "Create Objects using Math Formulas",
"warning": "",
- "wiki_url": "http://wiki.blender.org/index.php/Extensions:2.5/Py/Scripts/Add_Mesh/Add_3d_Function_Surface",
- "tracker_url": "https://projects.blender.org/tracker/index.php?func=detail&aid=21444&group_id=153&atid=469",
+ "wiki_url": "http://wiki.blender.org/index.php/Extensions:2.5/Py/"\
+ "Scripts/Add_Mesh/Add_3d_Function_Surface",
+ "tracker_url": "https://projects.blender.org/tracker/index.php?"\
+ "func=detail&aid=21444&group_id=153&atid=469",
"category": "Add Mesh"}
"""
diff --git a/add_mesh_BoltFactory/__init__.py b/add_mesh_BoltFactory/__init__.py
index d81d5d80..80ca1e34 100644
--- a/add_mesh_BoltFactory/__init__.py
+++ b/add_mesh_BoltFactory/__init__.py
@@ -17,13 +17,16 @@
# ##### END GPL LICENSE BLOCK #####
bl_addon_info = {
- "name": "Add Mesh: BoltFactory",
+ "name": "BoltFactory",
"author": "Aaron Keith",
- "version": "3.9",
+ "version": (3,9),
"blender": (2, 5, 3),
+ "api": 31667,
"location": "add Mesh",
- "wiki_url": "http://wiki.blender.org/index.php/Extensions:2.5/Py/Scripts/Add_Mesh/BoltFactory",
- "tracker_url": "https://projects.blender.org/tracker/index.php?func=detail&aid=22842&group_id=153&atid=468",
+ "wiki_url": "http://wiki.blender.org/index.php/Extensions:2.5/Py/"\
+ "Scripts/Add_Mesh/BoltFactory",
+ "tracker_url": "https://projects.blender.org/tracker/index.php?"\
+ "func=detail&aid=22842&group_id=153&atid=468",
"category": "Add Mesh"}
import bpy
@@ -53,4 +56,4 @@ def unregister():
#bpy.types.VIEW3D_PT_tools_objectmode.remove(add_mesh_bolt_button) #just for testing
if __name__ == "__main__":
- register() \ No newline at end of file
+ register()
diff --git a/add_mesh_ant_landscape.py b/add_mesh_ant_landscape.py
index bbb9d293..6fccbab4 100644
--- a/add_mesh_ant_landscape.py
+++ b/add_mesh_ant_landscape.py
@@ -17,15 +17,18 @@
# ##### END GPL LICENSE BLOCK #####
bl_addon_info = {
- "name": "Add Mesh: ANT Landscape",
+ "name": "ANT Landscape",
"author": "Jimmy Hazevoet",
- "version": "0.1.0 July-2010",
+ "version": (0,1,0),
"blender": (2, 5, 3),
+ "api": 31667,
"location": "Add Mesh menu",
"description": "Adds a landscape primitive",
"warning": "", # used for warning icon and text in addons panel
- "wiki_url": "http://wiki.blender.org/index.php/Extensions:2.5/Py/Scripts/",
- "tracker_url": "https://projects.blender.org/tracker/index.php?func=detail&aid=23130&group_id=153&atid=469",
+ "wiki_url": "http://wiki.blender.org/index.php/Extensions:2.5/Py/"\
+ "Scripts/Add_Mesh/ANT_Landscape",
+ "tracker_url": "https://projects.blender.org/tracker/index.php?"\
+ "func=detail&aid=23130&group_id=153&atid=469",
"category": "Add Mesh"}
# import modules
diff --git a/add_mesh_extras.py b/add_mesh_extras.py
index a109f3e6..38c1505f 100644
--- a/add_mesh_extras.py
+++ b/add_mesh_extras.py
@@ -17,15 +17,18 @@
# ##### END GPL LICENSE BLOCK #####
bl_addon_info = {
- "name": "Add Mesh: Extras",
+ "name": "Extras",
"author": "Pontiac, Fourmadmen, meta-androcto",
- "version": "0.3",
+ "version": (0,3),
"blender": (2, 5, 3),
+ "api": 31667,
"location": "View3D > Add > Mesh > Extras",
"description": "Adds Star, Wedge, Sqorus & Spindle objects.",
"warning": "",
- "wiki_url": "http://wiki.blender.org/index.php/Extensions:2.5/Py/Scripts/Add_Mesh/Add_Extra",
- "tracker_url": "https://projects.blender.org/tracker/index.php?func=detail&aid=22457&group_id=153&atid=469",
+ "wiki_url": "http://wiki.blender.org/index.php/Extensions:2.5/Py/"\
+ "Scripts/Add_Mesh/Add_Extra",
+ "tracker_url": "https://projects.blender.org/tracker/index.php?"\
+ "func=detail&aid=22457&group_id=153&atid=469",
"category": "Add Mesh"}
import bpy
diff --git a/add_mesh_gears.py b/add_mesh_gears.py
index 08495f31..c37ec80a 100644
--- a/add_mesh_gears.py
+++ b/add_mesh_gears.py
@@ -20,15 +20,18 @@
# ***** END GPL LICENCE BLOCK *****
bl_addon_info = {
- "name": "Add Mesh: Gears",
+ "name": "Gears",
"author": "Michel J. Anders (varkenvarken)",
- "version": "2.4.1",
+ "version": (2,4,1),
"blender": (2, 5, 3),
+ "api": 31667,
"location": "View3D > Add > Mesh > Gears ",
"description": "Adds a mesh Gear to the Add Mesh menu",
"warning": "",
- "wiki_url": "http://wiki.blender.org/index.php/Extensions:2.5/Py/Scripts/Add_Mesh/Add_Gear",
- "tracker_url": "https://projects.blender.org/tracker/index.php?func=detail&aid=21732&group_id=153&atid=469",
+ "wiki_url": "http://wiki.blender.org/index.php/Extensions:2.5/Py/"\
+ "Scripts/Add_Mesh/Add_Gear",
+ "tracker_url": "https://projects.blender.org/tracker/index.php?"\
+ "func=detail&aid=21732&group_id=153&atid=469",
"category": "Add Mesh"}
"""
diff --git a/add_mesh_gemstones.py b/add_mesh_gemstones.py
index 8d53c4d8..bbbfedc2 100644
--- a/add_mesh_gemstones.py
+++ b/add_mesh_gemstones.py
@@ -17,15 +17,18 @@
# ##### END GPL LICENSE BLOCK #####
bl_addon_info = {
- "name": "Add Mesh: Gemstones",
+ "name": "Gemstones",
"author": "Pontiac, Fourmadmen, Dreampainter",
- "version": "0.3",
+ "version": (0,3),
"blender": (2, 5, 3),
+ "api": 31667,
"location": "View3D > Add > Mesh > Gemstones",
"description": "Adds various gemstone (Diamond & Gem) meshes.",
"warning": "",
- "wiki_url": "http://wiki.blender.org/index.php/Extensions:2.5/Py/Scripts/Add_Mesh/Add_Gemstones",
- "tracker_url": "https://projects.blender.org/tracker/index.php?func=detail&aid=21432&group_id=153&atid=469",
+ "wiki_url": "http://wiki.blender.org/index.php/Extensions:2.5/Py/"\
+ "Scripts/Add_Mesh/Add_Gemstones",
+ "tracker_url": "https://projects.blender.org/tracker/index.php?"\
+ "func=detail&aid=21432&group_id=153&atid=469",
"category": "Add Mesh"}
import bpy
diff --git a/add_mesh_pipe_joint.py b/add_mesh_pipe_joint.py
index 96c351ad..057ad11a 100644
--- a/add_mesh_pipe_joint.py
+++ b/add_mesh_pipe_joint.py
@@ -17,15 +17,18 @@
# ##### END GPL LICENSE BLOCK #####
bl_addon_info = {
- "name": "Add Mesh: Pipe Joints",
+ "name": "Pipe Joints",
"author": "Buerbaum Martin (Pontiac)",
- "version": "0.10.5",
+ "version": (0,10,5),
"blender": (2, 5, 3),
+ "api": 31667,
"location": "View3D > Add > Mesh > Pipe Joint",
"description": "Adds 5 pipe Joint types to the Add Mesh menu",
"warning": "",
- "wiki_url": "http://wiki.blender.org/index.php/Extensions:2.5/Py/Scripts/Add_Mesh/Add_Pipe_Joints",
- "tracker_url": "https://projects.blender.org/tracker/index.php?func=detail&aid=21443&group_id=153&atid=469",
+ "wiki_url": "http://wiki.blender.org/index.php/Extensions:2.5/Py/"\
+ "Scripts/Add_Mesh/Add_Pipe_Joints",
+ "tracker_url": "https://projects.blender.org/tracker/index.php?"\
+ "func=detail&aid=21443&group_id=153&atid=469",
"category": "Add Mesh"}
"""
diff --git a/add_mesh_solid.py b/add_mesh_solid.py
index 99bae8da..1d68e93e 100644
--- a/add_mesh_solid.py
+++ b/add_mesh_solid.py
@@ -18,15 +18,18 @@
# ***** END GPL LICENCE BLOCK *****
bl_addon_info = {
- "name": "Add Mesh: Regular Solids",
+ "name": "Regular Solids",
"author": "DreamPainter",
- "version": "1",
+ "version": (1,),
"blender": (2, 5, 3),
+ "api": 31667,
"location": "View3D > Add > Mesh > Regular Solids",
"description": "Add a Regular Solid mesh.",
"warning": "",
- "wiki_url": "http://wiki.blender.org/index.php/Extensions:2.5/Py/Scripts/Add_Mesh/Add_Solid",
- "tracker_url": "https://projects.blender.org/tracker/index.php?func=detail&aid=22405&group_id=153&atid=469",
+ "wiki_url": "http://wiki.blender.org/index.php/Extensions:2.5/Py/"\
+ "Scripts/Add_Mesh/Add_Solid",
+ "tracker_url": "https://projects.blender.org/tracker/index.php?"\
+ "func=detail&aid=22405&group_id=153&atid=469",
"category": "Add Mesh"}
diff --git a/add_mesh_twisted_torus.py b/add_mesh_twisted_torus.py
index 66fbe9a6..7377f304 100644
--- a/add_mesh_twisted_torus.py
+++ b/add_mesh_twisted_torus.py
@@ -21,15 +21,18 @@
# ***** END GPL LICENCE BLOCK *****
bl_addon_info = {
- "name": "Add Mesh: Twisted Torus",
+ "name": "Twisted Torus",
"author": "Paulo_Gomes",
- "version": "0.11",
+ "version": (0,11),
"blender": (2, 5, 3),
+ "api": 31667,
"location": "View3D > Add > Mesh ",
"description": "Adds a mesh Twisted Torus to the Add Mesh menu",
"warning": "",
- "wiki_url": "http://wiki.blender.org/index.php/Extensions:2.5/Py/Scripts/Add_Mesh/Add_Twisted_Torus",
- "tracker_url": "https://projects.blender.org/tracker/index.php?func=detail&aid=21622&group_id=153&atid=469",
+ "wiki_url": "http://wiki.blender.org/index.php/Extensions:2.5/Py/"\
+ "Scripts/Add_Mesh/Add_Twisted_Torus",
+ "tracker_url": "https://projects.blender.org/tracker/index.php?"\
+ "func=detail&aid=21622&group_id=153&atid=469",
"category": "Add Mesh"}
"""
diff --git a/curve_simplify.py b/curve_simplify.py
index be17146c..f723b99d 100644
--- a/curve_simplify.py
+++ b/curve_simplify.py
@@ -17,15 +17,18 @@
# ##### END GPL LICENSE BLOCK #####
bl_addon_info = {
- "name": "Curve: simplify curves",
+ "name": "Simplify curves",
"author": "testscreenings",
- "version": "1",
+ "version": (1,),
"blender": (2, 5, 3),
+ "api": 31667,
"location": "Toolshelf > search > simplify curves",
"description": "This script simplifies 3D curves and fcurves",
"warning": "",
- "wiki_url": "http://wiki.blender.org/index.php/Extensions:2.5/Py/Scripts/Curve/Curve_Simplify",
- "tracker_url": "https://projects.blender.org/tracker/index.php?func=detail&aid=22327&group_id=153&atid=468",
+ "wiki_url": "http://wiki.blender.org/index.php/Extensions:2.5/Py/"\
+ "Scripts/Curve/Curve_Simplify",
+ "tracker_url": "https://projects.blender.org/tracker/index.php?"\
+ "func=detail&aid=22327&group_id=153&atid=468",
"category": "Add Curve"}
"""
diff --git a/io_anim_camera.py b/io_anim_camera.py
index 92c473c7..b0ee4cbc 100644
--- a/io_anim_camera.py
+++ b/io_anim_camera.py
@@ -19,15 +19,18 @@
# <pep8 compliant>
bl_addon_info = {
- "name": "Export: Camera Animation",
+ "name": "Export Camera Animation",
"author": "Campbell Barton",
- "version": "0.1",
+ "version": (0,1),
"blender": (2, 5, 3),
+ "api": 31667,
"location": "File > Export > Camera Animation",
"description": "Export Cameras & Markers",
"warning": "",
- "wiki_url": "http://wiki.blender.org/index.php/Extensions:2.5/Py/Scripts/File_I-O/Camera_Animation",
- "tracker_url": "https://projects.blender.org/tracker/index.php?func=detail&aid=22835&group_id=153&atid=469",
+ "wiki_url": "http://wiki.blender.org/index.php/Extensions:2.5/Py/"\
+ "Scripts/File_I-O/Camera_Animation",
+ "tracker_url": "https://projects.blender.org/tracker/index.php?"\
+ "func=detail&aid=22835&group_id=153&atid=469",
"category": "Import/Export"}
diff --git a/io_export_directx_x.py b/io_export_directx_x.py
index 92e37bbc..aa287e0e 100644
--- a/io_export_directx_x.py
+++ b/io_export_directx_x.py
@@ -16,15 +16,18 @@
# ***** GPL LICENSE BLOCK *****
bl_addon_info = {
- "name": "Export: DirectX Model Format (.x)",
+ "name": "Export DirectX Model Format (.x)",
"author": "Chris Foster (Kira Vakaan)",
- "version": "1.6",
+ "version": (1,6),
"blender": (2, 5, 3),
+ "api": 31667,
"location": "File > Export",
"description": "Export to the DirectX Model Format (.x)",
"warning": "",
- "wiki_url": "http://wiki.blender.org/index.php/Extensions:2.5/Py/Scripts/File_I-O/DirectX_Exporter",
- "tracker_url": "https://projects.blender.org/tracker/index.php?func=detail&aid=22795&group_id=153&atid=469",
+ "wiki_url": "http://wiki.blender.org/index.php/Extensions:2.5/Py/"\
+ "Scripts/File_I-O/DirectX_Exporter",
+ "tracker_url": "https://projects.blender.org/tracker/index.php?"\
+ "func=detail&aid=22795&group_id=153&atid=469",
"category": "Import/Export"}
import os
@@ -1148,4 +1151,4 @@ def unregister():
if __name__ == "__main__":
- register() \ No newline at end of file
+ register()
diff --git a/io_export_unreal_psk_psa.py b/io_export_unreal_psk_psa.py
index 7be383f6..d39f2412 100644
--- a/io_export_unreal_psk_psa.py
+++ b/io_export_unreal_psk_psa.py
@@ -16,15 +16,18 @@
# ***** GPL LICENSE BLOCK *****
bl_addon_info = {
- "name": "Export: Skeleletal Mesh/Animation Data",
+ "name": "Export Skeleletal Mesh/Animation Data",
"author": "Darknet/Optimus_P-Fat/Active_Trash/Sinsoft",
- "version": "2.0",
+ "version": (2,0),
"blender": (2, 5, 3),
+ "api": 31667,
"location": "File > Export > Skeletal Mesh/Animation Data (.psk/.psa)",
"description": "Export Unreal Engine (.psk)",
"warning": "",
- "wiki_url": "http://wiki.blender.org/index.php/Extensions:2.5/Py/Scripts/File_I-O/Unreal_psk_psa",
- "tracker_url": "https://projects.blender.org/tracker/index.php?func=detail&aid=21366&group_id=153&atid=469",
+ "wiki_url": "http://wiki.blender.org/index.php/Extensions:2.5/Py/"\
+ "Scripts/File_I-O/Unreal_psk_psa",
+ "tracker_url": "https://projects.blender.org/tracker/index.php?"\
+ "func=detail&aid=21366&group_id=153&atid=469",
"category": "Import/Export"}
"""
@@ -111,256 +114,256 @@ SIZE_VTRIANGLE = 12
# Generic Object->Integer mapping
# the object must be usable as a dictionary key
class ObjMap:
- def __init__(self):
- self.dict = {}
- self.next = 0
- def get(self, obj):
- if obj in self.dict:
- return self.dict[obj]
- else:
- id = self.next
- self.next = self.next + 1
- self.dict[obj] = id
- return id
-
- def items(self):
- getval = operator.itemgetter(0)
- getkey = operator.itemgetter(1)
- return map(getval, sorted(self.dict.items(), key=getkey))
+ def __init__(self):
+ self.dict = {}
+ self.next = 0
+ def get(self, obj):
+ if obj in self.dict:
+ return self.dict[obj]
+ else:
+ id = self.next
+ self.next = self.next + 1
+ self.dict[obj] = id
+ return id
+
+ def items(self):
+ getval = operator.itemgetter(0)
+ getkey = operator.itemgetter(1)
+ return map(getval, sorted(self.dict.items(), key=getkey))
########################################################################
# RG - UNREAL DATA STRUCTS - CONVERTED FROM C STRUCTS GIVEN ON UDN SITE
# provided here: http://udn.epicgames.com/Two/BinaryFormatSpecifications.html
# updated UDK (Unreal Engine 3): http://udn.epicgames.com/Three/BinaryFormatSpecifications.html
class FQuat:
- def __init__(self):
- self.X = 0.0
- self.Y = 0.0
- self.Z = 0.0
- self.W = 1.0
-
- def dump(self):
- data = pack('ffff', self.X, self.Y, self.Z, self.W)
- return data
-
- def __cmp__(self, other):
- return cmp(self.X, other.X) \
- or cmp(self.Y, other.Y) \
- or cmp(self.Z, other.Z) \
- or cmp(self.W, other.W)
-
- def __hash__(self):
- return hash(self.X) ^ hash(self.Y) ^ hash(self.Z) ^ hash(self.W)
-
- def __str__(self):
- return "[%f,%f,%f,%f](FQuat)" % (self.X, self.Y, self.Z, self.W)
-
+ def __init__(self):
+ self.X = 0.0
+ self.Y = 0.0
+ self.Z = 0.0
+ self.W = 1.0
+
+ def dump(self):
+ data = pack('ffff', self.X, self.Y, self.Z, self.W)
+ return data
+
+ def __cmp__(self, other):
+ return cmp(self.X, other.X) \
+ or cmp(self.Y, other.Y) \
+ or cmp(self.Z, other.Z) \
+ or cmp(self.W, other.W)
+
+ def __hash__(self):
+ return hash(self.X) ^ hash(self.Y) ^ hash(self.Z) ^ hash(self.W)
+
+ def __str__(self):
+ return "[%f,%f,%f,%f](FQuat)" % (self.X, self.Y, self.Z, self.W)
+
class FVector(object):
- def __init__(self, X=0.0, Y=0.0, Z=0.0):
- self.X = X
- self.Y = Y
- self.Z = Z
-
- def dump(self):
- data = pack('fff', self.X, self.Y, self.Z)
- return data
-
- def __cmp__(self, other):
- return cmp(self.X, other.X) \
- or cmp(self.Y, other.Y) \
- or cmp(self.Z, other.Z)
-
- def _key(self):
- return (type(self).__name__, self.X, self.Y, self.Z)
-
- def __hash__(self):
- return hash(self._key())
-
- def __eq__(self, other):
- if not hasattr(other, '_key'):
- return False
- return self._key() == other._key()
-
- def dot(self, other):
- return self.X * other.X + self.Y * other.Y + self.Z * other.Z
-
- def cross(self, other):
- return FVector(self.Y * other.Z - self.Z * other.Y,
- self.Z * other.X - self.X * other.Z,
- self.X * other.Y - self.Y * other.X)
-
- def sub(self, other):
- return FVector(self.X - other.X,
- self.Y - other.Y,
- self.Z - other.Z)
+ def __init__(self, X=0.0, Y=0.0, Z=0.0):
+ self.X = X
+ self.Y = Y
+ self.Z = Z
+
+ def dump(self):
+ data = pack('fff', self.X, self.Y, self.Z)
+ return data
+
+ def __cmp__(self, other):
+ return cmp(self.X, other.X) \
+ or cmp(self.Y, other.Y) \
+ or cmp(self.Z, other.Z)
+
+ def _key(self):
+ return (type(self).__name__, self.X, self.Y, self.Z)
+
+ def __hash__(self):
+ return hash(self._key())
+
+ def __eq__(self, other):
+ if not hasattr(other, '_key'):
+ return False
+ return self._key() == other._key()
+
+ def dot(self, other):
+ return self.X * other.X + self.Y * other.Y + self.Z * other.Z
+
+ def cross(self, other):
+ return FVector(self.Y * other.Z - self.Z * other.Y,
+ self.Z * other.X - self.X * other.Z,
+ self.X * other.Y - self.Y * other.X)
+
+ def sub(self, other):
+ return FVector(self.X - other.X,
+ self.Y - other.Y,
+ self.Z - other.Z)
class VJointPos:
- def __init__(self):
- self.Orientation = FQuat()
- self.Position = FVector()
- self.Length = 0.0
- self.XSize = 0.0
- self.YSize = 0.0
- self.ZSize = 0.0
-
- def dump(self):
- data = self.Orientation.dump() + self.Position.dump() + pack('4f', self.Length, self.XSize, self.YSize, self.ZSize)
- return data
-
+ def __init__(self):
+ self.Orientation = FQuat()
+ self.Position = FVector()
+ self.Length = 0.0
+ self.XSize = 0.0
+ self.YSize = 0.0
+ self.ZSize = 0.0
+
+ def dump(self):
+ data = self.Orientation.dump() + self.Position.dump() + pack('4f', self.Length, self.XSize, self.YSize, self.ZSize)
+ return data
+
class AnimInfoBinary:
- def __init__(self):
- self.Name = "" # length=64
- self.Group = "" # length=64
- self.TotalBones = 0
- self.RootInclude = 0
- self.KeyCompressionStyle = 0
- self.KeyQuotum = 0
- self.KeyPrediction = 0.0
- self.TrackTime = 0.0
- self.AnimRate = 0.0
- self.StartBone = 0
- self.FirstRawFrame = 0
- self.NumRawFrames = 0
-
- def dump(self):
- data = pack('64s64siiiifffiii', self.Name, self.Group, self.TotalBones, self.RootInclude, self.KeyCompressionStyle, self.KeyQuotum, self.KeyPrediction, self.TrackTime, self.AnimRate, self.StartBone, self.FirstRawFrame, self.NumRawFrames)
- return data
+ def __init__(self):
+ self.Name = "" # length=64
+ self.Group = "" # length=64
+ self.TotalBones = 0
+ self.RootInclude = 0
+ self.KeyCompressionStyle = 0
+ self.KeyQuotum = 0
+ self.KeyPrediction = 0.0
+ self.TrackTime = 0.0
+ self.AnimRate = 0.0
+ self.StartBone = 0
+ self.FirstRawFrame = 0
+ self.NumRawFrames = 0
+
+ def dump(self):
+ data = pack('64s64siiiifffiii', self.Name, self.Group, self.TotalBones, self.RootInclude, self.KeyCompressionStyle, self.KeyQuotum, self.KeyPrediction, self.TrackTime, self.AnimRate, self.StartBone, self.FirstRawFrame, self.NumRawFrames)
+ return data
class VChunkHeader:
- def __init__(self, name, type_size):
- self.ChunkID = name # length=20
- self.TypeFlag = 1999801 # special value
- self.DataSize = type_size
- self.DataCount = 0
-
- def dump(self):
- data = pack('20siii', self.ChunkID, self.TypeFlag, self.DataSize, self.DataCount)
- return data
-
+ def __init__(self, name, type_size):
+ self.ChunkID = name # length=20
+ self.TypeFlag = 1999801 # special value
+ self.DataSize = type_size
+ self.DataCount = 0
+
+ def dump(self):
+ data = pack('20siii', self.ChunkID, self.TypeFlag, self.DataSize, self.DataCount)
+ return data
+
class VMaterial:
- def __init__(self):
- self.MaterialName = "" # length=64
- self.TextureIndex = 0
- self.PolyFlags = 0 # DWORD
- self.AuxMaterial = 0
- self.AuxFlags = 0 # DWORD
- self.LodBias = 0
- self.LodStyle = 0
-
- def dump(self):
- data = pack('64siLiLii', self.MaterialName, self.TextureIndex, self.PolyFlags, self.AuxMaterial, self.AuxFlags, self.LodBias, self.LodStyle)
- return data
+ def __init__(self):
+ self.MaterialName = "" # length=64
+ self.TextureIndex = 0
+ self.PolyFlags = 0 # DWORD
+ self.AuxMaterial = 0
+ self.AuxFlags = 0 # DWORD
+ self.LodBias = 0
+ self.LodStyle = 0
+
+ def dump(self):
+ data = pack('64siLiLii', self.MaterialName, self.TextureIndex, self.PolyFlags, self.AuxMaterial, self.AuxFlags, self.LodBias, self.LodStyle)
+ return data
class VBone:
- def __init__(self):
- self.Name = "" # length = 64
- self.Flags = 0 # DWORD
- self.NumChildren = 0
- self.ParentIndex = 0
- self.BonePos = VJointPos()
-
- def dump(self):
- data = pack('64sLii', self.Name, self.Flags, self.NumChildren, self.ParentIndex) + self.BonePos.dump()
- return data
-
-#same as above - whatever - this is how Epic does it...
+ def __init__(self):
+ self.Name = "" # length = 64
+ self.Flags = 0 # DWORD
+ self.NumChildren = 0
+ self.ParentIndex = 0
+ self.BonePos = VJointPos()
+
+ def dump(self):
+ data = pack('64sLii', self.Name, self.Flags, self.NumChildren, self.ParentIndex) + self.BonePos.dump()
+ return data
+
+#same as above - whatever - this is how Epic does it...
class FNamedBoneBinary:
- def __init__(self):
- self.Name = "" # length = 64
- self.Flags = 0 # DWORD
- self.NumChildren = 0
- self.ParentIndex = 0
- self.BonePos = VJointPos()
-
- self.IsRealBone = 0 # this is set to 1 when the bone is actually a bone in the mesh and not a dummy
-
- def dump(self):
- data = pack('64sLii', self.Name, self.Flags, self.NumChildren, self.ParentIndex) + self.BonePos.dump()
- return data
-
+ def __init__(self):
+ self.Name = "" # length = 64
+ self.Flags = 0 # DWORD
+ self.NumChildren = 0
+ self.ParentIndex = 0
+ self.BonePos = VJointPos()
+
+ self.IsRealBone = 0 # this is set to 1 when the bone is actually a bone in the mesh and not a dummy
+
+ def dump(self):
+ data = pack('64sLii', self.Name, self.Flags, self.NumChildren, self.ParentIndex) + self.BonePos.dump()
+ return data
+
class VRawBoneInfluence:
- def __init__(self):
- self.Weight = 0.0
- self.PointIndex = 0
- self.BoneIndex = 0
-
- def dump(self):
- data = pack('fii', self.Weight, self.PointIndex, self.BoneIndex)
- return data
-
+ def __init__(self):
+ self.Weight = 0.0
+ self.PointIndex = 0
+ self.BoneIndex = 0
+
+ def dump(self):
+ data = pack('fii', self.Weight, self.PointIndex, self.BoneIndex)
+ return data
+
class VQuatAnimKey:
- def __init__(self):
- self.Position = FVector()
- self.Orientation = FQuat()
- self.Time = 0.0
-
- def dump(self):
- data = self.Position.dump() + self.Orientation.dump() + pack('f', self.Time)
- return data
-
+ def __init__(self):
+ self.Position = FVector()
+ self.Orientation = FQuat()
+ self.Time = 0.0
+
+ def dump(self):
+ data = self.Position.dump() + self.Orientation.dump() + pack('f', self.Time)
+ return data
+
class VVertex(object):
- def __init__(self):
- self.PointIndex = 0 # WORD
- self.U = 0.0
- self.V = 0.0
- self.MatIndex = 0 #BYTE
- self.Reserved = 0 #BYTE
-
- def dump(self):
- data = pack('HHffBBH', self.PointIndex, 0, self.U, self.V, self.MatIndex, self.Reserved, 0)
- return data
-
- def __cmp__(self, other):
- return cmp(self.PointIndex, other.PointIndex) \
- or cmp(self.U, other.U) \
- or cmp(self.V, other.V) \
- or cmp(self.MatIndex, other.MatIndex) \
- or cmp(self.Reserved, other.Reserved)
-
- def _key(self):
- return (type(self).__name__,self.PointIndex, self.U, self.V,self.MatIndex,self.Reserved)
-
- def __hash__(self):
- return hash(self._key())
-
- def __eq__(self, other):
- if not hasattr(other, '_key'):
- return False
- return self._key() == other._key()
-
+ def __init__(self):
+ self.PointIndex = 0 # WORD
+ self.U = 0.0
+ self.V = 0.0
+ self.MatIndex = 0 #BYTE
+ self.Reserved = 0 #BYTE
+
+ def dump(self):
+ data = pack('HHffBBH', self.PointIndex, 0, self.U, self.V, self.MatIndex, self.Reserved, 0)
+ return data
+
+ def __cmp__(self, other):
+ return cmp(self.PointIndex, other.PointIndex) \
+ or cmp(self.U, other.U) \
+ or cmp(self.V, other.V) \
+ or cmp(self.MatIndex, other.MatIndex) \
+ or cmp(self.Reserved, other.Reserved)
+
+ def _key(self):
+ return (type(self).__name__,self.PointIndex, self.U, self.V,self.MatIndex,self.Reserved)
+
+ def __hash__(self):
+ return hash(self._key())
+
+ def __eq__(self, other):
+ if not hasattr(other, '_key'):
+ return False
+ return self._key() == other._key()
+
class VPoint(object):
- def __init__(self):
- self.Point = FVector()
-
- def dump(self):
- return self.Point.dump()
-
- def __cmp__(self, other):
- return cmp(self.Point, other.Point)
-
- def _key(self):
- return (type(self).__name__, self.Point)
-
- def __hash__(self):
- return hash(self._key())
-
- def __eq__(self, other):
- if not hasattr(other, '_key'):
- return False
- return self._key() == other._key()
-
+ def __init__(self):
+ self.Point = FVector()
+
+ def dump(self):
+ return self.Point.dump()
+
+ def __cmp__(self, other):
+ return cmp(self.Point, other.Point)
+
+ def _key(self):
+ return (type(self).__name__, self.Point)
+
+ def __hash__(self):
+ return hash(self._key())
+
+ def __eq__(self, other):
+ if not hasattr(other, '_key'):
+ return False
+ return self._key() == other._key()
+
class VTriangle:
- def __init__(self):
- self.WedgeIndex0 = 0 # WORD
- self.WedgeIndex1 = 0 # WORD
- self.WedgeIndex2 = 0 # WORD
- self.MatIndex = 0 # BYTE
- self.AuxMatIndex = 0 # BYTE
- self.SmoothingGroups = 0 # DWORD
-
- def dump(self):
- data = pack('HHHBBL', self.WedgeIndex0, self.WedgeIndex1, self.WedgeIndex2, self.MatIndex, self.AuxMatIndex, self.SmoothingGroups)
- return data
+ def __init__(self):
+ self.WedgeIndex0 = 0 # WORD
+ self.WedgeIndex1 = 0 # WORD
+ self.WedgeIndex2 = 0 # WORD
+ self.MatIndex = 0 # BYTE
+ self.AuxMatIndex = 0 # BYTE
+ self.SmoothingGroups = 0 # DWORD
+
+ def dump(self):
+ data = pack('HHHBBL', self.WedgeIndex0, self.WedgeIndex1, self.WedgeIndex2, self.MatIndex, self.AuxMatIndex, self.SmoothingGroups)
+ return data
# END UNREAL DATA STRUCTS
########################################################################
@@ -369,1056 +372,1056 @@ class VTriangle:
#RG - helper class to handle the normal way the UT files are stored
#as sections consisting of a header and then a list of data structures
class FileSection:
- def __init__(self, name, type_size):
- self.Header = VChunkHeader(name, type_size)
- self.Data = [] # list of datatypes
-
- def dump(self):
- data = self.Header.dump()
- for i in range(len(self.Data)):
- data = data + self.Data[i].dump()
- return data
-
- def UpdateHeader(self):
- self.Header.DataCount = len(self.Data)
-
+ def __init__(self, name, type_size):
+ self.Header = VChunkHeader(name, type_size)
+ self.Data = [] # list of datatypes
+
+ def dump(self):
+ data = self.Header.dump()
+ for i in range(len(self.Data)):
+ data = data + self.Data[i].dump()
+ return data
+
+ def UpdateHeader(self):
+ self.Header.DataCount = len(self.Data)
+
class PSKFile:
- def __init__(self):
- self.GeneralHeader = VChunkHeader("ACTRHEAD", 0)
- self.Points = FileSection("PNTS0000", SIZE_VPOINT) #VPoint
- self.Wedges = FileSection("VTXW0000", SIZE_VVERTEX) #VVertex
- self.Faces = FileSection("FACE0000", SIZE_VTRIANGLE) #VTriangle
- self.Materials = FileSection("MATT0000", SIZE_VMATERIAL) #VMaterial
- self.Bones = FileSection("REFSKELT", SIZE_VBONE) #VBone
- self.Influences = FileSection("RAWWEIGHTS", SIZE_VRAWBONEINFLUENCE) #VRawBoneInfluence
-
- #RG - this mapping is not dumped, but is used internally to store the new point indices
- # for vertex groups calculated during the mesh dump, so they can be used again
- # to dump bone influences during the armature dump
- #
- # the key in this dictionary is the VertexGroup/Bone Name, and the value
- # is a list of tuples containing the new point index and the weight, in that order
- #
- # Layout:
- # { groupname : [ (index, weight), ... ], ... }
- #
- # example:
- # { 'MyVertexGroup' : [ (0, 1.0), (5, 1.0), (3, 0.5) ] , 'OtherGroup' : [(2, 1.0)] }
-
- self.VertexGroups = {}
-
- def AddPoint(self, p):
- #print ('AddPoint')
- self.Points.Data.append(p)
-
- def AddWedge(self, w):
- #print ('AddWedge')
- self.Wedges.Data.append(w)
-
- def AddFace(self, f):
- #print ('AddFace')
- self.Faces.Data.append(f)
-
- def AddMaterial(self, m):
- #print ('AddMaterial')
- self.Materials.Data.append(m)
-
- def AddBone(self, b):
- #print ('AddBone [%s]: Position: (x=%f, y=%f, z=%f) Rotation=(%f,%f,%f,%f)' % (b.Name, b.BonePos.Position.X, b.BonePos.Position.Y, b.BonePos.Position.Z, b.BonePos.Orientation.X,b.BonePos.Orientation.Y,b.BonePos.Orientation.Z,b.BonePos.Orientation.W))
- self.Bones.Data.append(b)
-
- def AddInfluence(self, i):
- #print ('AddInfluence')
- self.Influences.Data.append(i)
-
- def UpdateHeaders(self):
- self.Points.UpdateHeader()
- self.Wedges.UpdateHeader()
- self.Faces.UpdateHeader()
- self.Materials.UpdateHeader()
- self.Bones.UpdateHeader()
- self.Influences.UpdateHeader()
-
- def dump(self):
- self.UpdateHeaders()
- data = self.GeneralHeader.dump() + self.Points.dump() + self.Wedges.dump() + self.Faces.dump() + self.Materials.dump() + self.Bones.dump() + self.Influences.dump()
- return data
-
- def GetMatByIndex(self, mat_index):
- if mat_index >= 0 and len(self.Materials.Data) > mat_index:
- return self.Materials.Data[mat_index]
- else:
- m = VMaterial()
- m.MaterialName = "Mat%i" % mat_index
- self.AddMaterial(m)
- return m
-
- def PrintOut(self):
- print ("--- PSK FILE EXPORTED ---")
- print ('point count: %i' % len(self.Points.Data))
- print ('wedge count: %i' % len(self.Wedges.Data))
- print ('face count: %i' % len(self.Faces.Data))
- print ('material count: %i' % len(self.Materials.Data))
- print ('bone count: %i' % len(self.Bones.Data))
- print ('inlfuence count: %i' % len(self.Influences.Data))
- print ('-------------------------')
+ def __init__(self):
+ self.GeneralHeader = VChunkHeader("ACTRHEAD", 0)
+ self.Points = FileSection("PNTS0000", SIZE_VPOINT) #VPoint
+ self.Wedges = FileSection("VTXW0000", SIZE_VVERTEX) #VVertex
+ self.Faces = FileSection("FACE0000", SIZE_VTRIANGLE) #VTriangle
+ self.Materials = FileSection("MATT0000", SIZE_VMATERIAL) #VMaterial
+ self.Bones = FileSection("REFSKELT", SIZE_VBONE) #VBone
+ self.Influences = FileSection("RAWWEIGHTS", SIZE_VRAWBONEINFLUENCE) #VRawBoneInfluence
+
+ #RG - this mapping is not dumped, but is used internally to store the new point indices
+ # for vertex groups calculated during the mesh dump, so they can be used again
+ # to dump bone influences during the armature dump
+ #
+ # the key in this dictionary is the VertexGroup/Bone Name, and the value
+ # is a list of tuples containing the new point index and the weight, in that order
+ #
+ # Layout:
+ # { groupname : [ (index, weight), ... ], ... }
+ #
+ # example:
+ # { 'MyVertexGroup' : [ (0, 1.0), (5, 1.0), (3, 0.5) ] , 'OtherGroup' : [(2, 1.0)] }
+
+ self.VertexGroups = {}
+
+ def AddPoint(self, p):
+ #print ('AddPoint')
+ self.Points.Data.append(p)
+
+ def AddWedge(self, w):
+ #print ('AddWedge')
+ self.Wedges.Data.append(w)
+
+ def AddFace(self, f):
+ #print ('AddFace')
+ self.Faces.Data.append(f)
+
+ def AddMaterial(self, m):
+ #print ('AddMaterial')
+ self.Materials.Data.append(m)
+
+ def AddBone(self, b):
+ #print ('AddBone [%s]: Position: (x=%f, y=%f, z=%f) Rotation=(%f,%f,%f,%f)' % (b.Name, b.BonePos.Position.X, b.BonePos.Position.Y, b.BonePos.Position.Z, b.BonePos.Orientation.X,b.BonePos.Orientation.Y,b.BonePos.Orientation.Z,b.BonePos.Orientation.W))
+ self.Bones.Data.append(b)
+
+ def AddInfluence(self, i):
+ #print ('AddInfluence')
+ self.Influences.Data.append(i)
+
+ def UpdateHeaders(self):
+ self.Points.UpdateHeader()
+ self.Wedges.UpdateHeader()
+ self.Faces.UpdateHeader()
+ self.Materials.UpdateHeader()
+ self.Bones.UpdateHeader()
+ self.Influences.UpdateHeader()
+
+ def dump(self):
+ self.UpdateHeaders()
+ data = self.GeneralHeader.dump() + self.Points.dump() + self.Wedges.dump() + self.Faces.dump() + self.Materials.dump() + self.Bones.dump() + self.Influences.dump()
+ return data
+
+ def GetMatByIndex(self, mat_index):
+ if mat_index >= 0 and len(self.Materials.Data) > mat_index:
+ return self.Materials.Data[mat_index]
+ else:
+ m = VMaterial()
+ m.MaterialName = "Mat%i" % mat_index
+ self.AddMaterial(m)
+ return m
+
+ def PrintOut(self):
+ print ("--- PSK FILE EXPORTED ---")
+ print ('point count: %i' % len(self.Points.Data))
+ print ('wedge count: %i' % len(self.Wedges.Data))
+ print ('face count: %i' % len(self.Faces.Data))
+ print ('material count: %i' % len(self.Materials.Data))
+ print ('bone count: %i' % len(self.Bones.Data))
+ print ('inlfuence count: %i' % len(self.Influences.Data))
+ print ('-------------------------')
# PSA FILE NOTES FROM UDN:
#
-# The raw key array holds all the keys for all the bones in all the specified sequences,
-# organized as follows:
-# For each AnimInfoBinary's sequence there are [Number of bones] times [Number of frames keys]
-# in the VQuatAnimKeys, laid out as tracks of [numframes] keys for each bone in the order of
-# the bones as defined in the array of FnamedBoneBinary in the PSA.
+# The raw key array holds all the keys for all the bones in all the specified sequences,
+# organized as follows:
+# For each AnimInfoBinary's sequence there are [Number of bones] times [Number of frames keys]
+# in the VQuatAnimKeys, laid out as tracks of [numframes] keys for each bone in the order of
+# the bones as defined in the array of FnamedBoneBinary in the PSA.
#
-# Once the data from the PSK (now digested into native skeletal mesh) and PSA (digested into
-# a native animation object containing one or more sequences) are associated together at runtime,
-# bones are linked up by name. Any bone in a skeleton (from the PSK) that finds no partner in
-# the animation sequence (from the PSA) will assume its reference pose stance ( as defined in
-# the offsets & rotations that are in the VBones making up the reference skeleton from the PSK)
+# Once the data from the PSK (now digested into native skeletal mesh) and PSA (digested into
+# a native animation object containing one or more sequences) are associated together at runtime,
+# bones are linked up by name. Any bone in a skeleton (from the PSK) that finds no partner in
+# the animation sequence (from the PSA) will assume its reference pose stance ( as defined in
+# the offsets & rotations that are in the VBones making up the reference skeleton from the PSK)
class PSAFile:
- def __init__(self):
- self.GeneralHeader = VChunkHeader("ANIMHEAD", 0)
- self.Bones = FileSection("BONENAMES", SIZE_FNAMEDBONEBINARY) #FNamedBoneBinary
- self.Animations = FileSection("ANIMINFO", SIZE_ANIMINFOBINARY) #AnimInfoBinary
- self.RawKeys = FileSection("ANIMKEYS", SIZE_VQUATANIMKEY) #VQuatAnimKey
-
- # this will take the format of key=Bone Name, value = (BoneIndex, Bone Object)
- # THIS IS NOT DUMPED
- self.BoneLookup = {}
-
- def dump(self):
- data = self.Generalheader.dump() + self.Bones.dump() + self.Animations.dump() + self.RawKeys.dump()
- return data
-
- def AddBone(self, b):
- #LOUD
- #print "AddBone: " + b.Name
- self.Bones.Data.append(b)
-
- def AddAnimation(self, a):
- #LOUD
- #print "AddAnimation: %s, TotalBones: %i, AnimRate: %f, NumRawFrames: %i, TrackTime: %f" % (a.Name, a.TotalBones, a.AnimRate, a.NumRawFrames, a.TrackTime)
- self.Animations.Data.append(a)
-
- def AddRawKey(self, k):
- #LOUD
- #print "AddRawKey [%i]: Time: %f, Quat: x=%f, y=%f, z=%f, w=%f, Position: x=%f, y=%f, z=%f" % (len(self.RawKeys.Data), k.Time, k.Orientation.X, k.Orientation.Y, k.Orientation.Z, k.Orientation.W, k.Position.X, k.Position.Y, k.Position.Z)
- self.RawKeys.Data.append(k)
-
- def UpdateHeaders(self):
- self.Bones.UpdateHeader()
- self.Animations.UpdateHeader()
- self.RawKeys.UpdateHeader()
-
- def GetBoneByIndex(self, bone_index):
- if bone_index >= 0 and len(self.Bones.Data) > bone_index:
- return self.Bones.Data[bone_index]
-
- def IsEmpty(self):
- return (len(self.Bones.Data) == 0 or len(self.Animations.Data) == 0)
-
- def StoreBone(self, b):
- self.BoneLookup[b.Name] = [-1, b]
-
- def UseBone(self, bone_name):
- if bone_name in self.BoneLookup:
- bone_data = self.BoneLookup[bone_name]
-
- if bone_data[0] == -1:
- bone_data[0] = len(self.Bones.Data)
- self.AddBone(bone_data[1])
- #self.Bones.Data.append(bone_data[1])
-
- return bone_data[0]
-
- def GetBoneByName(self, bone_name):
- if bone_name in self.BoneLookup:
- bone_data = self.BoneLookup[bone_name]
- return bone_data[1]
-
- def GetBoneIndex(self, bone_name):
- if bone_name in self.BoneLookup:
- bone_data = self.BoneLookup[bone_name]
- return bone_data[0]
-
- def dump(self):
- self.UpdateHeaders()
- data = self.GeneralHeader.dump() + self.Bones.dump() + self.Animations.dump() + self.RawKeys.dump()
- return data
-
- def PrintOut(self):
- print ('--- PSA FILE EXPORTED ---')
- print ('bone count: %i' % len(self.Bones.Data))
- print ('animation count: %i' % len(self.Animations.Data))
- print ('rawkey count: %i' % len(self.RawKeys.Data))
- print ('-------------------------')
-
-####################################
+ def __init__(self):
+ self.GeneralHeader = VChunkHeader("ANIMHEAD", 0)
+ self.Bones = FileSection("BONENAMES", SIZE_FNAMEDBONEBINARY) #FNamedBoneBinary
+ self.Animations = FileSection("ANIMINFO", SIZE_ANIMINFOBINARY) #AnimInfoBinary
+ self.RawKeys = FileSection("ANIMKEYS", SIZE_VQUATANIMKEY) #VQuatAnimKey
+
+ # this will take the format of key=Bone Name, value = (BoneIndex, Bone Object)
+ # THIS IS NOT DUMPED
+ self.BoneLookup = {}
+
+ def dump(self):
+ data = self.Generalheader.dump() + self.Bones.dump() + self.Animations.dump() + self.RawKeys.dump()
+ return data
+
+ def AddBone(self, b):
+ #LOUD
+ #print "AddBone: " + b.Name
+ self.Bones.Data.append(b)
+
+ def AddAnimation(self, a):
+ #LOUD
+ #print "AddAnimation: %s, TotalBones: %i, AnimRate: %f, NumRawFrames: %i, TrackTime: %f" % (a.Name, a.TotalBones, a.AnimRate, a.NumRawFrames, a.TrackTime)
+ self.Animations.Data.append(a)
+
+ def AddRawKey(self, k):
+ #LOUD
+ #print "AddRawKey [%i]: Time: %f, Quat: x=%f, y=%f, z=%f, w=%f, Position: x=%f, y=%f, z=%f" % (len(self.RawKeys.Data), k.Time, k.Orientation.X, k.Orientation.Y, k.Orientation.Z, k.Orientation.W, k.Position.X, k.Position.Y, k.Position.Z)
+ self.RawKeys.Data.append(k)
+
+ def UpdateHeaders(self):
+ self.Bones.UpdateHeader()
+ self.Animations.UpdateHeader()
+ self.RawKeys.UpdateHeader()
+
+ def GetBoneByIndex(self, bone_index):
+ if bone_index >= 0 and len(self.Bones.Data) > bone_index:
+ return self.Bones.Data[bone_index]
+
+ def IsEmpty(self):
+ return (len(self.Bones.Data) == 0 or len(self.Animations.Data) == 0)
+
+ def StoreBone(self, b):
+ self.BoneLookup[b.Name] = [-1, b]
+
+ def UseBone(self, bone_name):
+ if bone_name in self.BoneLookup:
+ bone_data = self.BoneLookup[bone_name]
+
+ if bone_data[0] == -1:
+ bone_data[0] = len(self.Bones.Data)
+ self.AddBone(bone_data[1])
+ #self.Bones.Data.append(bone_data[1])
+
+ return bone_data[0]
+
+ def GetBoneByName(self, bone_name):
+ if bone_name in self.BoneLookup:
+ bone_data = self.BoneLookup[bone_name]
+ return bone_data[1]
+
+ def GetBoneIndex(self, bone_name):
+ if bone_name in self.BoneLookup:
+ bone_data = self.BoneLookup[bone_name]
+ return bone_data[0]
+
+ def dump(self):
+ self.UpdateHeaders()
+ data = self.GeneralHeader.dump() + self.Bones.dump() + self.Animations.dump() + self.RawKeys.dump()
+ return data
+
+ def PrintOut(self):
+ print ('--- PSA FILE EXPORTED ---')
+ print ('bone count: %i' % len(self.Bones.Data))
+ print ('animation count: %i' % len(self.Animations.Data))
+ print ('rawkey count: %i' % len(self.RawKeys.Data))
+ print ('-------------------------')
+
+####################################
# helpers to create bone structs
def make_vbone(name, parent_index, child_count, orientation_quat, position_vect):
- bone = VBone()
- bone.Name = name
- bone.ParentIndex = parent_index
- bone.NumChildren = child_count
- bone.BonePos.Orientation = orientation_quat
- bone.BonePos.Position.X = position_vect.x
- bone.BonePos.Position.Y = position_vect.y
- bone.BonePos.Position.Z = position_vect.z
-
- #these values seem to be ignored?
- #bone.BonePos.Length = tail.length
- #bone.BonePos.XSize = tail.x
- #bone.BonePos.YSize = tail.y
- #bone.BonePos.ZSize = tail.z
-
- return bone
+ bone = VBone()
+ bone.Name = name
+ bone.ParentIndex = parent_index
+ bone.NumChildren = child_count
+ bone.BonePos.Orientation = orientation_quat
+ bone.BonePos.Position.X = position_vect.x
+ bone.BonePos.Position.Y = position_vect.y
+ bone.BonePos.Position.Z = position_vect.z
+
+ #these values seem to be ignored?
+ #bone.BonePos.Length = tail.length
+ #bone.BonePos.XSize = tail.x
+ #bone.BonePos.YSize = tail.y
+ #bone.BonePos.ZSize = tail.z
+
+ return bone
def make_namedbonebinary(name, parent_index, child_count, orientation_quat, position_vect, is_real):
- bone = FNamedBoneBinary()
- bone.Name = name
- bone.ParentIndex = parent_index
- bone.NumChildren = child_count
- bone.BonePos.Orientation = orientation_quat
- bone.BonePos.Position.X = position_vect.x
- bone.BonePos.Position.Y = position_vect.y
- bone.BonePos.Position.Z = position_vect.z
- bone.IsRealBone = is_real
- return bone
-
+ bone = FNamedBoneBinary()
+ bone.Name = name
+ bone.ParentIndex = parent_index
+ bone.NumChildren = child_count
+ bone.BonePos.Orientation = orientation_quat
+ bone.BonePos.Position.X = position_vect.x
+ bone.BonePos.Position.Y = position_vect.y
+ bone.BonePos.Position.Z = position_vect.z
+ bone.IsRealBone = is_real
+ return bone
+
##################################################
#RG - check to make sure face isnt a line
#The face has to be triangle not a line
def is_1d_face(blender_face,mesh):
- #ID Vertex of id point
- v0 = blender_face.vertices[0]
- v1 = blender_face.vertices[1]
- v2 = blender_face.vertices[2]
-
- return (mesh.vertices[v0].co == mesh.vertices[v1].co or \
- mesh.vertices[v1].co == mesh.vertices[v2].co or \
- mesh.vertices[v2].co == mesh.vertices[v0].co)
- return False
+ #ID Vertex of id point
+ v0 = blender_face.vertices[0]
+ v1 = blender_face.vertices[1]
+ v2 = blender_face.vertices[2]
+
+ return (mesh.vertices[v0].co == mesh.vertices[v1].co or \
+ mesh.vertices[v1].co == mesh.vertices[v2].co or \
+ mesh.vertices[v2].co == mesh.vertices[v0].co)
+ return False
##################################################
# http://en.wikibooks.org/wiki/Blender_3D:_Blending_Into_Python/Cookbook#Triangulate_NMesh
#blender 2.50 format using the Operators/command convert the mesh to tri mesh
def triangulateNMesh(object):
- bneedtri = False
- scene = bpy.context.scene
- bpy.ops.object.mode_set(mode='OBJECT')
- for i in scene.objects: i.select = False #deselect all objects
- object.select = True
- scene.objects.active = object #set the mesh object to current
- bpy.ops.object.mode_set(mode='OBJECT')
- print("Checking mesh if needs to convert quad to Tri...")
- for face in object.data.faces:
- if (len(face.vertices) > 3):
- bneedtri = True
- break
-
- bpy.ops.object.mode_set(mode='OBJECT')
- if bneedtri == True:
- print("Converting quad to tri mesh...")
- me_da = object.data.copy() #copy data
- me_ob = object.copy() #copy object
- #note two copy two types else it will use the current data or mesh
- me_ob.data = me_da
- bpy.context.scene.objects.link(me_ob)#link the object to the scene #current object location
- for i in scene.objects: i.select = False #deselect all objects
- me_ob.select = True
- scene.objects.active = me_ob #set the mesh object to current
- bpy.ops.object.mode_set(mode='EDIT') #Operators
- bpy.ops.mesh.select_all(action='SELECT')#select all the face/vertex/edge
- bpy.ops.mesh.quads_convert_to_tris() #Operators
- bpy.context.scene.update()
- bpy.ops.object.mode_set(mode='OBJECT') # set it in object
- bpy.context.scene.unrealtriangulatebool = True
- print("Triangulate Mesh Done!")
- else:
- print("No need to convert tri mesh.")
- me_ob = object
- return me_ob
+ bneedtri = False
+ scene = bpy.context.scene
+ bpy.ops.object.mode_set(mode='OBJECT')
+ for i in scene.objects: i.select = False #deselect all objects
+ object.select = True
+ scene.objects.active = object #set the mesh object to current
+ bpy.ops.object.mode_set(mode='OBJECT')
+ print("Checking mesh if needs to convert quad to Tri...")
+ for face in object.data.faces:
+ if (len(face.vertices) > 3):
+ bneedtri = True
+ break
+
+ bpy.ops.object.mode_set(mode='OBJECT')
+ if bneedtri == True:
+ print("Converting quad to tri mesh...")
+ me_da = object.data.copy() #copy data
+ me_ob = object.copy() #copy object
+ #note two copy two types else it will use the current data or mesh
+ me_ob.data = me_da
+ bpy.context.scene.objects.link(me_ob)#link the object to the scene #current object location
+ for i in scene.objects: i.select = False #deselect all objects
+ me_ob.select = True
+ scene.objects.active = me_ob #set the mesh object to current
+ bpy.ops.object.mode_set(mode='EDIT') #Operators
+ bpy.ops.mesh.select_all(action='SELECT')#select all the face/vertex/edge
+ bpy.ops.mesh.quads_convert_to_tris() #Operators
+ bpy.context.scene.update()
+ bpy.ops.object.mode_set(mode='OBJECT') # set it in object
+ bpy.context.scene.unrealtriangulatebool = True
+ print("Triangulate Mesh Done!")
+ else:
+ print("No need to convert tri mesh.")
+ me_ob = object
+ return me_ob
#Blender Bone Index
class BBone:
- def __init__(self):
- self.bone = ""
- self.index = 0
+ def __init__(self):
+ self.bone = ""
+ self.index = 0
bonedata = []
-BBCount = 0
+BBCount = 0
#deal with mesh bones groups vertex point
def BoneIndex(bone):
- global BBCount, bonedata
- #print("//==============")
- #print(bone.name , "ID:",BBCount)
- BB = BBone()
- BB.bone = bone.name
- BB.index = BBCount
- bonedata.append(BB)
- BBCount += 1
- for current_child_bone in bone.children:
- BoneIndex(current_child_bone)
+ global BBCount, bonedata
+ #print("//==============")
+ #print(bone.name , "ID:",BBCount)
+ BB = BBone()
+ BB.bone = bone.name
+ BB.index = BBCount
+ bonedata.append(BB)
+ BBCount += 1
+ for current_child_bone in bone.children:
+ BoneIndex(current_child_bone)
def BoneIndexArmature(blender_armature):
- global BBCount
- #print("\n Buildng bone before mesh \n")
- #objectbone = blender_armature.pose #Armature bone
- #print(blender_armature)
- objectbone = blender_armature[0].pose
- #print(dir(ArmatureData))
-
- for bone in objectbone.bones:
- if(bone.parent == None):
- BoneIndex(bone)
- #BBCount += 1
- break
-
+ global BBCount
+ #print("\n Buildng bone before mesh \n")
+ #objectbone = blender_armature.pose #Armature bone
+ #print(blender_armature)
+ objectbone = blender_armature[0].pose
+ #print(dir(ArmatureData))
+
+ for bone in objectbone.bones:
+ if(bone.parent == None):
+ BoneIndex(bone)
+ #BBCount += 1
+ break
+
# Actual object parsing functions
def parse_meshes(blender_meshes, psk_file):
- #this is use to call the bone name and the index array for group index matches
- global bonedata
- #print("BONE DATA",len(bonedata))
- print ("----- parsing meshes -----")
- print("Number of Object Meshes:",len(blender_meshes))
- for current_obj in blender_meshes: #number of mesh that should be one mesh here
-
- current_obj = triangulateNMesh(current_obj)
- #print(dir(current_obj))
- print("Mesh Name:",current_obj.name)
- current_mesh = current_obj.data
-
- #if len(current_obj.materials) > 0:
- # object_mat = current_obj.materials[0]
- object_material_index = current_obj.active_material_index
-
- points = ObjMap()
- wedges = ObjMap()
-
- discarded_face_count = 0
- print (" -- Dumping Mesh Faces -- LEN:", len(current_mesh.faces))
- for current_face in current_mesh.faces:
- #print ' -- Dumping UVs -- '
- #print current_face.uv_textures
-
- if len(current_face.vertices) != 3:
- raise RuntimeError("Non-triangular face (%i)" % len(current_face.vertices))
-
- #No Triangulate Yet
- # if len(current_face.vertices) != 3:
- # raise RuntimeError("Non-triangular face (%i)" % len(current_face.vertices))
- # #TODO: add two fake faces made of triangles?
-
- #RG - apparently blender sometimes has problems when you do quad to triangle
- # conversion, and ends up creating faces that have only TWO points -
- # one of the points is simply in the vertex list for the face twice.
- # This is bad, since we can't get a real face normal for a LINE, we need
- # a plane for this. So, before we add the face to the list of real faces,
- # ensure that the face is actually a plane, and not a line. If it is not
- # planar, just discard it and notify the user in the console after we're
- # done dumping the rest of the faces
-
- if not is_1d_face(current_face,current_mesh):
- #print("faces")
- wedge_list = []
- vect_list = []
-
- #get or create the current material
- m = psk_file.GetMatByIndex(object_material_index)
-
- face_index = current_face.index
- has_UV = False
- faceUV = None
-
- if len(current_mesh.uv_textures) > 0:
- has_UV = True
- #print("face index: ",face_index)
- #faceUV = current_mesh.uv_textures.active.data[face_index]#UVs for current face
- #faceUV = current_mesh.uv_textures.active.data[0]#UVs for current face
- #print(face_index,"<[FACE NUMBER")
- uv_layer = current_mesh.uv_textures.active
- faceUV = uv_layer.data[face_index]
- #print("============================")
- #size(data) is number of texture faces. Each face has UVs
- #print("DATA face uv: ",len(faceUV.uv), " >> ",(faceUV.uv[0][0]))
-
- for i in range(3):
- vert_index = current_face.vertices[i]
- vert = current_mesh.vertices[vert_index]
- uv = []
- #assumes 3 UVs Per face (for now).
- if (has_UV):
- if len(faceUV.uv) != 3:
- print ("WARNING: Current face is missing UV coordinates - writing 0,0...")
- print ("WARNING: Face has more than 3 UVs - writing 0,0...")
- uv = [0.0, 0.0]
- else:
- #uv.append(faceUV.uv[i][0])
- #uv.append(faceUV.uv[i][1])
- uv = [faceUV.uv[i][0],faceUV.uv[i][1]] #OR bottom works better # 24 for cube
- #uv = list(faceUV.uv[i]) #30 just cube
- else:
- print ("No UVs?")
- uv = [0.0, 0.0]
- #print("UV >",uv)
- #uv = [0.0, 0.0] #over ride uv that is not fixed
- #print(uv)
- #flip V coordinate because UEd requires it and DOESN'T flip it on its own like it
- #does with the mesh Y coordinates.
- #this is otherwise known as MAGIC-2
- uv[1] = 1.0 - uv[1]
-
- #deal with the min and max value
- #if value is over the set limit it will null the uv texture
- if (uv[0] > 1):
- uv[0] = 1
- if (uv[0] < 0):
- uv[0] = 0
- if (uv[1] > 1):
- uv[1] = 1
- if (uv[1] < 0):
- uv[1] = 0
-
-
- # RE - Append untransformed vector (for normal calc below)
- # TODO: convert to Blender.Mathutils
- vect_list.append(FVector(vert.co.x, vert.co.y, vert.co.z))
-
- # Transform position for export
- #vpos = vert.co * object_material_index
- vpos = vert.co * current_obj.matrix_local
- # Create the point
- p = VPoint()
- p.Point.X = vpos.x
- p.Point.Y = vpos.y
- p.Point.Z = vpos.z
-
- # Create the wedge
- w = VVertex()
- w.MatIndex = object_material_index
- w.PointIndex = points.get(p) # get index from map
- #Set UV TEXTURE
- w.U = uv[0]
- w.V = uv[1]
- index_wedge = wedges.get(w)
- wedge_list.append(index_wedge)
-
- #print results
- #print 'result PointIndex=%i, U=%f, V=%f, wedge_index=%i' % (
- # w.PointIndex,
- # w.U,
- # w.V,
- # wedge_index)
-
- # Determine face vertex order
- # get normal from blender
- no = current_face.normal
-
- # TODO: convert to Blender.Mathutils
- # convert to FVector
- norm = FVector(no[0], no[1], no[2])
-
- # Calculate the normal of the face in blender order
- tnorm = vect_list[1].sub(vect_list[0]).cross(vect_list[2].sub(vect_list[1]))
-
- # RE - dot the normal from blender order against the blender normal
- # this gives the product of the two vectors' lengths along the blender normal axis
- # all that matters is the sign
- dot = norm.dot(tnorm)
-
- # print results
- #print 'face norm: (%f,%f,%f), tnorm=(%f,%f,%f), dot=%f' % (
- # norm.X, norm.Y, norm.Z,
- # tnorm.X, tnorm.Y, tnorm.Z,
- # dot)
-
- tri = VTriangle()
- # RE - magic: if the dot product above > 0, order the vertices 2, 1, 0
- # if the dot product above < 0, order the vertices 0, 1, 2
- # if the dot product is 0, then blender's normal is coplanar with the face
- # and we cannot deduce which side of the face is the outside of the mesh
- if (dot > 0):
- (tri.WedgeIndex2, tri.WedgeIndex1, tri.WedgeIndex0) = wedge_list
- elif (dot < 0):
- (tri.WedgeIndex0, tri.WedgeIndex1, tri.WedgeIndex2) = wedge_list
- else:
- dindex0 = current_face.vertices[0];
- dindex1 = current_face.vertices[1];
- dindex2 = current_face.vertices[2];
- raise RuntimeError("normal vector coplanar with face! points:", current_mesh.vertices[dindex0].co, current_mesh.vertices[dindex1].co, current_mesh.vertices[dindex2].co)
-
- tri.MatIndex = object_material_index
- #print(tri)
- psk_file.AddFace(tri)
-
- else:
- discarded_face_count = discarded_face_count + 1
-
- print (" -- Dumping Mesh Points -- LEN:",len(points.dict))
- for point in points.items():
- psk_file.AddPoint(point)
- print (" -- Dumping Mesh Wedge -- LEN:",len(wedges.dict))
- for wedge in wedges.items():
- psk_file.AddWedge(wedge)
-
- #RG - if we happend upon any non-planar faces above that we've discarded,
- # just let the user know we discarded them here in case they want
- # to investigate
-
- if discarded_face_count > 0:
- print ("INFO: Discarded %i non-planar faces." % (discarded_face_count))
-
- #RG - walk through the vertex groups and find the indexes into the PSK points array
- #for them, then store that index and the weight as a tuple in a new list of
- #verts for the group that we can look up later by bone name, since Blender matches
- #verts to bones for influences by having the VertexGroup named the same thing as
- #the bone
-
- #vertex group.
- for bonegroup in bonedata:
- #print("bone gourp build:",bonegroup.bone)
- vert_list = []
- for current_vert in current_mesh.vertices:
- #print("INDEX V:",current_vert.index)
- vert_index = current_vert.index
- for vgroup in current_vert.groups:#vertex groupd id
- vert_weight = vgroup.weight
- if(bonegroup.index == vgroup.group):
- p = VPoint()
- vpos = current_vert.co * current_obj.matrix_local
- p.Point.X = vpos.x
- p.Point.Y = vpos.y
- p.Point.Z = vpos.z
- #print(current_vert.co)
- point_index = points.get(p) #point index
- v_item = (point_index, vert_weight)
- vert_list.append(v_item)
- #bone name, [point id and wieght]
- #print("Add Vertex Group:",bonegroup.bone, " No. Points:",len(vert_list))
- psk_file.VertexGroups[bonegroup.bone] = vert_list
-
- #unrealtriangulatebool #this will remove the mesh from the scene
- if (bpy.context.scene.unrealtriangulatebool == True):
- print("Remove tmp Mesh [ " ,current_obj.name, " ] from scene >" ,(bpy.context.scene.unrealtriangulatebool ))
- bpy.ops.object.mode_set(mode='OBJECT') # set it in object
- bpy.context.scene.objects.unlink(current_obj)
-
+ #this is use to call the bone name and the index array for group index matches
+ global bonedata
+ #print("BONE DATA",len(bonedata))
+ print ("----- parsing meshes -----")
+ print("Number of Object Meshes:",len(blender_meshes))
+ for current_obj in blender_meshes: #number of mesh that should be one mesh here
+
+ current_obj = triangulateNMesh(current_obj)
+ #print(dir(current_obj))
+ print("Mesh Name:",current_obj.name)
+ current_mesh = current_obj.data
+
+ #if len(current_obj.materials) > 0:
+ # object_mat = current_obj.materials[0]
+ object_material_index = current_obj.active_material_index
+
+ points = ObjMap()
+ wedges = ObjMap()
+
+ discarded_face_count = 0
+ print (" -- Dumping Mesh Faces -- LEN:", len(current_mesh.faces))
+ for current_face in current_mesh.faces:
+ #print ' -- Dumping UVs -- '
+ #print current_face.uv_textures
+
+ if len(current_face.vertices) != 3:
+ raise RuntimeError("Non-triangular face (%i)" % len(current_face.vertices))
+
+ #No Triangulate Yet
+ # if len(current_face.vertices) != 3:
+ # raise RuntimeError("Non-triangular face (%i)" % len(current_face.vertices))
+ # #TODO: add two fake faces made of triangles?
+
+ #RG - apparently blender sometimes has problems when you do quad to triangle
+ # conversion, and ends up creating faces that have only TWO points -
+ # one of the points is simply in the vertex list for the face twice.
+ # This is bad, since we can't get a real face normal for a LINE, we need
+ # a plane for this. So, before we add the face to the list of real faces,
+ # ensure that the face is actually a plane, and not a line. If it is not
+ # planar, just discard it and notify the user in the console after we're
+ # done dumping the rest of the faces
+
+ if not is_1d_face(current_face,current_mesh):
+ #print("faces")
+ wedge_list = []
+ vect_list = []
+
+ #get or create the current material
+ m = psk_file.GetMatByIndex(object_material_index)
+
+ face_index = current_face.index
+ has_UV = False
+ faceUV = None
+
+ if len(current_mesh.uv_textures) > 0:
+ has_UV = True
+ #print("face index: ",face_index)
+ #faceUV = current_mesh.uv_textures.active.data[face_index]#UVs for current face
+ #faceUV = current_mesh.uv_textures.active.data[0]#UVs for current face
+ #print(face_index,"<[FACE NUMBER")
+ uv_layer = current_mesh.uv_textures.active
+ faceUV = uv_layer.data[face_index]
+ #print("============================")
+ #size(data) is number of texture faces. Each face has UVs
+ #print("DATA face uv: ",len(faceUV.uv), " >> ",(faceUV.uv[0][0]))
+
+ for i in range(3):
+ vert_index = current_face.vertices[i]
+ vert = current_mesh.vertices[vert_index]
+ uv = []
+ #assumes 3 UVs Per face (for now).
+ if (has_UV):
+ if len(faceUV.uv) != 3:
+ print ("WARNING: Current face is missing UV coordinates - writing 0,0...")
+ print ("WARNING: Face has more than 3 UVs - writing 0,0...")
+ uv = [0.0, 0.0]
+ else:
+ #uv.append(faceUV.uv[i][0])
+ #uv.append(faceUV.uv[i][1])
+ uv = [faceUV.uv[i][0],faceUV.uv[i][1]] #OR bottom works better # 24 for cube
+ #uv = list(faceUV.uv[i]) #30 just cube
+ else:
+ print ("No UVs?")
+ uv = [0.0, 0.0]
+ #print("UV >",uv)
+ #uv = [0.0, 0.0] #over ride uv that is not fixed
+ #print(uv)
+ #flip V coordinate because UEd requires it and DOESN'T flip it on its own like it
+ #does with the mesh Y coordinates.
+ #this is otherwise known as MAGIC-2
+ uv[1] = 1.0 - uv[1]
+
+ #deal with the min and max value
+ #if value is over the set limit it will null the uv texture
+ if (uv[0] > 1):
+ uv[0] = 1
+ if (uv[0] < 0):
+ uv[0] = 0
+ if (uv[1] > 1):
+ uv[1] = 1
+ if (uv[1] < 0):
+ uv[1] = 0
+
+
+ # RE - Append untransformed vector (for normal calc below)
+ # TODO: convert to Blender.Mathutils
+ vect_list.append(FVector(vert.co.x, vert.co.y, vert.co.z))
+
+ # Transform position for export
+ #vpos = vert.co * object_material_index
+ vpos = vert.co * current_obj.matrix_local
+ # Create the point
+ p = VPoint()
+ p.Point.X = vpos.x
+ p.Point.Y = vpos.y
+ p.Point.Z = vpos.z
+
+ # Create the wedge
+ w = VVertex()
+ w.MatIndex = object_material_index
+ w.PointIndex = points.get(p) # get index from map
+ #Set UV TEXTURE
+ w.U = uv[0]
+ w.V = uv[1]
+ index_wedge = wedges.get(w)
+ wedge_list.append(index_wedge)
+
+ #print results
+ #print 'result PointIndex=%i, U=%f, V=%f, wedge_index=%i' % (
+ # w.PointIndex,
+ # w.U,
+ # w.V,
+ # wedge_index)
+
+ # Determine face vertex order
+ # get normal from blender
+ no = current_face.normal
+
+ # TODO: convert to Blender.Mathutils
+ # convert to FVector
+ norm = FVector(no[0], no[1], no[2])
+
+ # Calculate the normal of the face in blender order
+ tnorm = vect_list[1].sub(vect_list[0]).cross(vect_list[2].sub(vect_list[1]))
+
+ # RE - dot the normal from blender order against the blender normal
+ # this gives the product of the two vectors' lengths along the blender normal axis
+ # all that matters is the sign
+ dot = norm.dot(tnorm)
+
+ # print results
+ #print 'face norm: (%f,%f,%f), tnorm=(%f,%f,%f), dot=%f' % (
+ # norm.X, norm.Y, norm.Z,
+ # tnorm.X, tnorm.Y, tnorm.Z,
+ # dot)
+
+ tri = VTriangle()
+ # RE - magic: if the dot product above > 0, order the vertices 2, 1, 0
+ # if the dot product above < 0, order the vertices 0, 1, 2
+ # if the dot product is 0, then blender's normal is coplanar with the face
+ # and we cannot deduce which side of the face is the outside of the mesh
+ if (dot > 0):
+ (tri.WedgeIndex2, tri.WedgeIndex1, tri.WedgeIndex0) = wedge_list
+ elif (dot < 0):
+ (tri.WedgeIndex0, tri.WedgeIndex1, tri.WedgeIndex2) = wedge_list
+ else:
+ dindex0 = current_face.vertices[0];
+ dindex1 = current_face.vertices[1];
+ dindex2 = current_face.vertices[2];
+ raise RuntimeError("normal vector coplanar with face! points:", current_mesh.vertices[dindex0].co, current_mesh.vertices[dindex1].co, current_mesh.vertices[dindex2].co)
+
+ tri.MatIndex = object_material_index
+ #print(tri)
+ psk_file.AddFace(tri)
+
+ else:
+ discarded_face_count = discarded_face_count + 1
+
+ print (" -- Dumping Mesh Points -- LEN:",len(points.dict))
+ for point in points.items():
+ psk_file.AddPoint(point)
+ print (" -- Dumping Mesh Wedge -- LEN:",len(wedges.dict))
+ for wedge in wedges.items():
+ psk_file.AddWedge(wedge)
+
+ #RG - if we happend upon any non-planar faces above that we've discarded,
+ # just let the user know we discarded them here in case they want
+ # to investigate
+
+ if discarded_face_count > 0:
+ print ("INFO: Discarded %i non-planar faces." % (discarded_face_count))
+
+ #RG - walk through the vertex groups and find the indexes into the PSK points array
+ #for them, then store that index and the weight as a tuple in a new list of
+ #verts for the group that we can look up later by bone name, since Blender matches
+ #verts to bones for influences by having the VertexGroup named the same thing as
+ #the bone
+
+ #vertex group.
+ for bonegroup in bonedata:
+ #print("bone gourp build:",bonegroup.bone)
+ vert_list = []
+ for current_vert in current_mesh.vertices:
+ #print("INDEX V:",current_vert.index)
+ vert_index = current_vert.index
+ for vgroup in current_vert.groups:#vertex groupd id
+ vert_weight = vgroup.weight
+ if(bonegroup.index == vgroup.group):
+ p = VPoint()
+ vpos = current_vert.co * current_obj.matrix_local
+ p.Point.X = vpos.x
+ p.Point.Y = vpos.y
+ p.Point.Z = vpos.z
+ #print(current_vert.co)
+ point_index = points.get(p) #point index
+ v_item = (point_index, vert_weight)
+ vert_list.append(v_item)
+ #bone name, [point id and wieght]
+ #print("Add Vertex Group:",bonegroup.bone, " No. Points:",len(vert_list))
+ psk_file.VertexGroups[bonegroup.bone] = vert_list
+
+ #unrealtriangulatebool #this will remove the mesh from the scene
+ if (bpy.context.scene.unrealtriangulatebool == True):
+ print("Remove tmp Mesh [ " ,current_obj.name, " ] from scene >" ,(bpy.context.scene.unrealtriangulatebool ))
+ bpy.ops.object.mode_set(mode='OBJECT') # set it in object
+ bpy.context.scene.objects.unlink(current_obj)
+
def make_fquat(bquat):
- quat = FQuat()
-
- #flip handedness for UT = set x,y,z to negative (rotate in other direction)
- quat.X = -bquat.x
- quat.Y = -bquat.y
- quat.Z = -bquat.z
-
- quat.W = bquat.w
- return quat
-
+ quat = FQuat()
+
+ #flip handedness for UT = set x,y,z to negative (rotate in other direction)
+ quat.X = -bquat.x
+ quat.Y = -bquat.y
+ quat.Z = -bquat.z
+
+ quat.W = bquat.w
+ return quat
+
def make_fquat_default(bquat):
- quat = FQuat()
-
- quat.X = bquat.x
- quat.Y = bquat.y
- quat.Z = bquat.z
-
- quat.W = bquat.w
- return quat
+ quat = FQuat()
+
+ quat.X = bquat.x
+ quat.Y = bquat.y
+ quat.Z = bquat.z
+
+ quat.W = bquat.w
+ return quat
# =================================================================================================
# TODO: remove this 1am hack
nbone = 0
def parse_bone(blender_bone, psk_file, psa_file, parent_id, is_root_bone, parent_matrix, parent_root):
- global nbone # look it's evil!
- #print '-------------------- Dumping Bone ---------------------- '
-
- #If bone does not have parent that mean it the root bone
- if blender_bone.parent == None:
- parent_root = blender_bone
-
-
- child_count = len(blender_bone.children)
- #child of parent
- child_parent = blender_bone.parent
-
- if child_parent != None:
- print ("--Bone Name:",blender_bone.name ," parent:" , blender_bone.parent.name, "ID:", nbone)
- else:
- print ("--Bone Name:",blender_bone.name ," parent: None" , "ID:", nbone)
-
- if child_parent != None:
- quat_root = blender_bone.matrix
- quat = make_fquat(quat_root.to_quat())
-
- quat_parent = child_parent.matrix.to_quat().inverse()
- parent_head = child_parent.head * quat_parent
- parent_tail = child_parent.tail * quat_parent
-
- set_position = (parent_tail - parent_head) + blender_bone.head
- else:
- # ROOT BONE
- #This for root
- set_position = blender_bone.head * parent_matrix #ARMATURE OBJECT Locction
- rot_mat = blender_bone.matrix * parent_matrix.rotation_part() #ARMATURE OBJECT Rotation
- #print(dir(rot_mat))
-
- quat = make_fquat_default(rot_mat.to_quat())
-
- print ("[[======= FINAL POSITION:", set_position)
- final_parent_id = parent_id
-
- #RG/RE -
- #if we are not seperated by a small distance, create a dummy bone for the displacement
- #this is only needed for root bones, since UT assumes a connected skeleton, and from here
- #down the chain we just use "tail" as an endpoint
- #if(head.length > 0.001 and is_root_bone == 1):
- if(0):
- pb = make_vbone("dummy_" + blender_bone.name, parent_id, 1, FQuat(), tail)
- psk_file.AddBone(pb)
- pbb = make_namedbonebinary("dummy_" + blender_bone.name, parent_id, 1, FQuat(), tail, 0)
- psa_file.StoreBone(pbb)
- final_parent_id = nbone
- nbone = nbone + 1
- #tail = tail-head
-
- my_id = nbone
-
- pb = make_vbone(blender_bone.name, final_parent_id, child_count, quat, set_position)
- psk_file.AddBone(pb)
- pbb = make_namedbonebinary(blender_bone.name, final_parent_id, child_count, quat, set_position, 1)
- psa_file.StoreBone(pbb)
-
- nbone = nbone + 1
-
- #RG - dump influences for this bone - use the data we collected in the mesh dump phase
- # to map our bones to vertex groups
- #print("///////////////////////")
- #print("set influence")
- if blender_bone.name in psk_file.VertexGroups:
- vertex_list = psk_file.VertexGroups[blender_bone.name]
- #print("vertex list:", len(vertex_list), " of >" ,blender_bone.name )
- for vertex_data in vertex_list:
- #print("set influence vettex")
- point_index = vertex_data[0]
- vertex_weight = vertex_data[1]
- influence = VRawBoneInfluence()
- influence.Weight = vertex_weight
- #influence.BoneIndex = my_id
- influence.BoneIndex = my_id
- influence.PointIndex = point_index
- #print(influence)
- #print ('Adding Bone Influence for [%s] = Point Index=%i, Weight=%f' % (blender_bone.name, point_index, vertex_weight))
- #print("adding influence")
- psk_file.AddInfluence(influence)
-
- #blender_bone.matrix_local
- #recursively dump child bones
- mainparent = parent_matrix
- #if len(blender_bone.children) > 0:
- for current_child_bone in blender_bone.children:
- parse_bone(current_child_bone, psk_file, psa_file, my_id, 0, mainparent, parent_root)
+ global nbone # look it's evil!
+ #print '-------------------- Dumping Bone ---------------------- '
+
+ #If bone does not have parent that mean it the root bone
+ if blender_bone.parent == None:
+ parent_root = blender_bone
+
+
+ child_count = len(blender_bone.children)
+ #child of parent
+ child_parent = blender_bone.parent
+
+ if child_parent != None:
+ print ("--Bone Name:",blender_bone.name ," parent:" , blender_bone.parent.name, "ID:", nbone)
+ else:
+ print ("--Bone Name:",blender_bone.name ," parent: None" , "ID:", nbone)
+
+ if child_parent != None:
+ quat_root = blender_bone.matrix
+ quat = make_fquat(quat_root.to_quat())
+
+ quat_parent = child_parent.matrix.to_quat().inverse()
+ parent_head = child_parent.head * quat_parent
+ parent_tail = child_parent.tail * quat_parent
+
+ set_position = (parent_tail - parent_head) + blender_bone.head
+ else:
+ # ROOT BONE
+ #This for root
+ set_position = blender_bone.head * parent_matrix #ARMATURE OBJECT Locction
+ rot_mat = blender_bone.matrix * parent_matrix.rotation_part() #ARMATURE OBJECT Rotation
+ #print(dir(rot_mat))
+
+ quat = make_fquat_default(rot_mat.to_quat())
+
+ print ("[[======= FINAL POSITION:", set_position)
+ final_parent_id = parent_id
+
+ #RG/RE -
+ #if we are not seperated by a small distance, create a dummy bone for the displacement
+ #this is only needed for root bones, since UT assumes a connected skeleton, and from here
+ #down the chain we just use "tail" as an endpoint
+ #if(head.length > 0.001 and is_root_bone == 1):
+ if(0):
+ pb = make_vbone("dummy_" + blender_bone.name, parent_id, 1, FQuat(), tail)
+ psk_file.AddBone(pb)
+ pbb = make_namedbonebinary("dummy_" + blender_bone.name, parent_id, 1, FQuat(), tail, 0)
+ psa_file.StoreBone(pbb)
+ final_parent_id = nbone
+ nbone = nbone + 1
+ #tail = tail-head
+
+ my_id = nbone
+
+ pb = make_vbone(blender_bone.name, final_parent_id, child_count, quat, set_position)
+ psk_file.AddBone(pb)
+ pbb = make_namedbonebinary(blender_bone.name, final_parent_id, child_count, quat, set_position, 1)
+ psa_file.StoreBone(pbb)
+
+ nbone = nbone + 1
+
+ #RG - dump influences for this bone - use the data we collected in the mesh dump phase
+ # to map our bones to vertex groups
+ #print("///////////////////////")
+ #print("set influence")
+ if blender_bone.name in psk_file.VertexGroups:
+ vertex_list = psk_file.VertexGroups[blender_bone.name]
+ #print("vertex list:", len(vertex_list), " of >" ,blender_bone.name )
+ for vertex_data in vertex_list:
+ #print("set influence vettex")
+ point_index = vertex_data[0]
+ vertex_weight = vertex_data[1]
+ influence = VRawBoneInfluence()
+ influence.Weight = vertex_weight
+ #influence.BoneIndex = my_id
+ influence.BoneIndex = my_id
+ influence.PointIndex = point_index
+ #print(influence)
+ #print ('Adding Bone Influence for [%s] = Point Index=%i, Weight=%f' % (blender_bone.name, point_index, vertex_weight))
+ #print("adding influence")
+ psk_file.AddInfluence(influence)
+
+ #blender_bone.matrix_local
+ #recursively dump child bones
+ mainparent = parent_matrix
+ #if len(blender_bone.children) > 0:
+ for current_child_bone in blender_bone.children:
+ parse_bone(current_child_bone, psk_file, psa_file, my_id, 0, mainparent, parent_root)
def parse_armature(blender_armature, psk_file, psa_file):
- print ("----- parsing armature -----")
- print ('blender_armature length: %i' % (len(blender_armature)))
-
- #magic 0 sized root bone for UT - this is where all armature dummy bones will attach
- #dont increment nbone here because we initialize it to 1 (hackity hackity hack)
-
- #count top level bones first. NOT EFFICIENT.
- child_count = 0
- for current_obj in blender_armature:
- current_armature = current_obj.data
- bones = [x for x in current_armature.bones if not x.parent == None]
- child_count += len(bones)
-
- for current_obj in blender_armature:
- print ("Current Armature Name: " + current_obj.name)
- current_armature = current_obj.data
- #armature_id = make_armature_bone(current_obj, psk_file, psa_file)
-
- #we dont want children here - only the top level bones of the armature itself
- #we will recursively dump the child bones as we dump these bones
- """
- bones = [x for x in current_armature.bones if not x.parent == None]
- #will ingore this part of the ocde
- """
- for current_bone in current_armature.bones: #list the bone. #note this will list all the bones.
- if(current_bone.parent == None):
- parse_bone(current_bone, psk_file, psa_file, 0, 0, current_obj.matrix_local, None)
- break
-
-# get blender objects by type
+ print ("----- parsing armature -----")
+ print ('blender_armature length: %i' % (len(blender_armature)))
+
+ #magic 0 sized root bone for UT - this is where all armature dummy bones will attach
+ #dont increment nbone here because we initialize it to 1 (hackity hackity hack)
+
+ #count top level bones first. NOT EFFICIENT.
+ child_count = 0
+ for current_obj in blender_armature:
+ current_armature = current_obj.data
+ bones = [x for x in current_armature.bones if not x.parent == None]
+ child_count += len(bones)
+
+ for current_obj in blender_armature:
+ print ("Current Armature Name: " + current_obj.name)
+ current_armature = current_obj.data
+ #armature_id = make_armature_bone(current_obj, psk_file, psa_file)
+
+ #we dont want children here - only the top level bones of the armature itself
+ #we will recursively dump the child bones as we dump these bones
+ """
+ bones = [x for x in current_armature.bones if not x.parent == None]
+ #will ingore this part of the ocde
+ """
+ for current_bone in current_armature.bones: #list the bone. #note this will list all the bones.
+ if(current_bone.parent == None):
+ parse_bone(current_bone, psk_file, psa_file, 0, 0, current_obj.matrix_local, None)
+ break
+
+# get blender objects by type
def get_blender_objects(objects, intype):
- return [x for x in objects if x.type == intype]
-
+ return [x for x in objects if x.type == intype]
+
#strips current extension (if any) from filename and replaces it with extension passed in
def make_filename_ext(filename, extension):
- new_filename = ''
- extension_index = filename.find('.')
-
- if extension_index == -1:
- new_filename = filename + extension
- else:
- new_filename = filename[0:extension_index] + extension
-
- return new_filename
+ new_filename = ''
+ extension_index = filename.find('.')
+
+ if extension_index == -1:
+ new_filename = filename + extension
+ else:
+ new_filename = filename[0:extension_index] + extension
+
+ return new_filename
# returns the quaternion Grassman product a*b
# this is the same as the rotation a(b(x))
# (ie. the same as B*A if A and B are matrices representing
# the rotations described by quaternions a and b)
-def grassman(a, b):
- return mathutils.Quaternion(
- a.w*b.w - a.x*b.x - a.y*b.y - a.z*b.z,
- a.w*b.x + a.x*b.w + a.y*b.z - a.z*b.y,
- a.w*b.y - a.x*b.z + a.y*b.w + a.z*b.x,
- a.w*b.z + a.x*b.y - a.y*b.x + a.z*b.w)
-
+def grassman(a, b):
+ return mathutils.Quaternion(
+ a.w*b.w - a.x*b.x - a.y*b.y - a.z*b.z,
+ a.w*b.x + a.x*b.w + a.y*b.z - a.z*b.y,
+ a.w*b.y - a.x*b.z + a.y*b.w + a.z*b.x,
+ a.w*b.z + a.x*b.y - a.y*b.x + a.z*b.w)
+
def parse_animation(blender_scene, blender_armatures, psa_file):
- #to do list:
- #need to list the action sets
- #need to check if there animation
- #need to check if animation is has one frame then exit it
- print ('\n----- parsing animation -----')
- ##print(dir(blender_scene))
-
- #print(dir(blender_armatures))
-
- render_data = blender_scene.render
- bHaveAction = True
-
- anim_rate = render_data.fps
-
- #print("dir:",dir(blender_scene))
- #print(dir(bpy.data.actions))
- #print("dir:",dir(bpy.data.actions[0]))
-
-
- print("==== Blender Settings ====")
- print ('Scene: %s Start Frame: %i, End Frame: %i' % (blender_scene.name, blender_scene.frame_start, blender_scene.frame_end))
- print ('Frames Per Sec: %i' % anim_rate)
- print ("Default FPS: 24" )
-
- cur_frame_index = 0
-
- #print(dir(bpy.data.actions))
- #print(dir(bpy.context.scene.background_set))
-
- #list of armature objects
- for arm in blender_armatures:
- #check if there animation data from armature or something
- #print(dir(arm.animation_data))
- #print("[["+dir(arm.animation_data.action))
- if not arm.animation_data:
- print("======================================")
- print("Check Animation Data: None")
- print("Armature has no animation, skipping...")
- print("======================================")
- break
-
- if not arm.animation_data.action:
- print("======================================")
- print("Check Action: None")
- print("Armature has no animation, skipping...")
- print("======================================")
- break
- act = arm.animation_data.action
- #print(dir(act))
- action_name = act.name
-
- if not len(act.fcurves):
- print("//===========================================================")
- print("// None bone pose set keys for this action set... skipping...")
- print("//===========================================================")
- bHaveAction = False
-
- #this deal with action export control
- if bHaveAction == True:
- print("")
- print("==== Action Set ====")
- print("Action Name:",action_name)
- #look for min and max frame that current set keys
- framemin, framemax = act.frame_range
- #print("max frame:",framemax)
- start_frame = int(framemin)
- end_frame = int(framemax)
- scene_frames = range(start_frame, end_frame+1)
- frame_count = len(scene_frames)
- #===================================================
- anim = AnimInfoBinary()
- anim.Name = action_name
- anim.Group = "" #what is group?
- anim.NumRawFrames = frame_count
- anim.AnimRate = anim_rate
- anim.FirstRawFrame = cur_frame_index
- #===================================================
- count_previous_keys = len(psa_file.RawKeys.Data)
- print("Frame Key Set Count:",frame_count, "Total Frame:",frame_count)
- #print("init action bones...")
- unique_bone_indexes = {}
- # bone lookup table
- bones_lookup = {}
-
- #build bone node for animation keys needed to be set
- for bone in arm.data.bones:
- bones_lookup[bone.name] = bone
- #print("bone name:",bone.name)
- frame_count = len(scene_frames)
- #print ('Frame Count: %i' % frame_count)
- pose_data = arm.pose
-
- #these must be ordered in the order the bones will show up in the PSA file!
- ordered_bones = {}
- ordered_bones = sorted([(psa_file.UseBone(x.name), x) for x in pose_data.bones], key=operator.itemgetter(0))
-
- #############################
- # ORDERED FRAME, BONE
- #for frame in scene_frames:
-
- for i in range(frame_count):
- frame = scene_frames[i]
- #LOUD
- #print ("==== outputting frame %i ===" % frame)
-
- if frame_count > i+1:
- next_frame = scene_frames[i+1]
- #print "This Frame: %i, Next Frame: %i" % (frame, next_frame)
- else:
- next_frame = -1
- #print "This Frame: %i, Next Frame: NONE" % frame
-
- #frame start from 1 as number one from blender
- blender_scene.set_frame(frame)
-
- cur_frame_index = cur_frame_index + 1
- for bone_data in ordered_bones:
- bone_index = bone_data[0]
- pose_bone = bone_data[1]
- #print("[=====POSE NAME:",pose_bone.name)
-
- #print("LENG >>.",len(bones_lookup))
- blender_bone = bones_lookup[pose_bone.name]
-
- #just need the total unique bones used, later for this AnimInfoBinary
- unique_bone_indexes[bone_index] = bone_index
- #LOUD
- #print ("-------------------", pose_bone.name)
- head = pose_bone.head
-
- posebonemat = mathutils.Matrix(pose_bone.matrix)
- parent_pose = pose_bone.parent
- if parent_pose != None:
- parentposemat = mathutils.Matrix(parent_pose.matrix)
- #blender 2.4X it been flip around with new 2.50 (mat1 * mat2) should now be (mat2 * mat1)
- posebonemat = parentposemat.invert() * posebonemat
- head = posebonemat.translation_part()
- quat = posebonemat.to_quat().normalize()
- vkey = VQuatAnimKey()
- vkey.Position.X = head.x
- vkey.Position.Y = head.y
- vkey.Position.Z = head.z
-
- if parent_pose != None:
- quat = make_fquat(quat)
- else:
- quat = make_fquat_default(quat)
-
- vkey.Orientation = quat
- #print("Head:",head)
- #print("Orientation",quat)
-
- #time from now till next frame = diff / framesPerSec
- if next_frame >= 0:
- diff = next_frame - frame
- else:
- diff = 1.0
-
- #print ("Diff = ", diff)
- vkey.Time = float(diff)/float(anim_rate)
-
- psa_file.AddRawKey(vkey)
-
- #done looping frames
- #done looping armatures
- #continue adding animInfoBinary counts here
-
- anim.TotalBones = len(unique_bone_indexes)
- print("Bones Count:",anim.TotalBones)
- anim.TrackTime = float(frame_count) / anim.AnimRate
- print("Time Track Frame:",anim.TrackTime)
- psa_file.AddAnimation(anim)
- print("==== Finish Action Build(s) ====")
-
-exportmessage = "Export Finish"
-
+ #to do list:
+ #need to list the action sets
+ #need to check if there animation
+ #need to check if animation is has one frame then exit it
+ print ('\n----- parsing animation -----')
+ ##print(dir(blender_scene))
+
+ #print(dir(blender_armatures))
+
+ render_data = blender_scene.render
+ bHaveAction = True
+
+ anim_rate = render_data.fps
+
+ #print("dir:",dir(blender_scene))
+ #print(dir(bpy.data.actions))
+ #print("dir:",dir(bpy.data.actions[0]))
+
+
+ print("==== Blender Settings ====")
+ print ('Scene: %s Start Frame: %i, End Frame: %i' % (blender_scene.name, blender_scene.frame_start, blender_scene.frame_end))
+ print ('Frames Per Sec: %i' % anim_rate)
+ print ("Default FPS: 24" )
+
+ cur_frame_index = 0
+
+ #print(dir(bpy.data.actions))
+ #print(dir(bpy.context.scene.background_set))
+
+ #list of armature objects
+ for arm in blender_armatures:
+ #check if there animation data from armature or something
+ #print(dir(arm.animation_data))
+ #print("[["+dir(arm.animation_data.action))
+ if not arm.animation_data:
+ print("======================================")
+ print("Check Animation Data: None")
+ print("Armature has no animation, skipping...")
+ print("======================================")
+ break
+
+ if not arm.animation_data.action:
+ print("======================================")
+ print("Check Action: None")
+ print("Armature has no animation, skipping...")
+ print("======================================")
+ break
+ act = arm.animation_data.action
+ #print(dir(act))
+ action_name = act.name
+
+ if not len(act.fcurves):
+ print("//===========================================================")
+ print("// None bone pose set keys for this action set... skipping...")
+ print("//===========================================================")
+ bHaveAction = False
+
+ #this deal with action export control
+ if bHaveAction == True:
+ print("")
+ print("==== Action Set ====")
+ print("Action Name:",action_name)
+ #look for min and max frame that current set keys
+ framemin, framemax = act.frame_range
+ #print("max frame:",framemax)
+ start_frame = int(framemin)
+ end_frame = int(framemax)
+ scene_frames = range(start_frame, end_frame+1)
+ frame_count = len(scene_frames)
+ #===================================================
+ anim = AnimInfoBinary()
+ anim.Name = action_name
+ anim.Group = "" #what is group?
+ anim.NumRawFrames = frame_count
+ anim.AnimRate = anim_rate
+ anim.FirstRawFrame = cur_frame_index
+ #===================================================
+ count_previous_keys = len(psa_file.RawKeys.Data)
+ print("Frame Key Set Count:",frame_count, "Total Frame:",frame_count)
+ #print("init action bones...")
+ unique_bone_indexes = {}
+ # bone lookup table
+ bones_lookup = {}
+
+ #build bone node for animation keys needed to be set
+ for bone in arm.data.bones:
+ bones_lookup[bone.name] = bone
+ #print("bone name:",bone.name)
+ frame_count = len(scene_frames)
+ #print ('Frame Count: %i' % frame_count)
+ pose_data = arm.pose
+
+ #these must be ordered in the order the bones will show up in the PSA file!
+ ordered_bones = {}
+ ordered_bones = sorted([(psa_file.UseBone(x.name), x) for x in pose_data.bones], key=operator.itemgetter(0))
+
+ #############################
+ # ORDERED FRAME, BONE
+ #for frame in scene_frames:
+
+ for i in range(frame_count):
+ frame = scene_frames[i]
+ #LOUD
+ #print ("==== outputting frame %i ===" % frame)
+
+ if frame_count > i+1:
+ next_frame = scene_frames[i+1]
+ #print "This Frame: %i, Next Frame: %i" % (frame, next_frame)
+ else:
+ next_frame = -1
+ #print "This Frame: %i, Next Frame: NONE" % frame
+
+ #frame start from 1 as number one from blender
+ blender_scene.set_frame(frame)
+
+ cur_frame_index = cur_frame_index + 1
+ for bone_data in ordered_bones:
+ bone_index = bone_data[0]
+ pose_bone = bone_data[1]
+ #print("[=====POSE NAME:",pose_bone.name)
+
+ #print("LENG >>.",len(bones_lookup))
+ blender_bone = bones_lookup[pose_bone.name]
+
+ #just need the total unique bones used, later for this AnimInfoBinary
+ unique_bone_indexes[bone_index] = bone_index
+ #LOUD
+ #print ("-------------------", pose_bone.name)
+ head = pose_bone.head
+
+ posebonemat = mathutils.Matrix(pose_bone.matrix)
+ parent_pose = pose_bone.parent
+ if parent_pose != None:
+ parentposemat = mathutils.Matrix(parent_pose.matrix)
+ #blender 2.4X it been flip around with new 2.50 (mat1 * mat2) should now be (mat2 * mat1)
+ posebonemat = parentposemat.invert() * posebonemat
+ head = posebonemat.translation_part()
+ quat = posebonemat.to_quat().normalize()
+ vkey = VQuatAnimKey()
+ vkey.Position.X = head.x
+ vkey.Position.Y = head.y
+ vkey.Position.Z = head.z
+
+ if parent_pose != None:
+ quat = make_fquat(quat)
+ else:
+ quat = make_fquat_default(quat)
+
+ vkey.Orientation = quat
+ #print("Head:",head)
+ #print("Orientation",quat)
+
+ #time from now till next frame = diff / framesPerSec
+ if next_frame >= 0:
+ diff = next_frame - frame
+ else:
+ diff = 1.0
+
+ #print ("Diff = ", diff)
+ vkey.Time = float(diff)/float(anim_rate)
+
+ psa_file.AddRawKey(vkey)
+
+ #done looping frames
+ #done looping armatures
+ #continue adding animInfoBinary counts here
+
+ anim.TotalBones = len(unique_bone_indexes)
+ print("Bones Count:",anim.TotalBones)
+ anim.TrackTime = float(frame_count) / anim.AnimRate
+ print("Time Track Frame:",anim.TrackTime)
+ psa_file.AddAnimation(anim)
+ print("==== Finish Action Build(s) ====")
+
+exportmessage = "Export Finish"
+
def fs_callback(filename, context, user_setting):
- #this deal with repeat export and the reset settings
- global bonedata, BBCount, nbone, exportmessage
- bonedata = []#clear array
- BBCount = 0
- nbone = 0
-
- start_time = time.clock()
-
- print ("========EXPORTING TO UNREAL SKELETAL MESH FORMATS========\r\n")
- print("Blender Version:", bpy.app.version_string)
-
- psk = PSKFile()
- psa = PSAFile()
-
- #sanity check - this should already have the extension, but just in case, we'll give it one if it doesn't
- psk_filename = make_filename_ext(filename, '.psk')
-
- #make the psa filename
- psa_filename = make_filename_ext(filename, '.psa')
-
- print ('PSK File: ' + psk_filename)
- print ('PSA File: ' + psa_filename)
-
- barmature = True
- bmesh = True
- blender_meshes = []
- blender_armature = []
- selectmesh = []
- selectarmature = []
-
- current_scene = context.scene
- cur_frame = current_scene.frame_current #store current frame before we start walking them during animation parse
- objects = current_scene.objects
-
- print("Checking object count...")
- for next_obj in objects:
- if next_obj.type == 'MESH':
- blender_meshes.append(next_obj)
- if (next_obj.select):
- #print("mesh object select")
- selectmesh.append(next_obj)
- if next_obj.type == 'ARMATURE':
- blender_armature.append(next_obj)
- if (next_obj.select):
- #print("armature object select")
- selectarmature.append(next_obj)
-
- print("Mesh Count:",len(blender_meshes)," Armature Count:",len(blender_armature))
- print("====================================")
- print("Checking Mesh Condtion(s):")
- if len(blender_meshes) == 1:
- print(" - One Mesh Scene")
- elif (len(blender_meshes) > 1) and (len(selectmesh) == 1):
- print(" - One Mesh [Select]")
- else:
- print(" - Too Many Meshes!")
- print(" - Select One Mesh Object!")
- bmesh = False
- print("====================================")
- print("Checking Armature Condtion(s):")
- if len(blender_armature) == 1:
- print(" - One Armature Scene")
- elif (len(blender_armature) > 1) and (len(selectarmature) == 1):
- print(" - One Armature [Select]")
- else:
- print(" - Too Armature Meshes!")
- print(" - Select One Armature Object Only!")
- barmature = False
-
- if (bmesh == False) or (barmature == False):
- exportmessage = "Export Fail! Check Log."
- print("=================================")
- print("= Export Fail! =")
- print("=================================")
- else:
- exportmessage = "Export Finish!"
- #need to build a temp bone index for mesh group vertex
- BoneIndexArmature(blender_armature)
-
- try:
- #######################
- # STEP 1: MESH DUMP
- # we build the vertexes, wedges, and faces in here, as well as a vertexgroup lookup table
- # for the armature parse
- print("//===============================")
- print("// STEP 1")
- print("//===============================")
- parse_meshes(blender_meshes, psk)
- except:
- context.scene.set_frame(cur_frame) #set frame back to original frame
- print ("Exception during Mesh Parse")
- raise
-
- try:
- #######################
- # STEP 2: ARMATURE DUMP
- # IMPORTANT: do this AFTER parsing meshes - we need to use the vertex group data from
- # the mesh parse in here to generate bone influences
- print("//===============================")
- print("// STEP 2")
- print("//===============================")
- parse_armature(blender_armature, psk, psa)
-
- except:
- context.scene.set_frame(cur_frame) #set frame back to original frame
- print ("Exception during Armature Parse")
- raise
-
- try:
- #######################
- # STEP 3: ANIMATION DUMP
- # IMPORTANT: do AFTER parsing bones - we need to do bone lookups in here during animation frames
- print("//===============================")
- print("// STEP 3")
- print("//===============================")
- parse_animation(current_scene, blender_armature, psa)
-
- except:
- context.scene.set_frame(cur_frame) #set frame back to original frame
- print ("Exception during Animation Parse")
- raise
-
- # reset current frame
-
- context.scene.set_frame(cur_frame) #set frame back to original frame
-
- ##########################
- # FILE WRITE
- print("//===========================================")
- print("// bExportPsk:",bpy.context.scene.unrealexportpsk," bExportPsa:",bpy.context.scene.unrealexportpsa)
- print("//===========================================")
- if bpy.context.scene.unrealexportpsk == True:
- print("Writing Skeleton Mesh Data...")
- #RG - dump psk file
- psk.PrintOut()
- file = open(psk_filename, "wb")
- file.write(psk.dump())
- file.close()
- print ("Successfully Exported File: " + psk_filename)
- if bpy.context.scene.unrealexportpsa == True:
- print("Writing Animaiton Data...")
- #RG - dump psa file
- if not psa.IsEmpty():
- psa.PrintOut()
- file = open(psa_filename, "wb")
- file.write(psa.dump())
- file.close()
- print ("Successfully Exported File: " + psa_filename)
- else:
- print ("No Animations (.psa file) to Export")
-
- print ('PSK/PSA Export Script finished in %.2f seconds' % (time.clock() - start_time))
-
- #MSG BOX EXPORT COMPLETE
- #...
-
- #DONE
- print ("PSK/PSA Export Complete")
+ #this deal with repeat export and the reset settings
+ global bonedata, BBCount, nbone, exportmessage
+ bonedata = []#clear array
+ BBCount = 0
+ nbone = 0
+
+ start_time = time.clock()
+
+ print ("========EXPORTING TO UNREAL SKELETAL MESH FORMATS========\r\n")
+ print("Blender Version:", bpy.app.version_string)
+
+ psk = PSKFile()
+ psa = PSAFile()
+
+ #sanity check - this should already have the extension, but just in case, we'll give it one if it doesn't
+ psk_filename = make_filename_ext(filename, '.psk')
+
+ #make the psa filename
+ psa_filename = make_filename_ext(filename, '.psa')
+
+ print ('PSK File: ' + psk_filename)
+ print ('PSA File: ' + psa_filename)
+
+ barmature = True
+ bmesh = True
+ blender_meshes = []
+ blender_armature = []
+ selectmesh = []
+ selectarmature = []
+
+ current_scene = context.scene
+ cur_frame = current_scene.frame_current #store current frame before we start walking them during animation parse
+ objects = current_scene.objects
+
+ print("Checking object count...")
+ for next_obj in objects:
+ if next_obj.type == 'MESH':
+ blender_meshes.append(next_obj)
+ if (next_obj.select):
+ #print("mesh object select")
+ selectmesh.append(next_obj)
+ if next_obj.type == 'ARMATURE':
+ blender_armature.append(next_obj)
+ if (next_obj.select):
+ #print("armature object select")
+ selectarmature.append(next_obj)
+
+ print("Mesh Count:",len(blender_meshes)," Armature Count:",len(blender_armature))
+ print("====================================")
+ print("Checking Mesh Condtion(s):")
+ if len(blender_meshes) == 1:
+ print(" - One Mesh Scene")
+ elif (len(blender_meshes) > 1) and (len(selectmesh) == 1):
+ print(" - One Mesh [Select]")
+ else:
+ print(" - Too Many Meshes!")
+ print(" - Select One Mesh Object!")
+ bmesh = False
+ print("====================================")
+ print("Checking Armature Condtion(s):")
+ if len(blender_armature) == 1:
+ print(" - One Armature Scene")
+ elif (len(blender_armature) > 1) and (len(selectarmature) == 1):
+ print(" - One Armature [Select]")
+ else:
+ print(" - Too Armature Meshes!")
+ print(" - Select One Armature Object Only!")
+ barmature = False
+
+ if (bmesh == False) or (barmature == False):
+ exportmessage = "Export Fail! Check Log."
+ print("=================================")
+ print("= Export Fail! =")
+ print("=================================")
+ else:
+ exportmessage = "Export Finish!"
+ #need to build a temp bone index for mesh group vertex
+ BoneIndexArmature(blender_armature)
+
+ try:
+ #######################
+ # STEP 1: MESH DUMP
+ # we build the vertexes, wedges, and faces in here, as well as a vertexgroup lookup table
+ # for the armature parse
+ print("//===============================")
+ print("// STEP 1")
+ print("//===============================")
+ parse_meshes(blender_meshes, psk)
+ except:
+ context.scene.set_frame(cur_frame) #set frame back to original frame
+ print ("Exception during Mesh Parse")
+ raise
+
+ try:
+ #######################
+ # STEP 2: ARMATURE DUMP
+ # IMPORTANT: do this AFTER parsing meshes - we need to use the vertex group data from
+ # the mesh parse in here to generate bone influences
+ print("//===============================")
+ print("// STEP 2")
+ print("//===============================")
+ parse_armature(blender_armature, psk, psa)
+
+ except:
+ context.scene.set_frame(cur_frame) #set frame back to original frame
+ print ("Exception during Armature Parse")
+ raise
+
+ try:
+ #######################
+ # STEP 3: ANIMATION DUMP
+ # IMPORTANT: do AFTER parsing bones - we need to do bone lookups in here during animation frames
+ print("//===============================")
+ print("// STEP 3")
+ print("//===============================")
+ parse_animation(current_scene, blender_armature, psa)
+
+ except:
+ context.scene.set_frame(cur_frame) #set frame back to original frame
+ print ("Exception during Animation Parse")
+ raise
+
+ # reset current frame
+
+ context.scene.set_frame(cur_frame) #set frame back to original frame
+
+ ##########################
+ # FILE WRITE
+ print("//===========================================")
+ print("// bExportPsk:",bpy.context.scene.unrealexportpsk," bExportPsa:",bpy.context.scene.unrealexportpsa)
+ print("//===========================================")
+ if bpy.context.scene.unrealexportpsk == True:
+ print("Writing Skeleton Mesh Data...")
+ #RG - dump psk file
+ psk.PrintOut()
+ file = open(psk_filename, "wb")
+ file.write(psk.dump())
+ file.close()
+ print ("Successfully Exported File: " + psk_filename)
+ if bpy.context.scene.unrealexportpsa == True:
+ print("Writing Animaiton Data...")
+ #RG - dump psa file
+ if not psa.IsEmpty():
+ psa.PrintOut()
+ file = open(psa_filename, "wb")
+ file.write(psa.dump())
+ file.close()
+ print ("Successfully Exported File: " + psa_filename)
+ else:
+ print ("No Animations (.psa file) to Export")
+
+ print ('PSK/PSA Export Script finished in %.2f seconds' % (time.clock() - start_time))
+
+ #MSG BOX EXPORT COMPLETE
+ #...
+
+ #DONE
+ print ("PSK/PSA Export Complete")
def write_data(path, context, user_setting):
- print("//============================")
- print("// running psk/psa export...")
- print("//============================")
- fs_callback(path, context, user_setting)
- pass
+ print("//============================")
+ print("// running psk/psa export...")
+ print("//============================")
+ fs_callback(path, context, user_setting)
+ pass
from bpy.props import *
@@ -1434,158 +1437,158 @@ IntProperty= bpy.types.Scene.IntProperty
IntProperty(attr="unrealfpsrate", name="fps rate",
description="Set the frame per second (fps) for unreal.",
default=24,min=1,max=100)
-
+
bpy.types.Scene.EnumProperty( attr="unrealexport_settings",
name="Export:",
description="Select a export settings (psk/psa/all)...",
items = exporttypedata, default = '0')
-
+
bpy.types.Scene.BoolProperty( attr="unrealtriangulatebool",
name="Triangulate Mesh",
description="Convert Quad to Tri Mesh Boolean...",
default=False)
-
+
bpy.types.Scene.BoolProperty( attr="unrealactionexportall",
name="All Actions",
description="This let you export all actions from current armature.[Not Build Yet]",
- default=False)
-
+ default=False)
+
bpy.types.Scene.BoolProperty( attr="unrealexportpsk",
name="bool export psa",
description="bool for exporting this psk format",
default=False)
-
+
bpy.types.Scene.BoolProperty( attr="unrealexportpsa",
name="bool export psa",
description="bool for exporting this psa format",
default=False)
class ExportUDKAnimData(bpy.types.Operator):
- global exportmessage
- '''Export Skeleton Mesh / Animation Data file(s)'''
- bl_idname = "export.udk_anim_data" # this is important since its how bpy.ops.export.udk_anim_data is constructed
- bl_label = "Export PSK/PSA"
- __doc__ = "One mesh and one armature else select one mesh or armature to be exported."
-
- # List of operator properties, the attributes will be assigned
- # to the class instance from the operator settings before calling.
-
- filepath = StringProperty(name="File Path", description="Filepath used for exporting the PSA file", maxlen= 1024, default= "")
- use_setting = BoolProperty(name="No Options Yet", description="No Options Yet", default= True)
- pskexportbool = BoolProperty(name="Export PSK", description="Export Skeletal Mesh", default= True)
- psaexportbool = BoolProperty(name="Export PSA", description="Export Action Set (Animation Data)", default= True)
- actionexportall = BoolProperty(name="All Actions", description="This will export all the actions that matches the current armature.", default=False)
-
- @classmethod
- def poll(cls, context):
- return context.active_object != None
-
- def execute(self, context):
- #check if skeleton mesh is needed to be exported
- if (self.properties.pskexportbool):
- bpy.context.scene.unrealexportpsk = True
- else:
- bpy.context.scene.unrealexportpsk = False
- #check if animation data is needed to be exported
- if (self.properties.psaexportbool):
- bpy.context.scene.unrealexportpsa = True
- else:
- bpy.context.scene.unrealexportpsa = False
-
- write_data(self.properties.filepath, context, self.properties.use_setting)
-
- self.report({'WARNING', 'INFO'}, exportmessage)
- return {'FINISHED'}
-
- def invoke(self, context, event):
- wm = context.manager
- wm.add_fileselect(self)
- return {'RUNNING_MODAL'}
+ global exportmessage
+ '''Export Skeleton Mesh / Animation Data file(s)'''
+ bl_idname = "export.udk_anim_data" # this is important since its how bpy.ops.export.udk_anim_data is constructed
+ bl_label = "Export PSK/PSA"
+ __doc__ = "One mesh and one armature else select one mesh or armature to be exported."
+
+ # List of operator properties, the attributes will be assigned
+ # to the class instance from the operator settings before calling.
+
+ filepath = StringProperty(name="File Path", description="Filepath used for exporting the PSA file", maxlen= 1024, default= "")
+ use_setting = BoolProperty(name="No Options Yet", description="No Options Yet", default= True)
+ pskexportbool = BoolProperty(name="Export PSK", description="Export Skeletal Mesh", default= True)
+ psaexportbool = BoolProperty(name="Export PSA", description="Export Action Set (Animation Data)", default= True)
+ actionexportall = BoolProperty(name="All Actions", description="This will export all the actions that matches the current armature.", default=False)
+
+ @classmethod
+ def poll(cls, context):
+ return context.active_object != None
+
+ def execute(self, context):
+ #check if skeleton mesh is needed to be exported
+ if (self.properties.pskexportbool):
+ bpy.context.scene.unrealexportpsk = True
+ else:
+ bpy.context.scene.unrealexportpsk = False
+ #check if animation data is needed to be exported
+ if (self.properties.psaexportbool):
+ bpy.context.scene.unrealexportpsa = True
+ else:
+ bpy.context.scene.unrealexportpsa = False
+
+ write_data(self.properties.filepath, context, self.properties.use_setting)
+
+ self.report({'WARNING', 'INFO'}, exportmessage)
+ return {'FINISHED'}
+
+ def invoke(self, context, event):
+ wm = context.manager
+ wm.add_fileselect(self)
+ return {'RUNNING_MODAL'}
class VIEW3D_PT_unrealtools_objectmode(bpy.types.Panel):
- bl_space_type = "VIEW_3D"
- bl_region_type = "TOOLS"
- bl_label = "Unreal Tools"
-
- @classmethod
- def poll(cls, context):
- return context.active_object
-
- def draw(self, context):
- layout = self.layout
- #layout.label(text="Unreal Tools")
- rd = context.scene
- #drop box
- layout.prop(rd, "unrealexport_settings",expand=True)
- #layout.prop(rd, "unrealexport_settings")
- #button
- layout.operator("object.UnrealExport")
- #FPS #it use the real data from your scene
- layout.prop(rd.render, "fps")
-
- layout.prop(rd, "unrealactionexportall")
- #row = layout.row()
- #row.label(text="Action Set(s)(not build)")
- #for action in bpy.data.actions:
- #print(dir( action))
- #print(action.frame_range)
- #row = layout.row()
- #row.prop(action, "name")
-
- #print(dir(action.groups[0]))
- #for g in action.groups:#those are bones
- #print("group...")
- #print(dir(g))
- #print("////////////")
- #print((g.name))
- #print("////////////")
-
- #row.label(text="Active:" + action.select)
- btrimesh = False
-
+ bl_space_type = "VIEW_3D"
+ bl_region_type = "TOOLS"
+ bl_label = "Unreal Tools"
+
+ @classmethod
+ def poll(cls, context):
+ return context.active_object
+
+ def draw(self, context):
+ layout = self.layout
+ #layout.label(text="Unreal Tools")
+ rd = context.scene
+ #drop box
+ layout.prop(rd, "unrealexport_settings",expand=True)
+ #layout.prop(rd, "unrealexport_settings")
+ #button
+ layout.operator("object.UnrealExport")
+ #FPS #it use the real data from your scene
+ layout.prop(rd.render, "fps")
+
+ layout.prop(rd, "unrealactionexportall")
+ #row = layout.row()
+ #row.label(text="Action Set(s)(not build)")
+ #for action in bpy.data.actions:
+ #print(dir( action))
+ #print(action.frame_range)
+ #row = layout.row()
+ #row.prop(action, "name")
+
+ #print(dir(action.groups[0]))
+ #for g in action.groups:#those are bones
+ #print("group...")
+ #print(dir(g))
+ #print("////////////")
+ #print((g.name))
+ #print("////////////")
+
+ #row.label(text="Active:" + action.select)
+ btrimesh = False
+
class OBJECT_OT_UnrealExport(bpy.types.Operator):
- global exportmessage
- bl_idname = "OBJECT_OT_UnrealExport"
- bl_label = "Unreal Export"
- __doc__ = "Select export setting for .psk/.psa or both."
-
- def invoke(self, context, event):
- #path = StringProperty(name="File Path", description="File path used for exporting the PSA file", maxlen= 1024, default= "")
- print("Init Export Script:")
- if(int(bpy.context.scene.unrealexport_settings) == 0):
- bpy.context.scene.unrealexportpsk = True
- bpy.context.scene.unrealexportpsa = False
- print("Exporting PSK...")
- if(int(bpy.context.scene.unrealexport_settings) == 1):
- bpy.context.scene.unrealexportpsk = False
- bpy.context.scene.unrealexportpsa = True
- print("Exporting PSA...")
- if(int(bpy.context.scene.unrealexport_settings) == 2):
- bpy.context.scene.unrealexportpsk = True
- bpy.context.scene.unrealexportpsa = True
- print("Exporting ALL...")
-
- default_path = os.path.splitext(bpy.data.filepath)[0] + ".psk"
- fs_callback(default_path, bpy.context, False)
-
- #self.report({'WARNING', 'INFO'}, exportmessage)
- self.report({'INFO'}, exportmessage)
- return{'FINISHED'}
+ global exportmessage
+ bl_idname = "OBJECT_OT_UnrealExport"
+ bl_label = "Unreal Export"
+ __doc__ = "Select export setting for .psk/.psa or both."
+
+ def invoke(self, context, event):
+ #path = StringProperty(name="File Path", description="File path used for exporting the PSA file", maxlen= 1024, default= "")
+ print("Init Export Script:")
+ if(int(bpy.context.scene.unrealexport_settings) == 0):
+ bpy.context.scene.unrealexportpsk = True
+ bpy.context.scene.unrealexportpsa = False
+ print("Exporting PSK...")
+ if(int(bpy.context.scene.unrealexport_settings) == 1):
+ bpy.context.scene.unrealexportpsk = False
+ bpy.context.scene.unrealexportpsa = True
+ print("Exporting PSA...")
+ if(int(bpy.context.scene.unrealexport_settings) == 2):
+ bpy.context.scene.unrealexportpsk = True
+ bpy.context.scene.unrealexportpsa = True
+ print("Exporting ALL...")
+
+ default_path = os.path.splitext(bpy.data.filepath)[0] + ".psk"
+ fs_callback(default_path, bpy.context, False)
+
+ #self.report({'WARNING', 'INFO'}, exportmessage)
+ self.report({'INFO'}, exportmessage)
+ return{'FINISHED'}
def menu_func(self, context):
- bpy.context.scene.unrealexportpsk = True
- bpy.context.scene.unrealexportpsa = True
- default_path = os.path.splitext(bpy.data.filepath)[0] + ".psk"
- self.layout.operator("export.udk_anim_data", text="Skeleton Mesh / Animation Data (.psk/.psa)").filepath = default_path
+ bpy.context.scene.unrealexportpsk = True
+ bpy.context.scene.unrealexportpsa = True
+ default_path = os.path.splitext(bpy.data.filepath)[0] + ".psk"
+ self.layout.operator("export.udk_anim_data", text="Skeleton Mesh / Animation Data (.psk/.psa)").filepath = default_path
def register():
- bpy.types.INFO_MT_file_export.append(menu_func)
+ bpy.types.INFO_MT_file_export.append(menu_func)
def unregister():
- bpy.types.INFO_MT_file_export.remove(menu_func)
+ bpy.types.INFO_MT_file_export.remove(menu_func)
if __name__ == "__main__":
register()
diff --git a/io_import_images_as_planes.py b/io_import_images_as_planes.py
index e1aa672b..0a7b4a84 100644
--- a/io_import_images_as_planes.py
+++ b/io_import_images_as_planes.py
@@ -17,15 +17,18 @@
# ##### END GPL LICENSE BLOCK #####
bl_addon_info = {
- "name": "Import: Images as Planes",
+ "name": "Import Images as Planes",
"author": "Florian Meyer (testscreenings)",
- "version": "0.7",
+ "version": (0,7),
"blender": (2, 5, 3),
+ "api": 31667,
"location": "File > Import > Images as Planes",
- "description": "Imports images and creates planes with the appropiate aspect ratio. The images are mapped to the planes.",
+ "description": "Imports images and creates planes with the appropriate aspect ratio. The images are mapped to the planes.",
"warning": "",
- "wiki_url": "http://wiki.blender.org/index.php/Extensions:2.5/Py/Scripts/Add_Mesh/Planes_from_Images",
- "tracker_url": "https://projects.blender.org/tracker/index.php?func=detail&aid=21751&group_id=153&atid=469",
+ "wiki_url": "http://wiki.blender.org/index.php/Extensions:2.5/Py/"\
+ "Scripts/Add_Mesh/Planes_from_Images",
+ "tracker_url": "https://projects.blender.org/tracker/index.php?"\
+ "func=detail&aid=21751&group_id=153&atid=469",
"category": "Import/Export"}
"""
diff --git a/io_import_scene_mhx.py b/io_import_scene_mhx.py
index 628d1c8a..00df0c8e 100644
--- a/io_import_scene_mhx.py
+++ b/io_import_scene_mhx.py
@@ -17,29 +17,32 @@
# ##### END GPL LICENSE BLOCK #####
bl_addon_info = {
- "name": "Import: MakeHuman (.mhx)",
+ "name": "Import MakeHuman (.mhx)",
"author": "Thomas Larsson",
- "version": "0.9, Make Human Alpha 5",
+ "version": (0,9,5),
"blender": (2, 5, 3),
+ "api": 31667,
"location": "File > Import",
"description": "Import files in the MakeHuman eXchange format (.mhx)",
- "warning": "",
- "wiki_url": "http://wiki.blender.org/index.php/Extensions:2.5/Py/Scripts/File_I-O/Make_Human",
- "tracker_url": "https://projects.blender.org/tracker/index.php?func=detail&aid=21872&group_id=153&atid=469",
+ "warning": "Alpha version",
+ "wiki_url": "http://wiki.blender.org/index.php/Extensions:2.5/Py/"
+ "Scripts/File_I-O/Make_Human",
+ "tracker_url": "https://projects.blender.org/tracker/index.php?"
+ "func=detail&aid=21872&group_id=153&atid=469",
"category": "Import/Export"}
"""
-**Project Name:** MakeHuman
+**Project Name:** MakeHuman
**Product Home Page:** http://www.makehuman.org/
-**Code Home Page:** http://code.google.com/p/makehuman/
+**Code Home Page:** http://code.google.com/p/makehuman/
-**Authors:** Thomas Larsson
+**Authors:** Thomas Larsson
-**Copyright(c):** MakeHuman Team 2001-2010
+**Copyright(c):** MakeHuman Team 2001-2010
-**Licensing:** GPL3 (see also http://sites.google.com/site/makehumandocs/licensing)
+**Licensing:** GPL3 (see also http://sites.google.com/site/makehumandocs/licensing)
**Coding Standards:** See http://sites.google.com/site/makehumandocs/developers-guide
@@ -94,7 +97,7 @@ theTempDatum = None
todo = []
#
-# toggle flags
+# toggle flags
#
T_ArmIK = 0x01
@@ -115,16 +118,16 @@ T_MHX = 0x8000
toggle = T_Replace + T_ArmIK + T_LegIK + T_Mesh + T_Armature + T_Face
#
-# setFlagsAndFloats(rigFlags):
+# setFlagsAndFloats(rigFlags):
#
-# Global floats
+# Global floats
fLegIK = 0.0
fArmIK = 0.0
fFingerPanel = 0.0
fFingerIK = 0.0
fFingerCurl = 0.0
-# rigLeg and rigArm flags
+# rigLeg and rigArm flags
T_Toes = 0x0001
T_GoboFoot = 0x0002
T_InvFoot = 0x0004
@@ -140,2071 +143,2071 @@ rigLeg = 0
rigArm = 0
def setFlagsAndFloats(rigFlags):
- global toggle, rigLeg, rigArm
+ global toggle, rigLeg, rigArm
- (footRig, fingerRig) = rigFlags
- rigLeg = 0
- rigArm = 0
- if footRig == 'Reverse foot': rigLeg |= T_InvFoot
- elif footRig == 'Gobo': rigLeg |= T_GoboFoot
+ (footRig, fingerRig) = rigFlags
+ rigLeg = 0
+ rigArm = 0
+ if footRig == 'Reverse foot': rigLeg |= T_InvFoot
+ elif footRig == 'Gobo': rigLeg |= T_GoboFoot
- if fingerRig == 'Panel': rigArm |= T_FingerPanel
- elif fingerRig == 'IK': rigArm |= T_FingerIK
- elif fingerRig == 'Curl': rigArm |= T_FingerCurl
+ if fingerRig == 'Panel': rigArm |= T_FingerPanel
+ elif fingerRig == 'IK': rigArm |= T_FingerIK
+ elif fingerRig == 'Curl': rigArm |= T_FingerCurl
- toggle |= T_Panel
+ toggle |= T_Panel
- # Global floats, used as influences
- global fFingerCurl, fLegIK, fArmIK, fFingerIK
+ # Global floats, used as influences
+ global fFingerCurl, fLegIK, fArmIK, fFingerIK
- fFingerCurl = 1.0 if rigArm&T_FingerCurl else 0.0
- fLegIK = 1.0 if toggle&T_LegIK else 0.0
- fArmIK = 1.0 if toggle&T_ArmIK else 0.0
- fFingerIK = 1.0 if rigArm&T_FingerIK else 0.0
+ fFingerCurl = 1.0 if rigArm&T_FingerCurl else 0.0
+ fLegIK = 1.0 if toggle&T_LegIK else 0.0
+ fArmIK = 1.0 if toggle&T_ArmIK else 0.0
+ fFingerIK = 1.0 if rigArm&T_FingerIK else 0.0
- return
+ return
#
-# Dictionaries
+# Dictionaries
#
loadedData = {
- 'NONE' : {},
-
- 'Object' : {},
- 'Mesh' : {},
- 'Armature' : {},
- 'Lamp' : {},
- 'Camera' : {},
- 'Lattice' : {},
- 'Curve' : {},
-
- 'Material' : {},
- 'Image' : {},
- 'MaterialTextureSlot' : {},
- 'Texture' : {},
-
- 'Bone' : {},
- 'BoneGroup' : {},
- 'Rigify' : {},
-
- 'Action' : {},
- 'Group' : {},
-
- 'MeshTextureFaceLayer' : {},
- 'MeshColorLayer' : {},
- 'VertexGroup' : {},
- 'ShapeKey' : {},
- 'ParticleSystem' : {},
-
- 'ObjectConstraints' : {},
- 'ObjectModifiers' : {},
- 'MaterialSlot' : {},
+ 'NONE' : {},
+
+ 'Object' : {},
+ 'Mesh' : {},
+ 'Armature' : {},
+ 'Lamp' : {},
+ 'Camera' : {},
+ 'Lattice' : {},
+ 'Curve' : {},
+
+ 'Material' : {},
+ 'Image' : {},
+ 'MaterialTextureSlot' : {},
+ 'Texture' : {},
+
+ 'Bone' : {},
+ 'BoneGroup' : {},
+ 'Rigify' : {},
+
+ 'Action' : {},
+ 'Group' : {},
+
+ 'MeshTextureFaceLayer' : {},
+ 'MeshColorLayer' : {},
+ 'VertexGroup' : {},
+ 'ShapeKey' : {},
+ 'ParticleSystem' : {},
+
+ 'ObjectConstraints' : {},
+ 'ObjectModifiers' : {},
+ 'MaterialSlot' : {},
}
Plural = {
- 'Object' : 'objects',
- 'Mesh' : 'meshes',
- 'Lattice' : 'lattices',
- 'Curve' : 'curves',
- 'Group' : 'groups',
- 'Empty' : 'empties',
- 'Armature' : 'armatures',
- 'Bone' : 'bones',
- 'BoneGroup' : 'bone_groups',
- 'Pose' : 'poses',
- 'PoseBone' : 'pose_bones',
- 'Material' : 'materials',
- 'Texture' : 'textures',
- 'Image' : 'images',
- 'Camera' : 'cameras',
- 'Lamp' : 'lamps',
- 'World' : 'worlds',
+ 'Object' : 'objects',
+ 'Mesh' : 'meshes',
+ 'Lattice' : 'lattices',
+ 'Curve' : 'curves',
+ 'Group' : 'groups',
+ 'Empty' : 'empties',
+ 'Armature' : 'armatures',
+ 'Bone' : 'bones',
+ 'BoneGroup' : 'bone_groups',
+ 'Pose' : 'poses',
+ 'PoseBone' : 'pose_bones',
+ 'Material' : 'materials',
+ 'Texture' : 'textures',
+ 'Image' : 'images',
+ 'Camera' : 'cameras',
+ 'Lamp' : 'lamps',
+ 'World' : 'worlds',
}
#
-# Creators
+# Creators
#
def uvtexCreator(me, name):
- print("uvtexCreator", me, name)
- return me.uv_textures.new(name)
+ print("uvtexCreator", me, name)
+ return me.uv_textures.new(name)
def vertcolCreator(me, name):
- print("vertcolCreator", me, name)
- return me.vertex_colors.new(name)
+ print("vertcolCreator", me, name)
+ return me.vertex_colors.new(name)
#
-# loadMhx(filePath, context, flags):
+# loadMhx(filePath, context, flags):
#
def loadMhx(filePath, context, flags):
- global toggle
- toggle = flags
- readMhxFile(filePath)
- return
+ global toggle
+ toggle = flags
+ readMhxFile(filePath)
+ return
#
-# readMhxFile(filePath, rigFlags):
+# readMhxFile(filePath, rigFlags):
#
def readMhxFile(filePath, rigFlags):
- global todo, nErrors
-
- fileName = os.path.expanduser(filePath)
- (shortName, ext) = os.path.splitext(fileName)
- if ext != ".mhx":
- print("Error: Not a mhx file: " + fileName)
- return
- print( "Opening MHX file "+ fileName )
- time1 = time.clock()
-
- ignore = False
- stack = []
- tokens = []
- key = "toplevel"
- level = 0
- nErrors = 0
-
- setFlagsAndFloats(rigFlags)
-
- file= open(fileName, "rU")
- print( "Tokenizing" )
- lineNo = 0
- for line in file:
- # print(line)
- lineSplit= line.split()
- lineNo += 1
- if len(lineSplit) == 0:
- pass
- elif lineSplit[0] == '#':
- pass
- elif lineSplit[0] == 'end':
- try:
- sub = tokens
- tokens = stack.pop()
- if tokens:
- tokens[-1][2] = sub
- level -= 1
- except:
- print( "Tokenizer error at or before line %d" % lineNo )
- print( line )
- dummy = stack.pop()
- elif lineSplit[-1] == ';':
- if lineSplit[0] == '\\':
- key = lineSplit[1]
- tokens.append([key,lineSplit[2:-1],[]])
- else:
- key = lineSplit[0]
- tokens.append([key,lineSplit[1:-1],[]])
- else:
- key = lineSplit[0]
- tokens.append([key,lineSplit[1:],[]])
- stack.append(tokens)
- level += 1
- tokens = []
- file.close()
-
- if level != 0:
- raise NameError("Tokenizer out of kilter %d" % level)
- clearScene()
- print( "Parsing" )
- parse(tokens)
-
- for (expr, glbals, lcals) in todo:
- try:
- # print("Doing %s" % expr)
- exec(expr, glbals, lcals)
- except:
- msg = "Failed: "+expr
- print( msg )
- nErrors += 1
- #raise NameError(msg)
-
- print("Postprocess")
- postProcess()
- print("HideLayers")
- hideLayers()
- time2 = time.clock()
- print("toggle = %x" % toggle)
- msg = "File %s loaded in %g s" % (fileName, time2-time1)
- if nErrors:
- msg += " but there where %d errors. " % (nErrors)
- print(msg)
- return # loadMhx
-
-#
-# getObject(name, var, glbals, lcals):
+ global todo, nErrors
+
+ fileName = os.path.expanduser(filePath)
+ (shortName, ext) = os.path.splitext(fileName)
+ if ext != ".mhx":
+ print("Error: Not a mhx file: " + fileName)
+ return
+ print( "Opening MHX file "+ fileName )
+ time1 = time.clock()
+
+ ignore = False
+ stack = []
+ tokens = []
+ key = "toplevel"
+ level = 0
+ nErrors = 0
+
+ setFlagsAndFloats(rigFlags)
+
+ file= open(fileName, "rU")
+ print( "Tokenizing" )
+ lineNo = 0
+ for line in file:
+ # print(line)
+ lineSplit= line.split()
+ lineNo += 1
+ if len(lineSplit) == 0:
+ pass
+ elif lineSplit[0] == '#':
+ pass
+ elif lineSplit[0] == 'end':
+ try:
+ sub = tokens
+ tokens = stack.pop()
+ if tokens:
+ tokens[-1][2] = sub
+ level -= 1
+ except:
+ print( "Tokenizer error at or before line %d" % lineNo )
+ print( line )
+ dummy = stack.pop()
+ elif lineSplit[-1] == ';':
+ if lineSplit[0] == '\\':
+ key = lineSplit[1]
+ tokens.append([key,lineSplit[2:-1],[]])
+ else:
+ key = lineSplit[0]
+ tokens.append([key,lineSplit[1:-1],[]])
+ else:
+ key = lineSplit[0]
+ tokens.append([key,lineSplit[1:],[]])
+ stack.append(tokens)
+ level += 1
+ tokens = []
+ file.close()
+
+ if level != 0:
+ raise NameError("Tokenizer out of kilter %d" % level)
+ clearScene()
+ print( "Parsing" )
+ parse(tokens)
+
+ for (expr, glbals, lcals) in todo:
+ try:
+ # print("Doing %s" % expr)
+ exec(expr, glbals, lcals)
+ except:
+ msg = "Failed: "+expr
+ print( msg )
+ nErrors += 1
+ #raise NameError(msg)
+
+ print("Postprocess")
+ postProcess()
+ print("HideLayers")
+ hideLayers()
+ time2 = time.clock()
+ print("toggle = %x" % toggle)
+ msg = "File %s loaded in %g s" % (fileName, time2-time1)
+ if nErrors:
+ msg += " but there where %d errors. " % (nErrors)
+ print(msg)
+ return # loadMhx
+
+#
+# getObject(name, var, glbals, lcals):
#
def getObject(name, var, glbals, lcals):
- try:
- ob = loadedData['Object'][name]
- except:
- if name != "None":
- expr = "%s = loadedData['Object'][name]" % var
- print("Todo ", expr)
- todo.append((expr, glbals, lcals))
- ob = None
- return ob
+ try:
+ ob = loadedData['Object'][name]
+ except:
+ if name != "None":
+ expr = "%s = loadedData['Object'][name]" % var
+ print("Todo ", expr)
+ todo.append((expr, glbals, lcals))
+ ob = None
+ return ob
#
-# parse(tokens):
+# parse(tokens):
#
ifResult = False
def parse(tokens):
- global warnedVersion, MHX249, ifResult
-
- for (key, val, sub) in tokens:
- # print("Parse %s" % key)
- data = None
- if key == 'MHX':
- if int(val[0]) != MAJOR_VERSION and int(val[1]) != MINOR_VERSION and not warnedVersion:
- print("Warning: \nThis file was created with another version of MHX\n")
- warnedVersion = True
-
- elif key == 'MHX249':
- MHX249 = eval(val[0])
- print("Blender 2.49 compatibility mode is %s\n" % MHX249)
-
- elif key == 'if':
- try:
- ifResult = eval(val[0])
- except:
- ifResult = False
- if ifResult:
- parse(sub)
-
- elif key == 'elif':
- if not ifResult:
- try:
- ifResult = eval(val[0])
- except:
- ifResult = False
- if ifResult:
- parse(sub)
-
- elif key == 'else':
- if not ifResult:
- parse(sub)
-
-
- elif MHX249:
- pass
-
- elif key == 'print':
- msg = concatList(val)
- print(msg)
- elif key == 'warn':
- msg = concatList(val)
- print(msg)
- elif key == 'error':
- msg = concatList(val)
- raise NameError(msg)
- elif key == "Object":
- parseObject(val, sub)
- elif key == "Mesh":
- data = parseMesh(val, sub)
- elif key == "Curve":
- data = parseCurve(val, sub)
- elif key == "Lattice":
- data = parseLattice(val, sub)
- elif key == "Group":
- data = parseGroup(val, sub)
- elif key == "Armature":
- data = parseArmature(val, sub)
- elif key == "Pose":
- data = parsePose(val, sub)
- elif key == "Action":
- data = parseAction(val, sub)
- elif key == "Material":
- data = parseMaterial(val, sub)
- elif key == "Texture":
- data = parseTexture(val, sub)
- elif key == "Image":
- data = parseImage(val, sub)
- elif key == "Process":
- parseProcess(val, sub)
- elif key == 'AnimationData':
- try:
- ob = loadedData['Object'][val[0]]
- except:
- ob = None
- if ob:
- bpy.context.scene.objects.active = ob
- parseAnimationData(ob, sub)
- elif key == 'ShapeKeys':
- try:
- ob = loadedData['Object'][val[0]]
- except:
- ob = None
- if ob:
- bpy.context.scene.objects.active = ob
- parseShapeKeys(ob, ob.data, val, sub)
- else:
- data = parseDefaultType(key, val, sub)
-
- if data and key != 'Mesh':
- print( data )
- return
-
-#
-# parseDefaultType(typ, args, tokens):
+ global warnedVersion, MHX249, ifResult
+
+ for (key, val, sub) in tokens:
+ # print("Parse %s" % key)
+ data = None
+ if key == 'MHX':
+ if int(val[0]) != MAJOR_VERSION and int(val[1]) != MINOR_VERSION and not warnedVersion:
+ print("Warning: \nThis file was created with another version of MHX\n")
+ warnedVersion = True
+
+ elif key == 'MHX249':
+ MHX249 = eval(val[0])
+ print("Blender 2.49 compatibility mode is %s\n" % MHX249)
+
+ elif key == 'if':
+ try:
+ ifResult = eval(val[0])
+ except:
+ ifResult = False
+ if ifResult:
+ parse(sub)
+
+ elif key == 'elif':
+ if not ifResult:
+ try:
+ ifResult = eval(val[0])
+ except:
+ ifResult = False
+ if ifResult:
+ parse(sub)
+
+ elif key == 'else':
+ if not ifResult:
+ parse(sub)
+
+
+ elif MHX249:
+ pass
+
+ elif key == 'print':
+ msg = concatList(val)
+ print(msg)
+ elif key == 'warn':
+ msg = concatList(val)
+ print(msg)
+ elif key == 'error':
+ msg = concatList(val)
+ raise NameError(msg)
+ elif key == "Object":
+ parseObject(val, sub)
+ elif key == "Mesh":
+ data = parseMesh(val, sub)
+ elif key == "Curve":
+ data = parseCurve(val, sub)
+ elif key == "Lattice":
+ data = parseLattice(val, sub)
+ elif key == "Group":
+ data = parseGroup(val, sub)
+ elif key == "Armature":
+ data = parseArmature(val, sub)
+ elif key == "Pose":
+ data = parsePose(val, sub)
+ elif key == "Action":
+ data = parseAction(val, sub)
+ elif key == "Material":
+ data = parseMaterial(val, sub)
+ elif key == "Texture":
+ data = parseTexture(val, sub)
+ elif key == "Image":
+ data = parseImage(val, sub)
+ elif key == "Process":
+ parseProcess(val, sub)
+ elif key == 'AnimationData':
+ try:
+ ob = loadedData['Object'][val[0]]
+ except:
+ ob = None
+ if ob:
+ bpy.context.scene.objects.active = ob
+ parseAnimationData(ob, sub)
+ elif key == 'ShapeKeys':
+ try:
+ ob = loadedData['Object'][val[0]]
+ except:
+ ob = None
+ if ob:
+ bpy.context.scene.objects.active = ob
+ parseShapeKeys(ob, ob.data, val, sub)
+ else:
+ data = parseDefaultType(key, val, sub)
+
+ if data and key != 'Mesh':
+ print( data )
+ return
+
+#
+# parseDefaultType(typ, args, tokens):
#
def parseDefaultType(typ, args, tokens):
- global todo
+ global todo
- name = args[0]
- data = None
- expr = "bpy.data.%s.new('%s')" % (Plural[typ], name)
- print(expr)
- data = eval(expr)
- print(" ok", data)
+ name = args[0]
+ data = None
+ expr = "bpy.data.%s.new('%s')" % (Plural[typ], name)
+ print(expr)
+ data = eval(expr)
+ print(" ok", data)
- bpyType = typ.capitalize()
- print(bpyType, name, data)
- loadedData[bpyType][name] = data
- if data == None:
- return None
+ bpyType = typ.capitalize()
+ print(bpyType, name, data)
+ loadedData[bpyType][name] = data
+ if data == None:
+ return None
- for (key, val, sub) in tokens:
- #print("%s %s" % (key, val))
- defaultKey(key, val, sub, 'data', [], globals(), locals())
- print("Done ", data)
- return data
-
+ for (key, val, sub) in tokens:
+ #print("%s %s" % (key, val))
+ defaultKey(key, val, sub, 'data', [], globals(), locals())
+ print("Done ", data)
+ return data
+
#
-# concatList(elts)
+# concatList(elts)
#
def concatList(elts):
- string = ""
- for elt in elts:
- string += " %s" % elt
- return string
+ string = ""
+ for elt in elts:
+ string += " %s" % elt
+ return string
#
-# parseAction(args, tokens):
-# parseFCurve(fcu, args, tokens):
-# parseKeyFramePoint(pt, args, tokens):
+# parseAction(args, tokens):
+# parseFCurve(fcu, args, tokens):
+# parseKeyFramePoint(pt, args, tokens):
#
def parseAction(args, tokens):
- name = args[0]
- if invalid(args[1]):
- return
-
- ob = bpy.context.object
- bpy.ops.object.mode_set(mode='POSE')
- if ob.animation_data:
- ob.animation_data.action = None
- created = {}
- for (key, val, sub) in tokens:
- if key == 'FCurve':
- prepareActionFCurve(ob, created, val, sub)
-
- act = ob.animation_data.action
- loadedData['Action'][name] = act
- if act == None:
- print("Ignoring action %s" % name)
- return act
- act.name = name
- print("Action", name, act, ob)
-
- for (key, val, sub) in tokens:
- if key == 'FCurve':
- fcu = parseActionFCurve(act, ob, val, sub)
- else:
- defaultKey(key, val, sub, 'act', [], globals(), locals())
- ob.animation_data.action = None
- bpy.ops.object.mode_set(mode='OBJECT')
- return act
-
-def prepareActionFCurve(ob, created, args, tokens):
- dataPath = args[0]
- index = args[1]
- (expr, channel) = channelFromDataPath(dataPath, index)
- try:
- if channel in created[expr]:
- return
- else:
- created[expr].append(channel)
- except:
- created[expr] = [channel]
-
- times = []
- for (key, val, sub) in tokens:
- if key == 'kp':
- times.append(int(val[0]))
-
- try:
- data = eval(expr)
- except:
- print("Ignoring illegal expression: %s" % expr)
- return
-
- n = 0
- for t in times:
- #bpy.context.scene.current_frame = t
- bpy.ops.anim.change_frame(frame = t)
- try:
- data.keyframe_insert(channel)
- n += 1
- except:
- pass
- #print("failed", data, expr, channel)
- if n != len(times):
- print("Mismatch", n, len(times), expr, channel)
- return
+ name = args[0]
+ if invalid(args[1]):
+ return
+
+ ob = bpy.context.object
+ bpy.ops.object.mode_set(mode='POSE')
+ if ob.animation_data:
+ ob.animation_data.action = None
+ created = {}
+ for (key, val, sub) in tokens:
+ if key == 'FCurve':
+ prepareActionFCurve(ob, created, val, sub)
+
+ act = ob.animation_data.action
+ loadedData['Action'][name] = act
+ if act == None:
+ print("Ignoring action %s" % name)
+ return act
+ act.name = name
+ print("Action", name, act, ob)
+
+ for (key, val, sub) in tokens:
+ if key == 'FCurve':
+ fcu = parseActionFCurve(act, ob, val, sub)
+ else:
+ defaultKey(key, val, sub, 'act', [], globals(), locals())
+ ob.animation_data.action = None
+ bpy.ops.object.mode_set(mode='OBJECT')
+ return act
+
+def prepareActionFCurve(ob, created, args, tokens):
+ dataPath = args[0]
+ index = args[1]
+ (expr, channel) = channelFromDataPath(dataPath, index)
+ try:
+ if channel in created[expr]:
+ return
+ else:
+ created[expr].append(channel)
+ except:
+ created[expr] = [channel]
+
+ times = []
+ for (key, val, sub) in tokens:
+ if key == 'kp':
+ times.append(int(val[0]))
+
+ try:
+ data = eval(expr)
+ except:
+ print("Ignoring illegal expression: %s" % expr)
+ return
+
+ n = 0
+ for t in times:
+ #bpy.context.scene.current_frame = t
+ bpy.ops.anim.change_frame(frame = t)
+ try:
+ data.keyframe_insert(channel)
+ n += 1
+ except:
+ pass
+ #print("failed", data, expr, channel)
+ if n != len(times):
+ print("Mismatch", n, len(times), expr, channel)
+ return
def channelFromDataPath(dataPath, index):
- words = dataPath.split(']')
- if len(words) == 1:
- # location
- expr = "ob"
- channel = dataPath
- elif len(words) == 2:
- # pose.bones["tongue"].location
- expr = "ob.%s]" % (words[0])
- cwords = words[1].split('.')
- channel = cwords[1]
- elif len(words) == 3:
- # pose.bones["brow.R"]["mad"]
- expr = "ob.%s]" % (words[0])
- cwords = words[1].split('"')
- channel = cwords[1]
- # print(expr, channel, index)
- return (expr, channel)
+ words = dataPath.split(']')
+ if len(words) == 1:
+ # location
+ expr = "ob"
+ channel = dataPath
+ elif len(words) == 2:
+ # pose.bones["tongue"].location
+ expr = "ob.%s]" % (words[0])
+ cwords = words[1].split('.')
+ channel = cwords[1]
+ elif len(words) == 3:
+ # pose.bones["brow.R"]["mad"]
+ expr = "ob.%s]" % (words[0])
+ cwords = words[1].split('"')
+ channel = cwords[1]
+ # print(expr, channel, index)
+ return (expr, channel)
def parseActionFCurve(act, ob, args, tokens):
- dataPath = args[0]
- index = args[1]
- (expr, channel) = channelFromDataPath(dataPath, index)
- index = int(args[1])
-
- success = False
- for fcu in act.fcurves:
- (expr1, channel1) = channelFromDataPath(fcu.data_path, fcu.array_index)
- if expr1 == expr and channel1 == channel and fcu.array_index == index:
- success = True
- break
- if not success:
- return None
-
- n = 0
- for (key, val, sub) in tokens:
- if key == 'kp':
- try:
- pt = fcu.keyframe_points[n]
- pt.use_interpolation = 'LINEAR'
- pt = parseKeyFramePoint(pt, val, sub)
- n += 1
- except:
- pass
- #print(tokens)
- #raise NameError("kp", fcu, n, len(fcu.keyframe_points), val)
- else:
- defaultKey(key, val, sub, 'fcu', [], globals(), locals())
- return fcu
+ dataPath = args[0]
+ index = args[1]
+ (expr, channel) = channelFromDataPath(dataPath, index)
+ index = int(args[1])
+
+ success = False
+ for fcu in act.fcurves:
+ (expr1, channel1) = channelFromDataPath(fcu.data_path, fcu.array_index)
+ if expr1 == expr and channel1 == channel and fcu.array_index == index:
+ success = True
+ break
+ if not success:
+ return None
+
+ n = 0
+ for (key, val, sub) in tokens:
+ if key == 'kp':
+ try:
+ pt = fcu.keyframe_points[n]
+ pt.use_interpolation = 'LINEAR'
+ pt = parseKeyFramePoint(pt, val, sub)
+ n += 1
+ except:
+ pass
+ #print(tokens)
+ #raise NameError("kp", fcu, n, len(fcu.keyframe_points), val)
+ else:
+ defaultKey(key, val, sub, 'fcu', [], globals(), locals())
+ return fcu
def parseKeyFramePoint(pt, args, tokens):
- pt.co = (float(args[0]), float(args[1]))
- if len(args) > 2:
- pt.handle_left = (float(args[2]), float(args[3]))
- pt.handle_right = (float(args[3]), float(args[5]))
- return pt
+ pt.co = (float(args[0]), float(args[1]))
+ if len(args) > 2:
+ pt.handle_left = (float(args[2]), float(args[3]))
+ pt.handle_right = (float(args[3]), float(args[5]))
+ return pt
#
-# parseAnimationData(rna, tokens):
-# parseDriver(drv, args, tokens):
-# parseDriverVariable(var, args, tokens):
+# parseAnimationData(rna, tokens):
+# parseDriver(drv, args, tokens):
+# parseDriverVariable(var, args, tokens):
#
def parseAnimationData(rna, tokens):
- if 0 and toggle & T_MHX:
- return
- if rna.animation_data == None:
- rna.animation_data_create()
- adata = rna.animation_data
- for (key, val, sub) in tokens:
- if key == 'FCurve':
- fcu = parseAnimDataFCurve(adata, rna, val, sub)
- else:
- defaultKey(key, val, sub, 'adata', [], globals(), locals())
- return adata
+ if 0 and toggle & T_MHX:
+ return
+ if rna.animation_data == None:
+ rna.animation_data_create()
+ adata = rna.animation_data
+ for (key, val, sub) in tokens:
+ if key == 'FCurve':
+ fcu = parseAnimDataFCurve(adata, rna, val, sub)
+ else:
+ defaultKey(key, val, sub, 'adata', [], globals(), locals())
+ return adata
def parseAnimDataFCurve(adata, rna, args, tokens):
- if invalid(args[2]):
- return
- dataPath = args[0]
- index = int(args[1])
- # print("parseAnimDataFCurve", adata, dataPath, index)
- for (key, val, sub) in tokens:
- if key == 'Driver':
- fcu = parseDriver(adata, dataPath, index, rna, val, sub)
- elif key == 'FModifier':
- parseFModifier(fcu, val, sub)
- else:
- defaultKey(key, val, sub, 'fcu', [], globals(), locals())
- return fcu
+ if invalid(args[2]):
+ return
+ dataPath = args[0]
+ index = int(args[1])
+ # print("parseAnimDataFCurve", adata, dataPath, index)
+ for (key, val, sub) in tokens:
+ if key == 'Driver':
+ fcu = parseDriver(adata, dataPath, index, rna, val, sub)
+ elif key == 'FModifier':
+ parseFModifier(fcu, val, sub)
+ else:
+ defaultKey(key, val, sub, 'fcu', [], globals(), locals())
+ return fcu
"""
- fcurve = con.driver_add("influence", 0)
- driver = fcurve.driver
- driver.type = 'AVERAGE'
+ fcurve = con.driver_add("influence", 0)
+ driver = fcurve.driver
+ driver.type = 'AVERAGE'
"""
def parseDriver(adata, dataPath, index, rna, args, tokens):
- if dataPath[-1] == ']':
- words = dataPath.split(']')
- expr = "rna." + words[0] + ']'
- pwords = words[1].split('"')
- prop = pwords[1]
- # print("prop", expr, prop)
- bone = eval(expr)
- return None
- else:
- words = dataPath.split('.')
- channel = words[-1]
- expr = "rna"
- for n in range(len(words)-1):
- expr += "." + words[n]
- expr += ".driver_add('%s', index)" % channel
-
- # print("expr", rna, expr)
- fcu = eval(expr)
- drv = fcu.driver
- drv.type = args[0]
- for (key, val, sub) in tokens:
- if key == 'DriverVariable':
- var = parseDriverVariable(drv, rna, val, sub)
- else:
- defaultKey(key, val, sub, 'drv', [], globals(), locals())
- return fcu
+ if dataPath[-1] == ']':
+ words = dataPath.split(']')
+ expr = "rna." + words[0] + ']'
+ pwords = words[1].split('"')
+ prop = pwords[1]
+ # print("prop", expr, prop)
+ bone = eval(expr)
+ return None
+ else:
+ words = dataPath.split('.')
+ channel = words[-1]
+ expr = "rna"
+ for n in range(len(words)-1):
+ expr += "." + words[n]
+ expr += ".driver_add('%s', index)" % channel
+
+ # print("expr", rna, expr)
+ fcu = eval(expr)
+ drv = fcu.driver
+ drv.type = args[0]
+ for (key, val, sub) in tokens:
+ if key == 'DriverVariable':
+ var = parseDriverVariable(drv, rna, val, sub)
+ else:
+ defaultKey(key, val, sub, 'drv', [], globals(), locals())
+ return fcu
def parseDriverVariable(drv, rna, args, tokens):
- var = drv.variables.new()
- var.name = args[0]
- var.type = args[1]
- nTarget = 0
- # print("var", var, var.name, var.type)
- for (key, val, sub) in tokens:
- if key == 'Target':
- parseDriverTarget(var, nTarget, rna, val, sub)
- nTarget += 1
- else:
- defaultKey(key, val, sub, 'var', [], globals(), locals())
- return var
+ var = drv.variables.new()
+ var.name = args[0]
+ var.type = args[1]
+ nTarget = 0
+ # print("var", var, var.name, var.type)
+ for (key, val, sub) in tokens:
+ if key == 'Target':
+ parseDriverTarget(var, nTarget, rna, val, sub)
+ nTarget += 1
+ else:
+ defaultKey(key, val, sub, 'var', [], globals(), locals())
+ return var
def parseFModifier(fcu, args, tokens):
- #fmod = fcu.modifiers.new()
- fmod = fcu.modifiers[0]
- #fmod.type = args[0]
- #print("fmod", fmod, fmod.type)
- for (key, val, sub) in tokens:
- defaultKey(key, val, sub, 'fmod', [], globals(), locals())
- return fmod
+ #fmod = fcu.modifiers.new()
+ fmod = fcu.modifiers[0]
+ #fmod.type = args[0]
+ #print("fmod", fmod, fmod.type)
+ for (key, val, sub) in tokens:
+ defaultKey(key, val, sub, 'fmod', [], globals(), locals())
+ return fmod
"""
- var = driver.variables.new()
- var.name = target_bone
- var.targets[0].id_type = 'OBJECT'
- var.targets[0].id = obj
- var.targets[0].rna_path = driver_path
+ var = driver.variables.new()
+ var.name = target_bone
+ var.targets[0].id_type = 'OBJECT'
+ var.targets[0].id = obj
+ var.targets[0].rna_path = driver_path
"""
def parseDriverTarget(var, nTarget, rna, args, tokens):
- targ = var.targets[nTarget]
- # targ.rna_path = args[0]
- # targ.id_type = args[1]
- targ.id = loadedData['Object'][args[0]]
- for (key, val, sub) in tokens:
- defaultKey(key, val, sub, 'targ', [], globals(), locals())
- #print("Targ", targ, targ.id, targ.data_path, targ.id_type, targ.bone_target, targ.use_local_space_transform)
- return targ
+ targ = var.targets[nTarget]
+ # targ.rna_path = args[0]
+ # targ.id_type = args[1]
+ targ.id = loadedData['Object'][args[0]]
+ for (key, val, sub) in tokens:
+ defaultKey(key, val, sub, 'targ', [], globals(), locals())
+ #print("Targ", targ, targ.id, targ.data_path, targ.id_type, targ.bone_target, targ.use_local_space_transform)
+ return targ
-
+
#
-# parseMaterial(args, ext, tokens):
-# parseMTex(mat, args, tokens):
-# parseTexture(args, tokens):
+# parseMaterial(args, ext, tokens):
+# parseMTex(mat, args, tokens):
+# parseTexture(args, tokens):
#
def parseMaterial(args, tokens):
- global todo
- name = args[0]
- #print("Parse material "+name)
- mat = bpy.data.materials.new(name)
- if mat == None:
- return None
- loadedData['Material'][name] = mat
- #print("Material %s %s %s" % (mat, name, loadedData['Material'][name]))
- for (key, val, sub) in tokens:
- if key == 'MTex':
- parseMTex(mat, val, sub)
- elif key == 'Ramp':
- parseRamp(mat, val, sub)
- elif key == 'SSS':
- parseSSS(mat, val, sub)
- elif key == 'Strand':
- parseStrand(mat, val, sub)
- else:
- exclude = ['specular_intensity', 'use_tangent_shading']
- defaultKey(key, val, sub, 'mat', [], globals(), locals())
- #print("Done ", mat)
-
- return mat
+ global todo
+ name = args[0]
+ #print("Parse material "+name)
+ mat = bpy.data.materials.new(name)
+ if mat == None:
+ return None
+ loadedData['Material'][name] = mat
+ #print("Material %s %s %s" % (mat, name, loadedData['Material'][name]))
+ for (key, val, sub) in tokens:
+ if key == 'MTex':
+ parseMTex(mat, val, sub)
+ elif key == 'Ramp':
+ parseRamp(mat, val, sub)
+ elif key == 'SSS':
+ parseSSS(mat, val, sub)
+ elif key == 'Strand':
+ parseStrand(mat, val, sub)
+ else:
+ exclude = ['specular_intensity', 'use_tangent_shading']
+ defaultKey(key, val, sub, 'mat', [], globals(), locals())
+ #print("Done ", mat)
+
+ return mat
def parseMTex(mat, args, tokens):
- global todo
- index = int(args[0])
- texname = args[1]
- texco = args[2]
- mapto = args[3]
+ global todo
+ index = int(args[0])
+ texname = args[1]
+ texco = args[2]
+ mapto = args[3]
- mat.add_texture(texture = loadedData['Texture'][texname], texture_coordinates = texco, map_to = mapto)
- mtex = mat.texture_slots[index]
- #mat.use_textures[index] = Bool(use)
+ mat.add_texture(texture = loadedData['Texture'][texname], texture_coordinates = texco, map_to = mapto)
+ mtex = mat.texture_slots[index]
+ #mat.use_textures[index] = Bool(use)
- for (key, val, sub) in tokens:
- defaultKey(key, val, sub, "mtex", [], globals(), locals())
+ for (key, val, sub) in tokens:
+ defaultKey(key, val, sub, "mtex", [], globals(), locals())
- return mtex
+ return mtex
def parseTexture(args, tokens):
- global todo
- if verbosity > 2:
- print( "Parsing texture %s" % args )
- tex = bpy.data.textures.new(name=args[0], type=args[1])
- loadedData['Texture'][name] = tex
-
- for (key, val, sub) in tokens:
- if key == 'Image':
- try:
- imgName = val[0]
- img = loadedData['Image'][imgName]
- tex.image = img
- except:
- msg = "Unable to load image '%s'" % val[0]
- elif key == 'Ramp':
- parseRamp(tex, val, sub)
- else:
- defaultKey(key, val, sub, "tex", ['use_nodes', 'use_textures', 'contrast'], globals(), locals())
-
- return tex
+ global todo
+ if verbosity > 2:
+ print( "Parsing texture %s" % args )
+ tex = bpy.data.textures.new(name=args[0], type=args[1])
+ loadedData['Texture'][name] = tex
+
+ for (key, val, sub) in tokens:
+ if key == 'Image':
+ try:
+ imgName = val[0]
+ img = loadedData['Image'][imgName]
+ tex.image = img
+ except:
+ msg = "Unable to load image '%s'" % val[0]
+ elif key == 'Ramp':
+ parseRamp(tex, val, sub)
+ else:
+ defaultKey(key, val, sub, "tex", ['use_nodes', 'use_textures', 'contrast'], globals(), locals())
+
+ return tex
def parseRamp(data, args, tokens):
- nvar = "data.%s" % args[0]
- use = "data.use_%s = True" % args[0]
- exec(use)
- ramp = eval(nvar)
- elts = ramp.elements
- n = 0
- for (key, val, sub) in tokens:
- # print("Ramp", key, val)
- if key == 'Element':
- elts[n].color = eval(val[0])
- elts[n].position = eval(val[1])
- n += 1
- else:
- defaultKey(key, val, sub, "tex", ['use_nodes', 'use_textures', 'contrast'], globals(), locals())
-
+ nvar = "data.%s" % args[0]
+ use = "data.use_%s = True" % args[0]
+ exec(use)
+ ramp = eval(nvar)
+ elts = ramp.elements
+ n = 0
+ for (key, val, sub) in tokens:
+ # print("Ramp", key, val)
+ if key == 'Element':
+ elts[n].color = eval(val[0])
+ elts[n].position = eval(val[1])
+ n += 1
+ else:
+ defaultKey(key, val, sub, "tex", ['use_nodes', 'use_textures', 'contrast'], globals(), locals())
+
def parseSSS(mat, args, tokens):
- sss = mat.subsurface_scattering
- for (key, val, sub) in tokens:
- defaultKey(key, val, sub, "sss", [], globals(), locals())
+ sss = mat.subsurface_scattering
+ for (key, val, sub) in tokens:
+ defaultKey(key, val, sub, "sss", [], globals(), locals())
def parseStrand(mat, args, tokens):
- strand = mat.strand
- for (key, val, sub) in tokens:
- defaultKey(key, val, sub, "strand", [], globals(), locals())
+ strand = mat.strand
+ for (key, val, sub) in tokens:
+ defaultKey(key, val, sub, "strand", [], globals(), locals())
#
-# doLoadImage(filepath):
-# loadImage(filepath):
-# parseImage(args, tokens):
+# doLoadImage(filepath):
+# loadImage(filepath):
+# parseImage(args, tokens):
#
-def doLoadImage(filepath):
- path1 = os.path.expanduser(filepath)
- file1 = os.path.realpath(path1)
- if os.path.isfile(file1):
- print( "Found file "+file1 )
- try:
- img = bpy.data.images.load(file1)
- return img
- except:
- print( "Cannot read image" )
- return None
- else:
- print( "No file "+file1 )
- return None
+def doLoadImage(filepath):
+ path1 = os.path.expanduser(filepath)
+ file1 = os.path.realpath(path1)
+ if os.path.isfile(file1):
+ print( "Found file "+file1 )
+ try:
+ img = bpy.data.images.load(file1)
+ return img
+ except:
+ print( "Cannot read image" )
+ return None
+ else:
+ print( "No file "+file1 )
+ return None
def loadImage(filepath):
- global TexDir, warnedTextureDir, loadedData
-
- texDir = os.path.expanduser(TexDir)
- path1 = os.path.expanduser(filepath)
- file1 = os.path.realpath(path1)
- (path, filename) = os.path.split(file1)
- (name, ext) = os.path.splitext(filename)
- print( "Loading ", filepath, " = ", filename )
-
- # img = doLoadImage(texDir+"/"+name+".png")
- # if img:
- # return img
-
- img = doLoadImage(texDir+"/"+filename)
- if img:
- return img
-
- # img = doLoadImage(path+"/"+name+".png")
- # if img:
- # return img
-
- img = doLoadImage(path+"/"+filename)
- if img:
- return img
-
- if warnedTextureDir:
- return None
- warnedTextureDir = True
- return None
- TexDir = Draw.PupStrInput("TexDir? ", path, 100)
-
- texDir = os.path.expanduser(TexDir)
- img = doLoadImage(texDir+"/"+name+".png")
- if img:
- return img
-
- img = doLoadImage(TexDir+"/"+filename)
- return img
-
+ global TexDir, warnedTextureDir, loadedData
+
+ texDir = os.path.expanduser(TexDir)
+ path1 = os.path.expanduser(filepath)
+ file1 = os.path.realpath(path1)
+ (path, filename) = os.path.split(file1)
+ (name, ext) = os.path.splitext(filename)
+ print( "Loading ", filepath, " = ", filename )
+
+ # img = doLoadImage(texDir+"/"+name+".png")
+ # if img:
+ # return img
+
+ img = doLoadImage(texDir+"/"+filename)
+ if img:
+ return img
+
+ # img = doLoadImage(path+"/"+name+".png")
+ # if img:
+ # return img
+
+ img = doLoadImage(path+"/"+filename)
+ if img:
+ return img
+
+ if warnedTextureDir:
+ return None
+ warnedTextureDir = True
+ return None
+ TexDir = Draw.PupStrInput("TexDir? ", path, 100)
+
+ texDir = os.path.expanduser(TexDir)
+ img = doLoadImage(texDir+"/"+name+".png")
+ if img:
+ return img
+
+ img = doLoadImage(TexDir+"/"+filename)
+ return img
+
def parseImage(args, tokens):
- global todo
- imgName = args[0]
- img = None
- for (key, val, sub) in tokens:
- if key == 'Filename':
- filename = val[0]
- for n in range(1,len(val)):
- filename += " " + val[n]
- img = loadImage(filename)
- if img == None:
- return None
- img.name = imgName
- else:
- defaultKey(key, val, sub, "img", ['depth', 'is_dirty', 'has_data', 'size', 'type'], globals(), locals())
- print ("Image %s" % img )
- loadedData['Image'][imgName] = img
- return img
-
-#
-# parseObject(args, tokens):
-# createObject(type, name, data, datName):
-# createObjectAndData(args, typ):
-#
-
+ global todo
+ imgName = args[0]
+ img = None
+ for (key, val, sub) in tokens:
+ if key == 'Filename':
+ filename = val[0]
+ for n in range(1,len(val)):
+ filename += " " + val[n]
+ img = loadImage(filename)
+ if img == None:
+ return None
+ img.name = imgName
+ else:
+ defaultKey(key, val, sub, "img", ['depth', 'is_dirty', 'has_data', 'size', 'type'], globals(), locals())
+ print ("Image %s" % img )
+ loadedData['Image'][imgName] = img
+ return img
+
+#
+# parseObject(args, tokens):
+# createObject(type, name, data, datName):
+# createObjectAndData(args, typ):
+#
+
def parseObject(args, tokens):
- if verbosity > 2:
- print( "Parsing object %s" % args )
- name = args[0]
- typ = args[1]
- datName = args[2]
- try:
- data = loadedData[typ.capitalize()][datName]
- except:
- data = None
-
- if data == None and typ != 'EMPTY':
- print("Failed to find data: %s %s %s" % (name, typ, datName))
- return
-
- try:
- ob = loadedData['Object'][name]
- bpy.context.scene.objects.active = ob
- #print("Found data")
- except:
- ob = createObject(typ, name, data, datName)
- if bpy.context.object != ob:
- print("Context", ob, bpy.context.object, bpy.context.scene.objects.active)
- # ob = foo
-
- for (key, val, sub) in tokens:
- if key == 'Modifier':
- parseModifier(ob, val, sub)
- elif key == 'Constraint':
- parseConstraint(ob.constraints, val, sub)
- elif key == 'AnimationData':
- if eval(val[0]):
- parseAnimationData(ob, sub)
- elif key == 'ParticleSystem':
- parseParticleSystem(ob, val, sub)
- else:
- defaultKey(key, val, sub, "ob", ['type', 'data'], globals(), locals())
-
- # Needed for updating layers
- bpy.ops.object.mode_set(mode='EDIT')
- bpy.ops.object.mode_set(mode='OBJECT')
- return
+ if verbosity > 2:
+ print( "Parsing object %s" % args )
+ name = args[0]
+ typ = args[1]
+ datName = args[2]
+ try:
+ data = loadedData[typ.capitalize()][datName]
+ except:
+ data = None
+
+ if data == None and typ != 'EMPTY':
+ print("Failed to find data: %s %s %s" % (name, typ, datName))
+ return
+
+ try:
+ ob = loadedData['Object'][name]
+ bpy.context.scene.objects.active = ob
+ #print("Found data")
+ except:
+ ob = createObject(typ, name, data, datName)
+ if bpy.context.object != ob:
+ print("Context", ob, bpy.context.object, bpy.context.scene.objects.active)
+ # ob = foo
+
+ for (key, val, sub) in tokens:
+ if key == 'Modifier':
+ parseModifier(ob, val, sub)
+ elif key == 'Constraint':
+ parseConstraint(ob.constraints, val, sub)
+ elif key == 'AnimationData':
+ if eval(val[0]):
+ parseAnimationData(ob, sub)
+ elif key == 'ParticleSystem':
+ parseParticleSystem(ob, val, sub)
+ else:
+ defaultKey(key, val, sub, "ob", ['type', 'data'], globals(), locals())
+
+ # Needed for updating layers
+ bpy.ops.object.mode_set(mode='EDIT')
+ bpy.ops.object.mode_set(mode='OBJECT')
+ return
def createObject(typ, name, data, datName):
- #print( "Creating object %s %s %s" % (typ, name, data) )
- ob = bpy.data.objects.new(name, data)
- loadedData[typ][datName] = data
- loadedData['Object'][name] = ob
- return ob
-
+ #print( "Creating object %s %s %s" % (typ, name, data) )
+ ob = bpy.data.objects.new(name, data)
+ loadedData[typ][datName] = data
+ loadedData['Object'][name] = ob
+ return ob
+
def linkObject(ob, data):
- #print("Data", data, ob.data)
- if data and ob.data == None:
- ob.data = data
- scn = bpy.context.scene
- scn.objects.link(ob)
- scn.objects.active = ob
- #print("Linked object", ob)
- #print("Scene", scn)
- #print("Active", scn.objects.active)
- #print("Context", bpy.context.object)
- return ob
+ #print("Data", data, ob.data)
+ if data and ob.data == None:
+ ob.data = data
+ scn = bpy.context.scene
+ scn.objects.link(ob)
+ scn.objects.active = ob
+ #print("Linked object", ob)
+ #print("Scene", scn)
+ #print("Active", scn.objects.active)
+ #print("Context", bpy.context.object)
+ return ob
def createObjectAndData(args, typ):
- datName = args[0]
- obName = args[1]
- bpy.ops.object.add(type=typ.upper())
- ob = bpy.context.object
- ob.name = obName
- ob.data.name = datName
- loadedData[typ][datName] = ob.data
- loadedData['Object'][obName] = ob
- return ob.data
+ datName = args[0]
+ obName = args[1]
+ bpy.ops.object.add(type=typ.upper())
+ ob = bpy.context.object
+ ob.name = obName
+ ob.data.name = datName
+ loadedData[typ][datName] = ob.data
+ loadedData['Object'][obName] = ob
+ return ob.data
#
-# parseModifier(ob, args, tokens):
+# parseModifier(ob, args, tokens):
#
def parseModifier(ob, args, tokens):
- name = args[0]
- typ = args[1]
- if typ == 'PARTICLE_SYSTEM':
- return None
- mod = ob.modifiers.new(name, typ)
- for (key, val, sub) in tokens:
- defaultKey(key, val, sub, 'mod', [], globals(), locals())
- return mod
+ name = args[0]
+ typ = args[1]
+ if typ == 'PARTICLE_SYSTEM':
+ return None
+ mod = ob.modifiers.new(name, typ)
+ for (key, val, sub) in tokens:
+ defaultKey(key, val, sub, 'mod', [], globals(), locals())
+ return mod
#
-# parseParticleSystem(ob, args, tokens):
-# parseParticles(particles, args, tokens):
-# parseParticle(par, args, tokens):
+# parseParticleSystem(ob, args, tokens):
+# parseParticles(particles, args, tokens):
+# parseParticle(par, args, tokens):
#
def parseParticleSystem(ob, args, tokens):
- print(ob, bpy.context.object)
- pss = ob.particle_systems
- print(pss, pss.values())
- name = args[0]
- typ = args[1]
- #psys = pss.new(name, typ)
- bpy.ops.object.particle_system_add()
- print(pss, pss.values())
- psys = pss[-1]
- psys.name = name
- psys.settings.type = typ
- loadedData['ParticleSystem'][name] = psys
- print("Psys", psys)
-
- for (key, val, sub) in tokens:
- if key == 'Particles':
- parseParticles(psys, val, sub)
- else:
- defaultKey(key, val, sub, 'psys', [], globals(), locals())
- return psys
+ print(ob, bpy.context.object)
+ pss = ob.particle_systems
+ print(pss, pss.values())
+ name = args[0]
+ typ = args[1]
+ #psys = pss.new(name, typ)
+ bpy.ops.object.particle_system_add()
+ print(pss, pss.values())
+ psys = pss[-1]
+ psys.name = name
+ psys.settings.type = typ
+ loadedData['ParticleSystem'][name] = psys
+ print("Psys", psys)
+
+ for (key, val, sub) in tokens:
+ if key == 'Particles':
+ parseParticles(psys, val, sub)
+ else:
+ defaultKey(key, val, sub, 'psys', [], globals(), locals())
+ return psys
def parseParticles(psys, args, tokens):
- particles = psys.particles
- bpy.ops.particle.particle_edit_toggle()
- n = 0
- for (key, val, sub) in tokens:
- if key == 'Particle':
- parseParticle(particles[n], val, sub)
- n += 1
- else:
- for par in particles:
- defaultKey(key, val, sub, 'par', [], globals(), locals())
- bpy.ops.particle.particle_edit_toggle()
- return particles
+ particles = psys.particles
+ bpy.ops.particle.particle_edit_toggle()
+ n = 0
+ for (key, val, sub) in tokens:
+ if key == 'Particle':
+ parseParticle(particles[n], val, sub)
+ n += 1
+ else:
+ for par in particles:
+ defaultKey(key, val, sub, 'par', [], globals(), locals())
+ bpy.ops.particle.particle_edit_toggle()
+ return particles
def parseParticle(par, args, tokens):
- n = 0
- for (key, val, sub) in tokens:
- if key == 'h':
- h = par.is_hair[n]
- h.location = eval(val[0])
- h.time = int(val[1])
- h.weight = float(val[2])
- n += 1
- elif key == 'location':
- par.location = eval(val[0])
- return
+ n = 0
+ for (key, val, sub) in tokens:
+ if key == 'h':
+ h = par.is_hair[n]
+ h.location = eval(val[0])
+ h.time = int(val[1])
+ h.weight = float(val[2])
+ n += 1
+ elif key == 'location':
+ par.location = eval(val[0])
+ return
#
-# unpackList(list_of_tuples):
+# unpackList(list_of_tuples):
#
def unpackList(list_of_tuples):
- l = []
- for t in list_of_tuples:
- l.extend(t)
- return l
+ l = []
+ for t in list_of_tuples:
+ l.extend(t)
+ return l
#
-# parseMesh (args, tokens):
+# parseMesh (args, tokens):
#
def parseMesh (args, tokens):
- global todo
- if verbosity > 2:
- print( "Parsing mesh %s" % args )
-
- mename = args[0]
- obname = args[1]
- me = bpy.data.meshes.new(mename)
- ob = createObject('Mesh', obname, me, mename)
-
- verts = []
- edges = []
- faces = []
- vertsTex = []
- texFaces = []
-
- for (key, val, sub) in tokens:
- if key == 'Verts':
- verts = parseVerts(sub)
- elif key == 'Edges':
- edges = parseEdges(sub)
- elif key == 'Faces':
- faces = parseFaces(sub)
-
- if faces:
- #x = me.from_pydata(verts, [], faces)
- me.vertices.add(len(verts))
- me.faces.add(len(faces))
- me.vertices.foreach_set("co", unpackList(verts))
- me.faces.foreach_set("vertices_raw", unpackList(faces))
- else:
- #x = me.from_pydata(verts, edges, [])
- me.vertices.add(len(verts))
- me.edges.add(len(edges))
- me.vertices.foreach_set("co", unpackList(verts))
- me.edges.foreach_set("vertices", unpackList(edges))
- #print(x)
- me.update()
- #print(me)
- linkObject(ob, me)
-
- mats = []
- for (key, val, sub) in tokens:
- if key in ('Verts', 'Edges'):
- pass
- elif key == 'Faces':
- parseFaces2(sub, me)
- elif key == 'MeshTextureFaceLayer':
- parseUvTexture(val, sub, me)
- elif key == 'MeshColorLayer':
- parseVertColorLayer(val, sub, me)
- elif key == 'VertexGroup':
- parseVertexGroup(ob, me, val, sub)
- elif key == 'ShapeKeys':
- parseShapeKeys(ob, me, val, sub)
- elif key == 'Material':
- try:
- me.materials.link(loadedData['Material'][val[0]])
- except:
- print("Could not add material", val[0])
- else:
- defaultKey(key, val, sub, "me", [], globals(), locals())
-
- return me
-
-#
-# parseVerts(tokens):
-# parseEdges(tokens):
-# parseFaces(tokens):
-# parseFaces2(tokens, me):
+ global todo
+ if verbosity > 2:
+ print( "Parsing mesh %s" % args )
+
+ mename = args[0]
+ obname = args[1]
+ me = bpy.data.meshes.new(mename)
+ ob = createObject('Mesh', obname, me, mename)
+
+ verts = []
+ edges = []
+ faces = []
+ vertsTex = []
+ texFaces = []
+
+ for (key, val, sub) in tokens:
+ if key == 'Verts':
+ verts = parseVerts(sub)
+ elif key == 'Edges':
+ edges = parseEdges(sub)
+ elif key == 'Faces':
+ faces = parseFaces(sub)
+
+ if faces:
+ #x = me.from_pydata(verts, [], faces)
+ me.vertices.add(len(verts))
+ me.faces.add(len(faces))
+ me.vertices.foreach_set("co", unpackList(verts))
+ me.faces.foreach_set("vertices_raw", unpackList(faces))
+ else:
+ #x = me.from_pydata(verts, edges, [])
+ me.vertices.add(len(verts))
+ me.edges.add(len(edges))
+ me.vertices.foreach_set("co", unpackList(verts))
+ me.edges.foreach_set("vertices", unpackList(edges))
+ #print(x)
+ me.update()
+ #print(me)
+ linkObject(ob, me)
+
+ mats = []
+ for (key, val, sub) in tokens:
+ if key in ('Verts', 'Edges'):
+ pass
+ elif key == 'Faces':
+ parseFaces2(sub, me)
+ elif key == 'MeshTextureFaceLayer':
+ parseUvTexture(val, sub, me)
+ elif key == 'MeshColorLayer':
+ parseVertColorLayer(val, sub, me)
+ elif key == 'VertexGroup':
+ parseVertexGroup(ob, me, val, sub)
+ elif key == 'ShapeKeys':
+ parseShapeKeys(ob, me, val, sub)
+ elif key == 'Material':
+ try:
+ me.materials.link(loadedData['Material'][val[0]])
+ except:
+ print("Could not add material", val[0])
+ else:
+ defaultKey(key, val, sub, "me", [], globals(), locals())
+
+ return me
+
+#
+# parseVerts(tokens):
+# parseEdges(tokens):
+# parseFaces(tokens):
+# parseFaces2(tokens, me):
#
def parseVerts(tokens):
- verts = []
- for (key, val, sub) in tokens:
- if key == 'v':
- verts.append( (float(val[0]), float(val[1]), float(val[2])) )
- return verts
+ verts = []
+ for (key, val, sub) in tokens:
+ if key == 'v':
+ verts.append( (float(val[0]), float(val[1]), float(val[2])) )
+ return verts
def parseEdges(tokens):
- edges = []
- for (key, val, sub) in tokens:
- if key == 'e':
- edges.append((int(val[0]), int(val[1])))
- return edges
-
-def parseFaces(tokens):
- faces = []
- for (key, val, sub) in tokens:
- if key == 'f':
- if len(val) == 3:
- face = [int(val[0]), int(val[1]), int(val[2]), 0]
- elif len(val) == 4:
- face = [int(val[0]), int(val[1]), int(val[2]), int(val[3])]
- faces.append(face)
- return faces
-
-def parseFaces2(tokens, me):
- n = 0
- for (key, val, sub) in tokens:
- if key == 'ft':
- f = me.faces[n]
- f.material_index = int(val[0])
- f.use_smooth = int(val[1])
- n += 1
- elif key == 'ftall':
- mat = int(val[0])
- smooth = int(val[1])
- for f in me.faces:
- f.material_index = mat
- f.use_smooth = smooth
- return
-
-
-#
-# parseUvTexture(args, tokens, me):
-# parseUvTexData(args, tokens, uvdata):
+ edges = []
+ for (key, val, sub) in tokens:
+ if key == 'e':
+ edges.append((int(val[0]), int(val[1])))
+ return edges
+
+def parseFaces(tokens):
+ faces = []
+ for (key, val, sub) in tokens:
+ if key == 'f':
+ if len(val) == 3:
+ face = [int(val[0]), int(val[1]), int(val[2]), 0]
+ elif len(val) == 4:
+ face = [int(val[0]), int(val[1]), int(val[2]), int(val[3])]
+ faces.append(face)
+ return faces
+
+def parseFaces2(tokens, me):
+ n = 0
+ for (key, val, sub) in tokens:
+ if key == 'ft':
+ f = me.faces[n]
+ f.material_index = int(val[0])
+ f.use_smooth = int(val[1])
+ n += 1
+ elif key == 'ftall':
+ mat = int(val[0])
+ smooth = int(val[1])
+ for f in me.faces:
+ f.material_index = mat
+ f.use_smooth = smooth
+ return
+
+
+#
+# parseUvTexture(args, tokens, me):
+# parseUvTexData(args, tokens, uvdata):
#
def parseUvTexture(args, tokens, me):
- name = args[0]
- uvtex = me.uv_textures.new(name)
- loadedData['MeshTextureFaceLayer'][name] = uvtex
- for (key, val, sub) in tokens:
- if key == 'Data':
- parseUvTexData(val, sub, uvtex.data)
- else:
- defaultKey(key, val, sub, "uvtex", [], globals(), locals())
- return
+ name = args[0]
+ uvtex = me.uv_textures.new(name)
+ loadedData['MeshTextureFaceLayer'][name] = uvtex
+ for (key, val, sub) in tokens:
+ if key == 'Data':
+ parseUvTexData(val, sub, uvtex.data)
+ else:
+ defaultKey(key, val, sub, "uvtex", [], globals(), locals())
+ return
def parseUvTexData(args, tokens, data):
- n = 0
- for (key, val, sub) in tokens:
- if key == 'vt':
- data[n].uv1 = (float(val[0]), float(val[1]))
- data[n].uv2 = (float(val[2]), float(val[3]))
- data[n].uv3 = (float(val[4]), float(val[5]))
- if len(val) > 6:
- data[n].uv4 = (float(val[6]), float(val[7]))
- n += 1
- else:
- pass
- #for i in range(n):
- # defaultKey(key, val, sub, "data[i]", [], globals(), locals())
- return
-
-#
-# parseVertColorLayer(args, tokens, me):
-# parseVertColorData(args, tokens, data):
+ n = 0
+ for (key, val, sub) in tokens:
+ if key == 'vt':
+ data[n].uv1 = (float(val[0]), float(val[1]))
+ data[n].uv2 = (float(val[2]), float(val[3]))
+ data[n].uv3 = (float(val[4]), float(val[5]))
+ if len(val) > 6:
+ data[n].uv4 = (float(val[6]), float(val[7]))
+ n += 1
+ else:
+ pass
+ #for i in range(n):
+ # defaultKey(key, val, sub, "data[i]", [], globals(), locals())
+ return
+
+#
+# parseVertColorLayer(args, tokens, me):
+# parseVertColorData(args, tokens, data):
#
def parseVertColorLayer(args, tokens, me):
- name = args[0]
- print("VertColorLayer", name)
- vcol = me.vertex_colors.new(name)
- loadedData['MeshColorLayer'][name] = vcol
- for (key, val, sub) in tokens:
- if key == 'Data':
- parseVertColorData(val, sub, vcol.data)
- else:
- defaultKey(key, val, sub, "vcol", [], globals(), locals())
- return
+ name = args[0]
+ print("VertColorLayer", name)
+ vcol = me.vertex_colors.new(name)
+ loadedData['MeshColorLayer'][name] = vcol
+ for (key, val, sub) in tokens:
+ if key == 'Data':
+ parseVertColorData(val, sub, vcol.data)
+ else:
+ defaultKey(key, val, sub, "vcol", [], globals(), locals())
+ return
def parseVertColorData(args, tokens, data):
- n = 0
- for (key, val, sub) in tokens:
- if key == 'cv':
- data[n].color1 = eval(val[0])
- data[n].color2 = eval(val[1])
- data[n].color3 = eval(val[2])
- data[n].color4 = eval(val[3])
- n += 1
- return
+ n = 0
+ for (key, val, sub) in tokens:
+ if key == 'cv':
+ data[n].color1 = eval(val[0])
+ data[n].color2 = eval(val[1])
+ data[n].color3 = eval(val[2])
+ data[n].color4 = eval(val[3])
+ n += 1
+ return
#
-# parseVertexGroup(ob, me, args, tokens):
+# parseVertexGroup(ob, me, args, tokens):
#
def parseVertexGroup(ob, me, args, tokens):
- global toggle
- if verbosity > 2:
- print( "Parsing vertgroup %s" % args )
- grpName = args[0]
- try:
- res = eval(args[1])
- except:
- res = True
- if not res:
- return
-
- if (toggle & T_Armature) or (grpName in ['Eye_L', 'Eye_R', 'Gums', 'Head', 'Jaw', 'Left', 'Middle', 'Right', 'Scalp']):
- group = ob.vertex_groups.new(grpName)
- group.name = grpName
- loadedData['VertexGroup'][grpName] = group
- for (key, val, sub) in tokens:
- if key == 'wv':
- ob.vertex_groups.assign(int(val[0]), group, float(val[1]), 'REPLACE')
- return
-
-
-#
-# parseShapeKeys(ob, me, args, tokens):
-# parseShapeKey(ob, me, args, tokens):
-# addShapeKey(ob, name, vgroup, tokens):
-# doShape(name):
+ global toggle
+ if verbosity > 2:
+ print( "Parsing vertgroup %s" % args )
+ grpName = args[0]
+ try:
+ res = eval(args[1])
+ except:
+ res = True
+ if not res:
+ return
+
+ if (toggle & T_Armature) or (grpName in ['Eye_L', 'Eye_R', 'Gums', 'Head', 'Jaw', 'Left', 'Middle', 'Right', 'Scalp']):
+ group = ob.vertex_groups.new(grpName)
+ group.name = grpName
+ loadedData['VertexGroup'][grpName] = group
+ for (key, val, sub) in tokens:
+ if key == 'wv':
+ ob.vertex_groups.assign(int(val[0]), group, float(val[1]), 'REPLACE')
+ return
+
+
+#
+# parseShapeKeys(ob, me, args, tokens):
+# parseShapeKey(ob, me, args, tokens):
+# addShapeKey(ob, name, vgroup, tokens):
+# doShape(name):
#
def doShape(name):
- if (toggle & T_Shape+T_Face) and (name == 'Basis'):
- return True
- else:
- return (toggle & T_Face)
+ if (toggle & T_Shape+T_Face) and (name == 'Basis'):
+ return True
+ else:
+ return (toggle & T_Face)
def parseShapeKeys(ob, me, args, tokens):
- if bpy.context.object == None:
- return
- for (key, val, sub) in tokens:
- if key == 'ShapeKey':
- parseShapeKey(ob, me, val, sub)
- elif key == 'AnimationData':
- if me.shape_keys:
- parseAnimationData(me.shape_keys, sub)
- return
+ if bpy.context.object == None:
+ return
+ for (key, val, sub) in tokens:
+ if key == 'ShapeKey':
+ parseShapeKey(ob, me, val, sub)
+ elif key == 'AnimationData':
+ if me.shape_keys:
+ parseAnimationData(me.shape_keys, sub)
+ return
def parseShapeKey(ob, me, args, tokens):
- if verbosity > 0:
- print( "Parsing ob %s shape %s" % (bpy.context.object, args[0] ))
- name = args[0]
- lr = args[1]
- if invalid(args[2]):
- return
-
- if lr == 'Sym' or toggle & T_Symm:
- addShapeKey(ob, name, None, tokens)
- elif lr == 'LR':
- addShapeKey(ob, name+'_L', 'Left', tokens)
- addShapeKey(ob, name+'_R', 'Right', tokens)
- else:
- raise NameError("ShapeKey L/R %s" % lr)
- return
+ if verbosity > 0:
+ print( "Parsing ob %s shape %s" % (bpy.context.object, args[0] ))
+ name = args[0]
+ lr = args[1]
+ if invalid(args[2]):
+ return
+
+ if lr == 'Sym' or toggle & T_Symm:
+ addShapeKey(ob, name, None, tokens)
+ elif lr == 'LR':
+ addShapeKey(ob, name+'_L', 'Left', tokens)
+ addShapeKey(ob, name+'_R', 'Right', tokens)
+ else:
+ raise NameError("ShapeKey L/R %s" % lr)
+ return
def addShapeKey(ob, name, vgroup, tokens):
- bpy.ops.object.shape_key_add(False)
- skey = ob.active_shape_key
- if name != 'Basis':
- skey.relative_key = loadedData['ShapeKey']['Basis']
- skey.name = name
- if vgroup:
- skey.vertex_group = vgroup
- loadedData['ShapeKey'][name] = skey
+ bpy.ops.object.shape_key_add(False)
+ skey = ob.active_shape_key
+ if name != 'Basis':
+ skey.relative_key = loadedData['ShapeKey']['Basis']
+ skey.name = name
+ if vgroup:
+ skey.vertex_group = vgroup
+ loadedData['ShapeKey'][name] = skey
- for (key, val, sub) in tokens:
- if key == 'sv':
- index = int(val[0])
- pt = skey.data[index].co
- pt[0] += float(val[1])
- pt[1] += float(val[2])
- pt[2] += float(val[3])
- else:
- defaultKey(key, val, sub, "skey", [], globals(), locals())
+ for (key, val, sub) in tokens:
+ if key == 'sv':
+ index = int(val[0])
+ pt = skey.data[index].co
+ pt[0] += float(val[1])
+ pt[1] += float(val[2])
+ pt[2] += float(val[3])
+ else:
+ defaultKey(key, val, sub, "skey", [], globals(), locals())
- return
+ return
-
+
#
-# parseArmature (obName, args, tokens)
+# parseArmature (obName, args, tokens)
#
def parseArmature (args, tokens):
- global toggle, theScale
- if verbosity > 2:
- print( "Parsing armature %s" % args )
-
- amtname = args[0]
- obname = args[1]
- mode = args[2]
-
- if mode == 'Rigify':
- toggle |= T_Rigify
- theScale = 0.1
- return parseRigify(amtname, obname, tokens)
-
- toggle &= ~T_Rigify
- theScale = 1.0
- amt = bpy.data.armatures.new(amtname)
- ob = createObject('Armature', obname, amt, amtname)
-
- linkObject(ob, amt)
- print("Linked")
-
- bpy.ops.object.mode_set(mode='OBJECT')
- bpy.ops.object.mode_set(mode='EDIT')
-
- heads = {}
- tails = {}
- for (key, val, sub) in tokens:
- if key == 'Bone':
- bname = val[0]
- if not invalid(val[1]):
- bone = amt.edit_bones.new(bname)
- parseBone(bone, amt.edit_bones, sub, heads, tails)
- loadedData['Bone'][bname] = bone
- else:
- defaultKey(key, val, sub, "amt", ['MetaRig'], globals(), locals())
- bpy.ops.object.mode_set(mode='OBJECT')
- return amt
-
-#
-# parseRigify(amtname, obname, tokens):
-#
-
-def parseRigify(amtname, obname, tokens):
- (key,val,sub) = tokens[0]
- if key != 'MetaRig':
- raise NameError("Expected MetaRig")
- typ = val[0]
- if typ == "human":
- bpy.ops.object.armature_human_advanced_add()
- else:
- bpy.ops.pose.metarig_sample_add(type = typ)
- ob = bpy.context.scene.objects.active
- amt = ob.data
- loadedData['Rigify'][obname] = ob
- loadedData['Armature'][amtname] = amt
- loadedData['Object'][obname] = ob
- print("Rigify object", ob, amt)
-
- bpy.ops.object.mode_set(mode='OBJECT')
- bpy.ops.object.mode_set(mode='EDIT')
-
- heads = {}
- tails = {}
- for (bname, bone) in amt.edit_bones.items():
- heads[bname] = 10*theScale*bone.head
- tails[bname] = 10*theScale*bone.tail
-
- for (key, val, sub) in tokens:
- if key == 'Bone':
- bname = val[0]
- print("Bone", bname)
- try:
- bone = amt.edit_bones[bname]
- except:
- print("Did not find bone %s" % bname)
- bone = None
- print(" -> ", bone)
- if bone:
- parseBone(bone, amt.edit_bones, sub, heads, tails)
- else:
- defaultKey(key, val, sub, "amt", ['MetaRig'], globals(), locals())
- bpy.ops.object.mode_set(mode='OBJECT')
- return amt
-
-#
-# parseBone(bone, bones, tokens, heads, tails):
+ global toggle, theScale
+ if verbosity > 2:
+ print( "Parsing armature %s" % args )
+
+ amtname = args[0]
+ obname = args[1]
+ mode = args[2]
+
+ if mode == 'Rigify':
+ toggle |= T_Rigify
+ theScale = 0.1
+ return parseRigify(amtname, obname, tokens)
+
+ toggle &= ~T_Rigify
+ theScale = 1.0
+ amt = bpy.data.armatures.new(amtname)
+ ob = createObject('Armature', obname, amt, amtname)
+
+ linkObject(ob, amt)
+ print("Linked")
+
+ bpy.ops.object.mode_set(mode='OBJECT')
+ bpy.ops.object.mode_set(mode='EDIT')
+
+ heads = {}
+ tails = {}
+ for (key, val, sub) in tokens:
+ if key == 'Bone':
+ bname = val[0]
+ if not invalid(val[1]):
+ bone = amt.edit_bones.new(bname)
+ parseBone(bone, amt.edit_bones, sub, heads, tails)
+ loadedData['Bone'][bname] = bone
+ else:
+ defaultKey(key, val, sub, "amt", ['MetaRig'], globals(), locals())
+ bpy.ops.object.mode_set(mode='OBJECT')
+ return amt
+
+#
+# parseRigify(amtname, obname, tokens):
+#
+
+def parseRigify(amtname, obname, tokens):
+ (key,val,sub) = tokens[0]
+ if key != 'MetaRig':
+ raise NameError("Expected MetaRig")
+ typ = val[0]
+ if typ == "human":
+ bpy.ops.object.armature_human_advanced_add()
+ else:
+ bpy.ops.pose.metarig_sample_add(type = typ)
+ ob = bpy.context.scene.objects.active
+ amt = ob.data
+ loadedData['Rigify'][obname] = ob
+ loadedData['Armature'][amtname] = amt
+ loadedData['Object'][obname] = ob
+ print("Rigify object", ob, amt)
+
+ bpy.ops.object.mode_set(mode='OBJECT')
+ bpy.ops.object.mode_set(mode='EDIT')
+
+ heads = {}
+ tails = {}
+ for (bname, bone) in amt.edit_bones.items():
+ heads[bname] = 10*theScale*bone.head
+ tails[bname] = 10*theScale*bone.tail
+
+ for (key, val, sub) in tokens:
+ if key == 'Bone':
+ bname = val[0]
+ print("Bone", bname)
+ try:
+ bone = amt.edit_bones[bname]
+ except:
+ print("Did not find bone %s" % bname)
+ bone = None
+ print(" -> ", bone)
+ if bone:
+ parseBone(bone, amt.edit_bones, sub, heads, tails)
+ else:
+ defaultKey(key, val, sub, "amt", ['MetaRig'], globals(), locals())
+ bpy.ops.object.mode_set(mode='OBJECT')
+ return amt
+
+#
+# parseBone(bone, bones, tokens, heads, tails):
#
def parseBone(bone, bones, tokens, heads, tails):
- global todo
-
- for (key, val, sub) in tokens:
- if key == "head":
- bone.head = (float(val[0]), float(val[1]), float(val[2]))
- elif key == "tail":
- bone.tail = (float(val[0]), float(val[1]), float(val[2]))
- elif key == "head-as":
- target = val[0]
- if val[1] == 'head':
- bone.head = heads[bone.name] + bones[target].head - heads[target]
- elif val[1] == 'tail':
- bone.head = heads[bone.name] + bones[target].tail - tails[target]
- else:
- raise NameError("head-as %s" % val)
- elif key == "tail-as":
- target = val[0]
- if val[1] == 'head':
- bone.tail = tails[bone.name] + bones[target].head - heads[target]
- elif val[1] == 'tail':
- bone.tail = tails[bone.name] + bones[target].tail - tails[target]
- else:
- raise NameError("tail-as %s" % val)
- elif key == 'hide_select':
- pass
- else:
- defaultKey(key, val, sub, "bone", [], globals(), locals())
-
- return bone
-
-#
-# parsePose (args, tokens):
+ global todo
+
+ for (key, val, sub) in tokens:
+ if key == "head":
+ bone.head = (float(val[0]), float(val[1]), float(val[2]))
+ elif key == "tail":
+ bone.tail = (float(val[0]), float(val[1]), float(val[2]))
+ elif key == "head-as":
+ target = val[0]
+ if val[1] == 'head':
+ bone.head = heads[bone.name] + bones[target].head - heads[target]
+ elif val[1] == 'tail':
+ bone.head = heads[bone.name] + bones[target].tail - tails[target]
+ else:
+ raise NameError("head-as %s" % val)
+ elif key == "tail-as":
+ target = val[0]
+ if val[1] == 'head':
+ bone.tail = tails[bone.name] + bones[target].head - heads[target]
+ elif val[1] == 'tail':
+ bone.tail = tails[bone.name] + bones[target].tail - tails[target]
+ else:
+ raise NameError("tail-as %s" % val)
+ elif key == 'hide_select':
+ pass
+ else:
+ defaultKey(key, val, sub, "bone", [], globals(), locals())
+
+ return bone
+
+#
+# parsePose (args, tokens):
#
def parsePose (args, tokens):
- global todo
- if toggle & T_Rigify:
- return
- name = args[0]
- ob = loadedData['Object'][name]
- bpy.context.scene.objects.active = ob
- bpy.ops.object.mode_set(mode='POSE')
- pbones = ob.pose.bones
- nGrps = 0
- for (key, val, sub) in tokens:
- if key == 'Posebone':
- parsePoseBone(pbones, ob, val, sub)
- elif key == 'BoneGroup':
- parseBoneGroup(ob.pose, nGrps, val, sub)
- nGrps += 1
- elif key == 'SetProp':
- bone = val[0]
- prop = val[1]
- value = eval(val[2])
- pb = pbones[bone]
- print("Setting", pb, prop, val)
- pb[prop] = value
- print("Prop set", pb[prop])
- else:
- defaultKey(key, val, sub, "ob.pose", [], globals(), locals())
- bpy.ops.object.mode_set(mode='OBJECT')
- return ob
-
-
-#
-# parsePoseBone(pbones, args, tokens):
-# parseArray(data, exts, args):
+ global todo
+ if toggle & T_Rigify:
+ return
+ name = args[0]
+ ob = loadedData['Object'][name]
+ bpy.context.scene.objects.active = ob
+ bpy.ops.object.mode_set(mode='POSE')
+ pbones = ob.pose.bones
+ nGrps = 0
+ for (key, val, sub) in tokens:
+ if key == 'Posebone':
+ parsePoseBone(pbones, ob, val, sub)
+ elif key == 'BoneGroup':
+ parseBoneGroup(ob.pose, nGrps, val, sub)
+ nGrps += 1
+ elif key == 'SetProp':
+ bone = val[0]
+ prop = val[1]
+ value = eval(val[2])
+ pb = pbones[bone]
+ print("Setting", pb, prop, val)
+ pb[prop] = value
+ print("Prop set", pb[prop])
+ else:
+ defaultKey(key, val, sub, "ob.pose", [], globals(), locals())
+ bpy.ops.object.mode_set(mode='OBJECT')
+ return ob
+
+
+#
+# parsePoseBone(pbones, args, tokens):
+# parseArray(data, exts, args):
#
def parseBoneGroup(pose, nGrps, args, tokens):
- global todo
- return
- print( "Parsing bonegroup %s" % args )
- name = args[0]
- print(dir(pose.bone_groups))
- bg = pose.bone_groups.add()
- print("Created", bg)
- loadedData['BoneGroup'][name] = bg
- for (key, val, sub) in tokens:
- defaultKey(key, val, sub, "bg", [], globals(), locals())
- return
+ global todo
+ return
+ print( "Parsing bonegroup %s" % args )
+ name = args[0]
+ print(dir(pose.bone_groups))
+ bg = pose.bone_groups.add()
+ print("Created", bg)
+ loadedData['BoneGroup'][name] = bg
+ for (key, val, sub) in tokens:
+ defaultKey(key, val, sub, "bg", [], globals(), locals())
+ return
def parsePoseBone(pbones, ob, args, tokens):
- global todo
- #print( "Parsing posebone %s" % args )
- if invalid(args[1]):
- return
- name = args[0]
- pb = pbones[name]
-
- # Make posebone active - don't know how to do this in pose mode
- bpy.ops.object.mode_set(mode='OBJECT')
- ob.data.bones.active = pb.bone
- bpy.ops.object.mode_set(mode='POSE')
-
- for (key, val, sub) in tokens:
- if key == 'Constraint':
- cns = parseConstraint(pb.constraints, val, sub)
- elif key == 'bpyops':
- expr = "bpy.ops.%s" % val[0]
- print(expr)
- print("ob", bpy.context.active_object)
- print("b", bpy.context.active_bone)
- print("pb", bpy.context.active_pose_bone)
- print("md", bpy.context.mode)
- exec(expr)
- print("show_alive")
- elif key == 'ik_dof':
- parseArray(pb, ["lock_ik_x", "lock_ik_y", "lock_ik_z"], val)
- elif key == 'ik_limit':
- parseArray(pb, ["use_ik_limit_x", "use_ik_limit_y", "use_ik_limit_z"], val)
- elif key == 'ik_max':
- parseArray(pb, ["ik_max_x", "ik_max_y", "ik_max_z"], val)
- elif key == 'ik_min':
- parseArray(pb, ["ik_min_x", "ik_min_y", "ik_min_z"], val)
- elif key == 'ik_stiffness':
- parseArray(pb, ["ik_stiffness_x", "ik_stiffness_y", "ik_stiffness_z"], val)
- else:
- defaultKey(key, val, sub, "pb", [], globals(), locals())
- #print("pb %s done" % name)
- return
+ global todo
+ #print( "Parsing posebone %s" % args )
+ if invalid(args[1]):
+ return
+ name = args[0]
+ pb = pbones[name]
+
+ # Make posebone active - don't know how to do this in pose mode
+ bpy.ops.object.mode_set(mode='OBJECT')
+ ob.data.bones.active = pb.bone
+ bpy.ops.object.mode_set(mode='POSE')
+
+ for (key, val, sub) in tokens:
+ if key == 'Constraint':
+ cns = parseConstraint(pb.constraints, val, sub)
+ elif key == 'bpyops':
+ expr = "bpy.ops.%s" % val[0]
+ print(expr)
+ print("ob", bpy.context.active_object)
+ print("b", bpy.context.active_bone)
+ print("pb", bpy.context.active_pose_bone)
+ print("md", bpy.context.mode)
+ exec(expr)
+ print("show_alive")
+ elif key == 'ik_dof':
+ parseArray(pb, ["lock_ik_x", "lock_ik_y", "lock_ik_z"], val)
+ elif key == 'ik_limit':
+ parseArray(pb, ["use_ik_limit_x", "use_ik_limit_y", "use_ik_limit_z"], val)
+ elif key == 'ik_max':
+ parseArray(pb, ["ik_max_x", "ik_max_y", "ik_max_z"], val)
+ elif key == 'ik_min':
+ parseArray(pb, ["ik_min_x", "ik_min_y", "ik_min_z"], val)
+ elif key == 'ik_stiffness':
+ parseArray(pb, ["ik_stiffness_x", "ik_stiffness_y", "ik_stiffness_z"], val)
+ else:
+ defaultKey(key, val, sub, "pb", [], globals(), locals())
+ #print("pb %s done" % name)
+ return
def parseArray(data, exts, args):
- n = 1
- for ext in exts:
- expr = "data.%s = %s" % (ext, args[n])
- # print(expr)
- exec(expr)
- n += 1
- return
-
+ n = 1
+ for ext in exts:
+ expr = "data.%s = %s" % (ext, args[n])
+ # print(expr)
+ exec(expr)
+ n += 1
+ return
+
#
-# parseConstraint(constraints, args, tokens)
+# parseConstraint(constraints, args, tokens)
#
def parseConstraint(constraints, args, tokens):
- if invalid(args[2]):
- return None
- cns = constraints.new(args[1])
- #bpy.ops.pose.constraint_add(type=args[1])
- #cns = pb.constraints[-1]
-
- cns.name = args[0]
- #print("cns", cns.name)
- for (key,val,sub) in tokens:
- if key == 'invert':
- parseArray(cns, ["invert_x", "invert_y", "invert_z"], val)
- elif key == 'use':
- parseArray(cns, ["use_x", "use_y", "use_z"], val)
- elif key == 'pos_lock':
- parseArray(cns, ["lock_location_x", "lock_location_y", "lock_location_z"], val)
- elif key == 'rot_lock':
- parseArray(cns, ["lock_rotation_x", "lock_rotation_y", "lock_rotation_z"], val)
- else:
- defaultKey(key, val, sub, "cns", [], globals(), locals())
- #print("cns %s done" % cns.name)
- return cns
-
+ if invalid(args[2]):
+ return None
+ cns = constraints.new(args[1])
+ #bpy.ops.pose.constraint_add(type=args[1])
+ #cns = pb.constraints[-1]
+
+ cns.name = args[0]
+ #print("cns", cns.name)
+ for (key,val,sub) in tokens:
+ if key == 'invert':
+ parseArray(cns, ["invert_x", "invert_y", "invert_z"], val)
+ elif key == 'use':
+ parseArray(cns, ["use_x", "use_y", "use_z"], val)
+ elif key == 'pos_lock':
+ parseArray(cns, ["lock_location_x", "lock_location_y", "lock_location_z"], val)
+ elif key == 'rot_lock':
+ parseArray(cns, ["lock_rotation_x", "lock_rotation_y", "lock_rotation_z"], val)
+ else:
+ defaultKey(key, val, sub, "cns", [], globals(), locals())
+ #print("cns %s done" % cns.name)
+ return cns
+
def insertInfluenceIpo(cns, bone):
- global todo
- if bone != 'PArmIK_L' and bone != 'PArmIK_R' and bone != 'PLegIK_L' and bone != 'PLegIK_R':
- return False
-
- if (toggle & T_FKIK):
- fcurve = cns.driver_add("influence", 0)
- fcurve.driver.type = 'AVERAGE'
-
- var = fcurve.driver.variables.new()
- var.name = bone
- var.targets[0].id_type = 'OBJECT'
- var.targets[0].id = getObject('HumanRig', 'var.targets[0].id', globals(), locals())
- var.targets[0].bone_target = bone
- var.targets[0].transform_type = 'LOC_X'
- # controller_path = fk_chain.arm_p.path_to_id()
- #var.targets[0].data_path = controller_path + '["use_hinge"]'
-
- mod = fcurve.modifiers[0]
- mod.poly_order = 2
- mod.coefficients[0] = 0.0
- mod.coefficients[1] = 1.0
- elif bone == 'PArmIK_L' or bone == 'PArmIK_R':
- if toggle & T_ArmIK:
- cns.influence = 1.0
- else:
- cns.influence = 0.0
- elif bone == 'PLegIK_L' or bone == 'PLegIK_R':
- if toggle & T_LegIK:
- cns.influence = 1.0
- else:
- cns.influence = 0.0
-
- return True
-
-#
-# parseCurve (args, tokens):
-# parseNurb(cu, nNurbs, args, tokens):
-# parseBezier(nurb, n, args, tokens):
+ global todo
+ if bone != 'PArmIK_L' and bone != 'PArmIK_R' and bone != 'PLegIK_L' and bone != 'PLegIK_R':
+ return False
+
+ if (toggle & T_FKIK):
+ fcurve = cns.driver_add("influence", 0)
+ fcurve.driver.type = 'AVERAGE'
+
+ var = fcurve.driver.variables.new()
+ var.name = bone
+ var.targets[0].id_type = 'OBJECT'
+ var.targets[0].id = getObject('HumanRig', 'var.targets[0].id', globals(), locals())
+ var.targets[0].bone_target = bone
+ var.targets[0].transform_type = 'LOC_X'
+ # controller_path = fk_chain.arm_p.path_to_id()
+ #var.targets[0].data_path = controller_path + '["use_hinge"]'
+
+ mod = fcurve.modifiers[0]
+ mod.poly_order = 2
+ mod.coefficients[0] = 0.0
+ mod.coefficients[1] = 1.0
+ elif bone == 'PArmIK_L' or bone == 'PArmIK_R':
+ if toggle & T_ArmIK:
+ cns.influence = 1.0
+ else:
+ cns.influence = 0.0
+ elif bone == 'PLegIK_L' or bone == 'PLegIK_R':
+ if toggle & T_LegIK:
+ cns.influence = 1.0
+ else:
+ cns.influence = 0.0
+
+ return True
+
+#
+# parseCurve (args, tokens):
+# parseNurb(cu, nNurbs, args, tokens):
+# parseBezier(nurb, n, args, tokens):
#
def parseCurve (args, tokens):
- global todo
- if verbosity > 2:
- print( "Parsing curve %s" % args )
- cu = createObjectAndData(args, 'Curve')
-
- nNurbs = 0
- for (key, val, sub) in tokens:
- if key == 'Nurb':
- parseNurb(cu, nNurbs, val, sub)
- nNurbs += 1
- else:
- defaultKey(key, val, sub, "cu", [], globals(), locals())
- return
+ global todo
+ if verbosity > 2:
+ print( "Parsing curve %s" % args )
+ cu = createObjectAndData(args, 'Curve')
+
+ nNurbs = 0
+ for (key, val, sub) in tokens:
+ if key == 'Nurb':
+ parseNurb(cu, nNurbs, val, sub)
+ nNurbs += 1
+ else:
+ defaultKey(key, val, sub, "cu", [], globals(), locals())
+ return
def parseNurb(cu, nNurbs, args, tokens):
- if nNurbs > 0:
- bpy.ops.object.curve_add(type='BEZIER_CURVE')
- print(cu.splines, list(cu.splines), nNurbs)
- nurb = cu.splines[nNurbs]
- nPoints = int(args[0])
- print(nurb, nPoints)
- for n in range(2, nPoints):
- bpy.ops.curve.extrude(mode=1)
-
- n = 0
- for (key, val, sub) in tokens:
- if key == 'bz':
- parseBezier(nurb, n, val, sub)
- n += 1
- elif key == 'pt':
- parsePoint(nurb, n, val, sub)
- n += 1
- else:
- defaultKey(key, val, sub, "nurb", [], globals(), locals())
- return
-
+ if nNurbs > 0:
+ bpy.ops.object.curve_add(type='BEZIER_CURVE')
+ print(cu.splines, list(cu.splines), nNurbs)
+ nurb = cu.splines[nNurbs]
+ nPoints = int(args[0])
+ print(nurb, nPoints)
+ for n in range(2, nPoints):
+ bpy.ops.curve.extrude(mode=1)
+
+ n = 0
+ for (key, val, sub) in tokens:
+ if key == 'bz':
+ parseBezier(nurb, n, val, sub)
+ n += 1
+ elif key == 'pt':
+ parsePoint(nurb, n, val, sub)
+ n += 1
+ else:
+ defaultKey(key, val, sub, "nurb", [], globals(), locals())
+ return
+
def parseBezier(nurb, n, args, tokens):
- bez = nurb[n]
- bez.co = eval(args[0])
- bez.handle_left = eval(args[1])
- bez.handle_left_type = args[2]
- bez.handle_right = eval(args[3])
- bez.handle_right_type = args[4]
- return
+ bez = nurb[n]
+ bez.co = eval(args[0])
+ bez.handle_left = eval(args[1])
+ bez.handle_left_type = args[2]
+ bez.handle_right = eval(args[3])
+ bez.handle_right_type = args[4]
+ return
def parsePoint(nurb, n, args, tokens):
- pt = nurb[n]
- pt.co = eval(args[0])
- return
+ pt = nurb[n]
+ pt.co = eval(args[0])
+ return
#
-# parseLattice (args, tokens):
+# parseLattice (args, tokens):
#
def parseLattice (args, tokens):
- global todo
- if verbosity > 2:
- print( "Parsing lattice %s" % args )
- lat = createObjectAndData(args, 'Lattice')
- for (key, val, sub) in tokens:
- if key == 'Points':
- parseLatticePoints(val, sub, lat.points)
- else:
- defaultKey(key, val, sub, "lat", [], globals(), locals())
- return
+ global todo
+ if verbosity > 2:
+ print( "Parsing lattice %s" % args )
+ lat = createObjectAndData(args, 'Lattice')
+ for (key, val, sub) in tokens:
+ if key == 'Points':
+ parseLatticePoints(val, sub, lat.points)
+ else:
+ defaultKey(key, val, sub, "lat", [], globals(), locals())
+ return
def parseLatticePoints(args, tokens, points):
- global todo
- n = 0
- for (key, val, sub) in tokens:
- if key == 'pt':
- v = points[n].co
- (x,y,z) = eval(val[0])
- v.x = x
- v.y = y
- v.z = z
+ global todo
+ n = 0
+ for (key, val, sub) in tokens:
+ if key == 'pt':
+ v = points[n].co
+ (x,y,z) = eval(val[0])
+ v.x = x
+ v.y = y
+ v.z = z
- v = points[n].co_deform
- (x,y,z) = eval(val[1])
- v.x = x
- v.y = y
- v.z = z
+ v = points[n].co_deform
+ (x,y,z) = eval(val[1])
+ v.x = x
+ v.y = y
+ v.z = z
- n += 1
- return
+ n += 1
+ return
#
-# parseGroup (args, tokens):
+# parseGroup (args, tokens):
#
def parseGroup (args, tokens):
- global todo
- if verbosity > 2:
- print( "Parsing group %s" % args )
-
- grpName = args[0]
- grp = bpy.data.groups.new(grpName)
- loadedData['Group'][grpName] = grp
- for (key, val, sub) in tokens:
- if key == 'Objects':
- parseGroupObjects(val, sub, grp)
- else:
- defaultKey(key, val, sub, "grp", [], globals(), locals())
- return
+ global todo
+ if verbosity > 2:
+ print( "Parsing group %s" % args )
+
+ grpName = args[0]
+ grp = bpy.data.groups.new(grpName)
+ loadedData['Group'][grpName] = grp
+ for (key, val, sub) in tokens:
+ if key == 'Objects':
+ parseGroupObjects(val, sub, grp)
+ else:
+ defaultKey(key, val, sub, "grp", [], globals(), locals())
+ return
def parseGroupObjects(args, tokens, grp):
- global todo
- for (key, val, sub) in tokens:
- if key == 'ob':
- try:
- ob = loadedData['Object'][val[0]]
- grp.objects.link(ob)
- except:
- pass
- return
+ global todo
+ for (key, val, sub) in tokens:
+ if key == 'ob':
+ try:
+ ob = loadedData['Object'][val[0]]
+ grp.objects.link(ob)
+ except:
+ pass
+ return
#
-# postProcess()
-# setInfluence(bones, cnsName, w):
+# postProcess()
+# setInfluence(bones, cnsName, w):
#
def postProcess():
- if not toggle & T_MHX:
- return
- if toggle & T_Rigify:
- return
- for rig in loadedData['Rigify'].values():
- bpy.context.scene.objects.active = rig
- print("Rigify", rig)
- bpy.ops.pose.metarig_generate()
- print("Metarig generated")
- #bpy.context.scene.objects.unlink(rig)
- rig = bpy.context.scene.objects.active
- print("Rigged", rig, bpy.context.object)
- ob = loadedData['Object']['Human']
- mod = ob.modifiers[0]
- print(ob, mod, mod.object)
- mod.object = rig
- print("Rig changed", mod.object)
- return
-
-#
-# parseProcess(args, tokens):
+ if not toggle & T_MHX:
+ return
+ if toggle & T_Rigify:
+ return
+ for rig in loadedData['Rigify'].values():
+ bpy.context.scene.objects.active = rig
+ print("Rigify", rig)
+ bpy.ops.pose.metarig_generate()
+ print("Metarig generated")
+ #bpy.context.scene.objects.unlink(rig)
+ rig = bpy.context.scene.objects.active
+ print("Rigged", rig, bpy.context.object)
+ ob = loadedData['Object']['Human']
+ mod = ob.modifiers[0]
+ print(ob, mod, mod.object)
+ mod.object = rig
+ print("Rig changed", mod.object)
+ return
+
+#
+# parseProcess(args, tokens):
#
def parseProcess(args, tokens):
- return
- rig = loadedData['Object'][args[0]]
- parents = {}
- objects = []
-
- for (key, val, sub) in tokens:
- if key == 'Reparent':
- bname = val[0]
- try:
- eb = ebones[bname]
- parents[bname] = eb.parent.name
- eb.parent = ebones[val[1]]
- except:
- pass
- elif key == 'Bend':
- print(val)
- axis = val[1]
- angle = float(val[2])
- mat = mathutils.Matrix.Rotation(angle, 4, axis)
- try:
- pb = pbones[val[0]]
- prod = pb.matrix_local * mat
- for i in range(4):
- for j in range(4):
- pb.matrix_local[i][j] = prod[i][j]
- print("Done", pb.matrix_local)
- except:
- pass
- elif key == 'Pose':
- bpy.context.scene.objects.active = rig
- bpy.ops.object.mode_set(mode='POSE')
- pbones = rig.pose.bones
- elif key == 'Edit':
- bpy.context.scene.objects.active = rig
- bpy.ops.object.mode_set(mode='EDIT')
- ebones = rig.data.edit_bones
- elif key == 'Object':
- bpy.ops.object.mode_set(mode='OBJECT')
- try:
- ob = loadedData['Object'][val[0]]
- objects.append((ob,sub))
- except:
- ob = None
- if ob:
- bpy.context.scene.objects.active = ob
- mod = ob.modifiers[0]
- ob.modifiers.remove(mod)
- for (key1, val1, sub1) in sub:
- if key1 == 'Modifier':
- parseModifier(ob, val1, sub1)
-
- for (ob,tokens) in objects:
- bpy.context.scene.objects.active = ob
- bpy.ops.object.visual_transform_apply()
- #print("vis", list(ob.modifiers))
- bpy.ops.object.modifier_apply(apply_as='DATA', modifier='Armature')
- #print("app", list(ob.modifiers))
-
- bpy.context.scene.objects.active = rig
- bpy.ops.object.mode_set(mode='POSE')
- bpy.ops.pose.armature_apply()
- bpy.ops.object.mode_set(mode='EDIT')
- ebones = rig.data.edit_bones
- for (bname, pname) in parents.items():
- eb = ebones[bname]
- par = ebones[pname]
- if eb.use_connect:
- par.tail = eb.head
- eb.parent = par
- bpy.ops.object.mode_set(mode='OBJECT')
-
- for (ob,tokens) in objects:
- bpy.context.scene.objects.active = ob
- for (key, val, sub) in tokens:
- if key == 'Modifier':
- parseModifier(ob, val, sub)
-
- return
-
-#
-# defaultKey(ext, args, tokens, var, exclude, glbals, lcals):
+ return
+ rig = loadedData['Object'][args[0]]
+ parents = {}
+ objects = []
+
+ for (key, val, sub) in tokens:
+ if key == 'Reparent':
+ bname = val[0]
+ try:
+ eb = ebones[bname]
+ parents[bname] = eb.parent.name
+ eb.parent = ebones[val[1]]
+ except:
+ pass
+ elif key == 'Bend':
+ print(val)
+ axis = val[1]
+ angle = float(val[2])
+ mat = mathutils.Matrix.Rotation(angle, 4, axis)
+ try:
+ pb = pbones[val[0]]
+ prod = pb.matrix_local * mat
+ for i in range(4):
+ for j in range(4):
+ pb.matrix_local[i][j] = prod[i][j]
+ print("Done", pb.matrix_local)
+ except:
+ pass
+ elif key == 'Pose':
+ bpy.context.scene.objects.active = rig
+ bpy.ops.object.mode_set(mode='POSE')
+ pbones = rig.pose.bones
+ elif key == 'Edit':
+ bpy.context.scene.objects.active = rig
+ bpy.ops.object.mode_set(mode='EDIT')
+ ebones = rig.data.edit_bones
+ elif key == 'Object':
+ bpy.ops.object.mode_set(mode='OBJECT')
+ try:
+ ob = loadedData['Object'][val[0]]
+ objects.append((ob,sub))
+ except:
+ ob = None
+ if ob:
+ bpy.context.scene.objects.active = ob
+ mod = ob.modifiers[0]
+ ob.modifiers.remove(mod)
+ for (key1, val1, sub1) in sub:
+ if key1 == 'Modifier':
+ parseModifier(ob, val1, sub1)
+
+ for (ob,tokens) in objects:
+ bpy.context.scene.objects.active = ob
+ bpy.ops.object.visual_transform_apply()
+ #print("vis", list(ob.modifiers))
+ bpy.ops.object.modifier_apply(apply_as='DATA', modifier='Armature')
+ #print("app", list(ob.modifiers))
+
+ bpy.context.scene.objects.active = rig
+ bpy.ops.object.mode_set(mode='POSE')
+ bpy.ops.pose.armature_apply()
+ bpy.ops.object.mode_set(mode='EDIT')
+ ebones = rig.data.edit_bones
+ for (bname, pname) in parents.items():
+ eb = ebones[bname]
+ par = ebones[pname]
+ if eb.use_connect:
+ par.tail = eb.head
+ eb.parent = par
+ bpy.ops.object.mode_set(mode='OBJECT')
+
+ for (ob,tokens) in objects:
+ bpy.context.scene.objects.active = ob
+ for (key, val, sub) in tokens:
+ if key == 'Modifier':
+ parseModifier(ob, val, sub)
+
+ return
+
+#
+# defaultKey(ext, args, tokens, var, exclude, glbals, lcals):
#
def defaultKey(ext, args, tokens, var, exclude, glbals, lcals):
- global todo
-
- if ext == 'Property':
- expr = "%s['%s'] = %s" % (var, args[0], args[1])
- print("Property", expr)
- exec(expr, glbals, lcals)
- #print("execd")
- return
-
- nvar = "%s.%s" % (var, ext)
- # print(ext)
- if ext in exclude:
- return
- #print("D", nvar)
-
- if len(args) == 0:
- raise NameError("Key length 0: %s" % ext)
-
- rnaType = args[0]
- if rnaType == 'Add':
- print("*** Cannot Add yet ***")
- return
-
- elif rnaType == 'Refer':
- typ = args[1]
- name = args[2]
- data = "loadedData['%s']['%s']" % (typ, name)
-
- elif rnaType == 'Struct' or rnaType == 'Define':
- typ = args[1]
- name = args[2]
- try:
- data = eval(nvar, glbals, lcals)
- except:
- data = None
- # print("Old structrna", nvar, data)
-
- if data == None:
- try:
- creator = args[3]
- except:
- creator = None
- # print("Creator", creator, eval(var,glbals,lcals))
-
- try:
- rna = eval(var,glbals,lcals)
- data = eval(creator)
- except:
- data = None
- # print("New struct", nvar, typ, data)
-
- if rnaType == 'Define':
- loadedData[typ][name] = data
-
- if data:
- for (key, val, sub) in tokens:
- defaultKey(key, val, sub, "data", [], globals(), locals())
-
- print("Struct done", nvar)
- return
-
- elif rnaType == 'PropertyRNA':
- raise NameError("PropertyRNA!")
- #print("PropertyRNA ", ext, var)
- for (key, val, sub) in tokens:
- defaultKey(ext, val, sub, nvar, [], glbals, lcals)
- return
-
- elif rnaType == 'Array':
- for n in range(1, len(args)):
- expr = "%s[%d] = %s" % (nvar, n-1, args[n])
- exec(expr, glbals, lcals)
- if len(args) > 0:
- expr = "%s[0] = %s" % (nvar, args[1])
- exec(expr, glbals, lcals)
- return
-
- elif rnaType == 'List':
- data = []
- for (key, val, sub) in tokens:
- elt = eval(val[1], glbals, lcals)
- data.append(elt)
-
- elif rnaType == 'Matrix':
- return
- i = 0
- n = len(tokens)
- for (key, val, sub) in tokens:
- if key == 'row':
- for j in range(n):
- expr = "%s[%d][%d] = %g" % (nvar, i, j, float(val[j]))
- exec(expr, glbals, lcals)
- i += 1
- return
-
- else:
- try:
- data = loadedData[rnaType][args[1]]
- #print("From loaded", rnaType, args[1], data)
- return data
- except:
- data = rnaType
-
- #print(var, ext, data)
- expr = "%s = %s" % (nvar, data)
- try:
- exec(expr, glbals, lcals)
- except:
- #print("Failed ",expr)
- todo.append((expr, glbals, lcals))
- return
-
-#
-# parseBoolArray(mask):
+ global todo
+
+ if ext == 'Property':
+ expr = "%s['%s'] = %s" % (var, args[0], args[1])
+ print("Property", expr)
+ exec(expr, glbals, lcals)
+ #print("execd")
+ return
+
+ nvar = "%s.%s" % (var, ext)
+ # print(ext)
+ if ext in exclude:
+ return
+ #print("D", nvar)
+
+ if len(args) == 0:
+ raise NameError("Key length 0: %s" % ext)
+
+ rnaType = args[0]
+ if rnaType == 'Add':
+ print("*** Cannot Add yet ***")
+ return
+
+ elif rnaType == 'Refer':
+ typ = args[1]
+ name = args[2]
+ data = "loadedData['%s']['%s']" % (typ, name)
+
+ elif rnaType == 'Struct' or rnaType == 'Define':
+ typ = args[1]
+ name = args[2]
+ try:
+ data = eval(nvar, glbals, lcals)
+ except:
+ data = None
+ # print("Old structrna", nvar, data)
+
+ if data == None:
+ try:
+ creator = args[3]
+ except:
+ creator = None
+ # print("Creator", creator, eval(var,glbals,lcals))
+
+ try:
+ rna = eval(var,glbals,lcals)
+ data = eval(creator)
+ except:
+ data = None
+ # print("New struct", nvar, typ, data)
+
+ if rnaType == 'Define':
+ loadedData[typ][name] = data
+
+ if data:
+ for (key, val, sub) in tokens:
+ defaultKey(key, val, sub, "data", [], globals(), locals())
+
+ print("Struct done", nvar)
+ return
+
+ elif rnaType == 'PropertyRNA':
+ raise NameError("PropertyRNA!")
+ #print("PropertyRNA ", ext, var)
+ for (key, val, sub) in tokens:
+ defaultKey(ext, val, sub, nvar, [], glbals, lcals)
+ return
+
+ elif rnaType == 'Array':
+ for n in range(1, len(args)):
+ expr = "%s[%d] = %s" % (nvar, n-1, args[n])
+ exec(expr, glbals, lcals)
+ if len(args) > 0:
+ expr = "%s[0] = %s" % (nvar, args[1])
+ exec(expr, glbals, lcals)
+ return
+
+ elif rnaType == 'List':
+ data = []
+ for (key, val, sub) in tokens:
+ elt = eval(val[1], glbals, lcals)
+ data.append(elt)
+
+ elif rnaType == 'Matrix':
+ return
+ i = 0
+ n = len(tokens)
+ for (key, val, sub) in tokens:
+ if key == 'row':
+ for j in range(n):
+ expr = "%s[%d][%d] = %g" % (nvar, i, j, float(val[j]))
+ exec(expr, glbals, lcals)
+ i += 1
+ return
+
+ else:
+ try:
+ data = loadedData[rnaType][args[1]]
+ #print("From loaded", rnaType, args[1], data)
+ return data
+ except:
+ data = rnaType
+
+ #print(var, ext, data)
+ expr = "%s = %s" % (nvar, data)
+ try:
+ exec(expr, glbals, lcals)
+ except:
+ #print("Failed ",expr)
+ todo.append((expr, glbals, lcals))
+ return
+
+#
+# parseBoolArray(mask):
#
def parseBoolArray(mask):
- list = []
- for c in mask:
- if c == '0':
- list.append(False)
- else:
- list.append(True)
- return list
+ list = []
+ for c in mask:
+ if c == '0':
+ list.append(False)
+ else:
+ list.append(True)
+ return list
-# parseMatrix(args, tokens)
+# parseMatrix(args, tokens)
#
def parseMatrix(args, tokens):
- matrix = Matrix( [1,0,0,0], [0,1,0,0], [0,0,1,0], [0,0,0,1] )
- i = 0
- for (key, val, sub) in tokens:
- if key == 'row':
- matrix[i][0] = float(val[0])
- matrix[i][1] = float(val[1])
- matrix[i][2] = float(val[2])
- matrix[i][3] = float(val[3])
- i += 1
- return matrix
+ matrix = Matrix( [1,0,0,0], [0,1,0,0], [0,0,1,0], [0,0,0,1] )
+ i = 0
+ for (key, val, sub) in tokens:
+ if key == 'row':
+ matrix[i][0] = float(val[0])
+ matrix[i][1] = float(val[1])
+ matrix[i][2] = float(val[2])
+ matrix[i][3] = float(val[3])
+ i += 1
+ return matrix
#
-# parseDefault(data, tokens, exclude):
+# parseDefault(data, tokens, exclude):
#
def parseDefault(data, tokens):
- for (key, val, sub) in tokens:
- defaultKey(key, val, sub, "data", exclude, globals(), locals())
+ for (key, val, sub) in tokens:
+ defaultKey(key, val, sub, "data", exclude, globals(), locals())
#
-# Utilities
+# Utilities
#
#
-# extractBpyType(data):
+# extractBpyType(data):
#
def extractBpyType(data):
- typeSplit = str(type(data)).split("'")
- if typeSplit[0] != '<class ':
- return None
- classSplit = typeSplit[1].split(".")
- if classSplit[0] == 'bpy' and classSplit[1] == 'types':
- return classSplit[2]
- elif classSplit[0] == 'bpy_types':
- return classSplit[1]
- else:
- return None
+ typeSplit = str(type(data)).split("'")
+ if typeSplit[0] != '<class ':
+ return None
+ classSplit = typeSplit[1].split(".")
+ if classSplit[0] == 'bpy' and classSplit[1] == 'types':
+ return classSplit[2]
+ elif classSplit[0] == 'bpy_types':
+ return classSplit[1]
+ else:
+ return None
#
-# Bool(string):
+# Bool(string):
#
def Bool(string):
- if string == 'True':
- return True
- elif string == 'False':
- return False
- else:
- raise NameError("Bool %s?" % string)
-
+ if string == 'True':
+ return True
+ elif string == 'False':
+ return False
+ else:
+ raise NameError("Bool %s?" % string)
+
#
-# invalid(condition):
+# invalid(condition):
#
def invalid(condition):
- global rigLeg, rigArm, toggle
- res = eval(condition, globals())
- try:
- res = eval(condition, globals())
- #print("%s = %s" % (condition, res))
- return not res
- except:
- #print("%s invalid!" % condition)
- return True
-
-#
-# clearScene(context):
-# hideLayers():
-#
-
+ global rigLeg, rigArm, toggle
+ res = eval(condition, globals())
+ try:
+ res = eval(condition, globals())
+ #print("%s = %s" % (condition, res))
+ return not res
+ except:
+ #print("%s invalid!" % condition)
+ return True
+
+#
+# clearScene(context):
+# hideLayers():
+#
+
def clearScene():
- global toggle
- scn = bpy.context.scene
- for n in range(len(scn.layers)):
- scn.layers[n] = True
- print("clearScene %s %s" % (toggle & T_Replace, scn))
- if not toggle & T_Replace:
- return scn
-
- for ob in scn.objects:
- if ob.type == "MESH" or ob.type == "ARMATURE":
- scn.objects.active = ob
- bpy.ops.object.mode_set(mode='OBJECT')
- scn.objects.unlink(ob)
- del ob
- #print(scn.objects)
- return scn
+ global toggle
+ scn = bpy.context.scene
+ for n in range(len(scn.layers)):
+ scn.layers[n] = True
+ print("clearScene %s %s" % (toggle & T_Replace, scn))
+ if not toggle & T_Replace:
+ return scn
+
+ for ob in scn.objects:
+ if ob.type == "MESH" or ob.type == "ARMATURE":
+ scn.objects.active = ob
+ bpy.ops.object.mode_set(mode='OBJECT')
+ scn.objects.unlink(ob)
+ del ob
+ #print(scn.objects)
+ return scn
def hideLayers():
- scn = bpy.context.scene
- for n in range(len(scn.layers)):
- if n < 3:
- scn.layers[n] = True
- else:
- scn.layers[n] = False
- return
+ scn = bpy.context.scene
+ for n in range(len(scn.layers)):
+ if n < 3:
+ scn.layers[n] = True
+ else:
+ scn.layers[n] = False
+ return
#
-# User interface
+# User interface
#
DEBUG= False
from bpy.props import *
class IMPORT_OT_makehuman_mhx(bpy.types.Operator):
- '''Import from MHX file format (.mhx)'''
- bl_idname = "import_scene.makehuman_mhx"
- bl_description = 'Import from MHX file format (.mhx)'
- bl_label = "Import MHX"
- bl_space_type = "PROPERTIES"
- bl_region_type = "WINDOW"
-
- filepath = StringProperty(name="File Path", description="Filepath used for importing the MHX file", maxlen= 1024, default= "")
-
- #preset = BoolProperty(name="Use rig preset", description="Use rig preset (Classic/Gobo)?", default=True)
- #presetRig = EnumProperty(name="Rig", description="Choose preset rig",
- # items = [('Classic','Classic','Classic'), ('Gobo','Gobo','Gobo')], default = '1')
- footRig = EnumProperty(name="Foot rig", description="Foot rig",
- items = [('Reverse foot','Reverse foot','Reverse foot'), ('Gobo','Gobo','Gobo')], default = '1')
- fingerRig = EnumProperty(name="Finger rig", description="Finger rig",
- items = [('Panel','Panel','Panel'), ('Curl','Curl','Curl'), ('IK','IK','IK')], default = '1')
-
- mesh = BoolProperty(name="Mesh", description="Use main mesh", default=toggle&T_Mesh)
- armature = BoolProperty(name="Armature", description="Use armature", default=toggle&T_Armature)
- proxy = BoolProperty(name="Proxy", description="Use proxy object", default=toggle&T_Proxy)
- replace = BoolProperty(name="Replace scene", description="Replace scene", default=toggle&T_Replace)
- face = BoolProperty(name="Face shapes", description="Include facial shapekeys", default=toggle&T_Face)
- shape = BoolProperty(name="Body shapes", description="Include body shapekeys", default=toggle&T_Shape)
- symm = BoolProperty(name="Symmetric shapes", description="Keep shapekeys symmetric", default=toggle&T_Symm)
-
- def execute(self, context):
- global toggle
- O_Mesh = T_Mesh if self.properties.mesh else 0
- O_Armature = T_Armature if self.properties.armature else 0
- O_Proxy = T_Proxy if self.properties.proxy else 0
- O_Replace = T_Replace if self.properties.replace else 0
- O_Face = T_Face if self.properties.face else 0
- O_Shape = T_Shape if self.properties.shape else 0
- O_Symm = T_Symm if self.properties.symm else 0
- #O_Preset = T_Preset if self.properties.preset else 0
- toggle = O_Mesh | O_Armature | O_Proxy | T_ArmIK | T_LegIK | O_Replace | O_Face | O_Shape | O_Symm | T_MHX
-
-
- readMhxFile(self.properties.filepath,
- (self.properties.footRig,
- self.properties.fingerRig))
- return {'FINISHED'}
-
- def invoke(self, context, event):
- wm = context.manager
- wm.add_fileselect(self)
- return {'RUNNING_MODAL'}
+ '''Import from MHX file format (.mhx)'''
+ bl_idname = "import_scene.makehuman_mhx"
+ bl_description = 'Import from MHX file format (.mhx)'
+ bl_label = "Import MHX"
+ bl_space_type = "PROPERTIES"
+ bl_region_type = "WINDOW"
+
+ filepath = StringProperty(name="File Path", description="Filepath used for importing the MHX file", maxlen= 1024, default= "")
+
+ #preset = BoolProperty(name="Use rig preset", description="Use rig preset (Classic/Gobo)?", default=True)
+ #presetRig = EnumProperty(name="Rig", description="Choose preset rig",
+ # items = [('Classic','Classic','Classic'), ('Gobo','Gobo','Gobo')], default = '1')
+ footRig = EnumProperty(name="Foot rig", description="Foot rig",
+ items = [('Reverse foot','Reverse foot','Reverse foot'), ('Gobo','Gobo','Gobo')], default = '1')
+ fingerRig = EnumProperty(name="Finger rig", description="Finger rig",
+ items = [('Panel','Panel','Panel'), ('Curl','Curl','Curl'), ('IK','IK','IK')], default = '1')
+
+ mesh = BoolProperty(name="Mesh", description="Use main mesh", default=toggle&T_Mesh)
+ armature = BoolProperty(name="Armature", description="Use armature", default=toggle&T_Armature)
+ proxy = BoolProperty(name="Proxy", description="Use proxy object", default=toggle&T_Proxy)
+ replace = BoolProperty(name="Replace scene", description="Replace scene", default=toggle&T_Replace)
+ face = BoolProperty(name="Face shapes", description="Include facial shapekeys", default=toggle&T_Face)
+ shape = BoolProperty(name="Body shapes", description="Include body shapekeys", default=toggle&T_Shape)
+ symm = BoolProperty(name="Symmetric shapes", description="Keep shapekeys symmetric", default=toggle&T_Symm)
+
+ def execute(self, context):
+ global toggle
+ O_Mesh = T_Mesh if self.properties.mesh else 0
+ O_Armature = T_Armature if self.properties.armature else 0
+ O_Proxy = T_Proxy if self.properties.proxy else 0
+ O_Replace = T_Replace if self.properties.replace else 0
+ O_Face = T_Face if self.properties.face else 0
+ O_Shape = T_Shape if self.properties.shape else 0
+ O_Symm = T_Symm if self.properties.symm else 0
+ #O_Preset = T_Preset if self.properties.preset else 0
+ toggle = O_Mesh | O_Armature | O_Proxy | T_ArmIK | T_LegIK | O_Replace | O_Face | O_Shape | O_Symm | T_MHX
+
+
+ readMhxFile(self.properties.filepath,
+ (self.properties.footRig,
+ self.properties.fingerRig))
+ return {'FINISHED'}
+
+ def invoke(self, context, event):
+ wm = context.manager
+ wm.add_fileselect(self)
+ return {'RUNNING_MODAL'}
'''
class MakeHumanFKIKPanel(bpy.types.Panel):
- bl_label = "MakeHuman FK/IK"
- bl_space_type = "VIEW_3D"
- bl_region_type = "UI"
-
- def draw(self, context):
- layout = self.layout
- ob = bpy.context.active_object
- if ob.type == 'ARMATURE':
- layout.row().prop(ob, "PArmIK_L")
- layout.row().prop(ob, "PArmIK_R")
- layout.row().prop(ob, "PLegIK_L")
- layout.row().prop(ob, "PLegIK_R")
-
- layout.row().prop(ob, "PHandLocal_L")
- layout.row().prop(ob, "PHandLocal_R")
- layout.row().prop(ob, "PFootLocal_L")
- layout.row().prop(ob, "PFootLocal_R")
- return
-
+ bl_label = "MakeHuman FK/IK"
+ bl_space_type = "VIEW_3D"
+ bl_region_type = "UI"
+
+ def draw(self, context):
+ layout = self.layout
+ ob = bpy.context.active_object
+ if ob.type == 'ARMATURE':
+ layout.row().prop(ob, "PArmIK_L")
+ layout.row().prop(ob, "PArmIK_R")
+ layout.row().prop(ob, "PLegIK_L")
+ layout.row().prop(ob, "PLegIK_R")
+
+ layout.row().prop(ob, "PHandLocal_L")
+ layout.row().prop(ob, "PHandLocal_R")
+ layout.row().prop(ob, "PFootLocal_L")
+ layout.row().prop(ob, "PFootLocal_R")
+ return
+
class MakeHumanFingerPanel(bpy.types.Panel):
- bl_label = "MakeHuman Fingers"
- bl_space_type = "VIEW_3D"
- bl_region_type = "UI"
-
- def draw(self, context):
- layout = self.layout
- pb = bpy.context.active_pose_bone
- layout.row().prop(pb, "MHRelax")
- layout.row().prop(pb, "MHCurl")
- layout.row().prop(pb, "MHCone")
- layout.row().prop(pb, "MHSpread")
- layout.row().prop(pb, "MHScrunch")
- layout.row().prop(pb, "MHLean")
- return
-
+ bl_label = "MakeHuman Fingers"
+ bl_space_type = "VIEW_3D"
+ bl_region_type = "UI"
+
+ def draw(self, context):
+ layout = self.layout
+ pb = bpy.context.active_pose_bone
+ layout.row().prop(pb, "MHRelax")
+ layout.row().prop(pb, "MHCurl")
+ layout.row().prop(pb, "MHCone")
+ layout.row().prop(pb, "MHSpread")
+ layout.row().prop(pb, "MHScrunch")
+ layout.row().prop(pb, "MHLean")
+ return
+
def registerPanels():
- bpy.types.Object.FloatProperty(attr="PArmIK_L", name="L arm - IK", default = 0, min = 0.0, max = 1.0)
- bpy.types.Object.FloatProperty(attr="PArmIK_R", name="R arm - IK", default = 0, min = 0.0, max = 1.0)
- bpy.types.Object.FloatProperty(attr="PLegIK_L", name="L leg - IK", default = 0, min = 0.0, max = 1.0)
- bpy.types.Object.FloatProperty(attr="PLegIK_R", name="R leg - IK", default = 0, min = 0.0, max = 1.0)
-
- bpy.types.Object.FloatProperty(attr="PHandLocal_L", name="L hand - Loc", default = 0, min = 0.0, max = 1.0)
- bpy.types.Object.FloatProperty(attr="PHandLocal_R", name="R hand - Loc", default = 0, min = 0.0, max = 1.0)
- bpy.types.Object.FloatProperty(attr="PFootLocal_L", name="L foot - Loc", default = 0, min = 0.0, max = 1.0)
- bpy.types.Object.FloatProperty(attr="PFootLocal_R", name="R foot - Loc", default = 0, min = 0.0, max = 1.0)
-
- bpy.types.PoseBone.FloatProperty(attr="MHCone", name="Cone", default = 0, min = -0.5, max = 1.0)
- bpy.types.PoseBone.FloatProperty(attr="MHRelax", name="Relax", default = 0, min = -0.5, max = 1.0)
- bpy.types.PoseBone.FloatProperty(attr="MHCurl", name="Curl", default = 0, min = -0.5, max = 1.0)
- bpy.types.PoseBone.FloatProperty(attr="MHLean", name="Lean", default = 0, min = -1.0, max = 1.0)
- bpy.types.PoseBone.FloatProperty(attr="MHScrunch", name="Scrunch", default = 0, min = -0.5, max = 1.0)
- bpy.types.PoseBone.FloatProperty(attr="MHSpread", name="Spread", default = 0, min = -0.5, max = 1.0)
-
- bpy.types.register(MakeHumanFKIKPanel)
- bpy.types.register(MakeHumanFingerPanel)
+ bpy.types.Object.FloatProperty(attr="PArmIK_L", name="L arm - IK", default = 0, min = 0.0, max = 1.0)
+ bpy.types.Object.FloatProperty(attr="PArmIK_R", name="R arm - IK", default = 0, min = 0.0, max = 1.0)
+ bpy.types.Object.FloatProperty(attr="PLegIK_L", name="L leg - IK", default = 0, min = 0.0, max = 1.0)
+ bpy.types.Object.FloatProperty(attr="PLegIK_R", name="R leg - IK", default = 0, min = 0.0, max = 1.0)
+
+ bpy.types.Object.FloatProperty(attr="PHandLocal_L", name="L hand - Loc", default = 0, min = 0.0, max = 1.0)
+ bpy.types.Object.FloatProperty(attr="PHandLocal_R", name="R hand - Loc", default = 0, min = 0.0, max = 1.0)
+ bpy.types.Object.FloatProperty(attr="PFootLocal_L", name="L foot - Loc", default = 0, min = 0.0, max = 1.0)
+ bpy.types.Object.FloatProperty(attr="PFootLocal_R", name="R foot - Loc", default = 0, min = 0.0, max = 1.0)
+
+ bpy.types.PoseBone.FloatProperty(attr="MHCone", name="Cone", default = 0, min = -0.5, max = 1.0)
+ bpy.types.PoseBone.FloatProperty(attr="MHRelax", name="Relax", default = 0, min = -0.5, max = 1.0)
+ bpy.types.PoseBone.FloatProperty(attr="MHCurl", name="Curl", default = 0, min = -0.5, max = 1.0)
+ bpy.types.PoseBone.FloatProperty(attr="MHLean", name="Lean", default = 0, min = -1.0, max = 1.0)
+ bpy.types.PoseBone.FloatProperty(attr="MHScrunch", name="Scrunch", default = 0, min = -0.5, max = 1.0)
+ bpy.types.PoseBone.FloatProperty(attr="MHSpread", name="Spread", default = 0, min = -0.5, max = 1.0)
+
+ bpy.types.register(MakeHumanFKIKPanel)
+ bpy.types.register(MakeHumanFingerPanel)
def unregisterPanels():
- bpy.types.unregister(MakeHumanFKIKPanel)
- bpy.types.unregister(MakeHumanFingerPanel)
- '''
+ bpy.types.unregister(MakeHumanFKIKPanel)
+ bpy.types.unregister(MakeHumanFingerPanel)
+ '''
def menu_func(self, context):
@@ -2212,17 +2215,17 @@ def menu_func(self, context):
def register():
- bpy.types.INFO_MT_file_import.append(menu_func)
+ bpy.types.INFO_MT_file_import.append(menu_func)
def unregister():
- bpy.types.INFO_MT_file_import.remove(menu_func)
+ bpy.types.INFO_MT_file_import.remove(menu_func)
if __name__ == "__main__":
- register()
+ register()
#
-# Testing
+# Testing
#
"""
theScale = 1.0
diff --git a/io_import_scene_unreal_psk.py b/io_import_scene_unreal_psk.py
index 6a04955a..f2844f2c 100644
--- a/io_import_scene_unreal_psk.py
+++ b/io_import_scene_unreal_psk.py
@@ -17,15 +17,18 @@
# ##### END GPL LICENSE BLOCK #####
bl_addon_info = {
- "name": "Import: Unreal Skeleton Mesh(.psk)",
+ "name": "Import Unreal Skeleton Mesh(.psk)",
"author": "Darknet",
- "version": "2.0",
+ "version": (2,0),
"blender": (2, 5, 3),
+ "api": 31667,
"location": "File > Import ",
"description": "Import Unreal Engine (.psk)",
"warning": "",
- "wiki_url": "http://wiki.blender.org/index.php/Extensions:2.5/Py/Scripts/File_I-O/Unreal_psk_psa",
- "tracker_url": "https://projects.blender.org/tracker/index.php?func=detail&aid=21366&group_id=153&atid=469",
+ "wiki_url": "http://wiki.blender.org/index.php/Extensions:2.5/Py/"
+ "Scripts/File_I-O/Unreal_psk_psa",
+ "tracker_url": "https://projects.blender.org/tracker/index.php?"\
+ "func=detail&aid=21366&group_id=153&atid=469",
"category": "Import/Export"}
"""
@@ -71,515 +74,515 @@ def unpack_list(list_of_tuples):
return l
"""
class md5_bone:
- bone_index=0
- name=""
- bindpos=[]
- bindmat = mathutils.Quaternion()
- parent=""
- parent_index=0
- blenderbone=None
- roll=0
+ bone_index=0
+ name=""
+ bindpos=[]
+ bindmat = mathutils.Quaternion()
+ parent=""
+ parent_index=0
+ blenderbone=None
+ roll=0
- def __init__(self):
- self.bone_index=0
- self.name=""
- self.bindpos=[0.0]*3
- self.bindmat=[None]*3 #is this how you initilize a 2d-array
- for i in range(3): self.bindmat[i] = [0.0]*3
- self.parent=""
- self.parent_index=0
- self.blenderbone=None
+ def __init__(self):
+ self.bone_index=0
+ self.name=""
+ self.bindpos=[0.0]*3
+ self.bindmat=[None]*3 #is this how you initilize a 2d-array
+ for i in range(3): self.bindmat[i] = [0.0]*3
+ self.parent=""
+ self.parent_index=0
+ self.blenderbone=None
- def dump(self):
- print ("bone index: ", self.bone_index)
- print ("name: ", self.name)
- print ("bind position: ", self.bindpos)
- print ("bind translation matrix: ", self.bindmat)
- print ("parent: ", self.parent)
- print ("parent index: ", self.parent_index)
- print ("blenderbone: ", self.blenderbone)
+ def dump(self):
+ print ("bone index: ", self.bone_index)
+ print ("name: ", self.name)
+ print ("bind position: ", self.bindpos)
+ print ("bind translation matrix: ", self.bindmat)
+ print ("parent: ", self.parent)
+ print ("parent index: ", self.parent_index)
+ print ("blenderbone: ", self.blenderbone)
"""
class md5_bone:
- bone_index=0
- name=""
- bindpos=[]
- bindmat=[]
- scale = []
- parent=""
- parent_index=0
- blenderbone=None
- roll=0
+ bone_index=0
+ name=""
+ bindpos=[]
+ bindmat=[]
+ scale = []
+ parent=""
+ parent_index=0
+ blenderbone=None
+ roll=0
- def __init__(self):
- self.bone_index=0
- self.name=""
- self.bindpos=[0.0]*3
- self.scale=[0.0]*3
- self.bindmat=[None]*3 #is this how you initilize a 2d-array
- for i in range(3): self.bindmat[i] = [0.0]*3
- self.parent=""
- self.parent_index=0
- self.blenderbone=None
+ def __init__(self):
+ self.bone_index=0
+ self.name=""
+ self.bindpos=[0.0]*3
+ self.scale=[0.0]*3
+ self.bindmat=[None]*3 #is this how you initilize a 2d-array
+ for i in range(3): self.bindmat[i] = [0.0]*3
+ self.parent=""
+ self.parent_index=0
+ self.blenderbone=None
- def dump(self):
- print ("bone index: ", self.bone_index)
- print ("name: ", self.name)
- print ("bind position: ", self.bindpos)
- print ("bind translation matrix: ", self.bindmat)
- print ("parent: ", self.parent)
- print ("parent index: ", self.parent_index)
- print ("blenderbone: ", self.blenderbone)
-
-#http://www.blender.org/forum/viewtopic.php?t=13340&sid=8b17d5de07b17960021bbd72cac0495f
+ def dump(self):
+ print ("bone index: ", self.bone_index)
+ print ("name: ", self.name)
+ print ("bind position: ", self.bindpos)
+ print ("bind translation matrix: ", self.bindmat)
+ print ("parent: ", self.parent)
+ print ("parent index: ", self.parent_index)
+ print ("blenderbone: ", self.blenderbone)
+
+#http://www.blender.org/forum/viewtopic.php?t=13340&sid=8b17d5de07b17960021bbd72cac0495f
def fixRollZ(b):
- v = (b.tail-b.head)/b.length
- b.roll -= math.degrees(math.atan2(v[0]*v[2]*(1 - v[1]),v[0]*v[0] + v[1]*v[2]*v[2]))
+ v = (b.tail-b.head)/b.length
+ b.roll -= math.degrees(math.atan2(v[0]*v[2]*(1 - v[1]),v[0]*v[0] + v[1]*v[2]*v[2]))
def fixRoll(b):
- v = (b.tail-b.head)/b.length
- if v[2]*v[2] > .5:
- #align X-axis
- b.roll += math.degrees(math.atan2(v[0]*v[2]*(1 - v[1]),v[2]*v[2] + v[1]*v[0]*v[0]))
- else:
- #align Z-axis
- b.roll -= math.degrees(math.atan2(v[0]*v[2]*(1 - v[1]),v[0]*v[0] + v[1]*v[2]*v[2]))
-
+ v = (b.tail-b.head)/b.length
+ if v[2]*v[2] > .5:
+ #align X-axis
+ b.roll += math.degrees(math.atan2(v[0]*v[2]*(1 - v[1]),v[2]*v[2] + v[1]*v[0]*v[0]))
+ else:
+ #align Z-axis
+ b.roll -= math.degrees(math.atan2(v[0]*v[2]*(1 - v[1]),v[0]*v[0] + v[1]*v[2]*v[2]))
+
def pskimport(infile):
- global DEBUGLOG
- print ("--------------------------------------------------")
- print ("---------SCRIPT EXECUTING PYTHON IMPORTER---------")
- print ("--------------------------------------------------")
- print ("Importing file: ", infile)
-
- md5_bones=[]
- pskfile = open(infile,'rb')
- if (DEBUGLOG):
- logpath = infile.replace(".psk", ".txt")
- print("logpath:",logpath)
- logf = open(logpath,'w')
-
- def printlog(strdata):
- if (DEBUGLOG):
- logf.write(strdata)
-
- objName = infile.split('\\')[-1].split('.')[0]
-
- me_ob = bpy.data.meshes.new(objName)
- print("objName:",objName)
- printlog(("New Mesh = " + me_ob.name + "\n"))
- #read general header
- indata = unpack('20s3i',pskfile.read(32))
- #not using the general header at this time
- #==================================================================================================
- # vertex point
- #==================================================================================================
- #read the PNTS0000 header
- indata = unpack('20s3i',pskfile.read(32))
- recCount = indata[3]
- printlog(( "Nbr of PNTS0000 records: " + str(recCount) + "\n"))
- counter = 0
- verts = []
- while counter < recCount:
- counter = counter + 1
- indata = unpack('3f',pskfile.read(12))
- #print(indata[0],indata[1],indata[2])
- verts.extend([(indata[0],indata[1],indata[2])])
- #Tmsh.vertices.append(NMesh.Vert(indata[0],indata[1],indata[2]))
-
- #==================================================================================================
- # UV
- #==================================================================================================
- #read the VTXW0000 header
- indata = unpack('20s3i',pskfile.read(32))
- recCount = indata[3]
- printlog( "Nbr of VTXW0000 records: " + str(recCount)+ "\n")
- counter = 0
- UVCoords = []
- #UVCoords record format = [index to PNTS, U coord, v coord]
- while counter < recCount:
- counter = counter + 1
- indata = unpack('hhffhh',pskfile.read(16))
- UVCoords.append([indata[0],indata[2],indata[3]])
- #print([indata[0],indata[2],indata[3]])
- #print([indata[1],indata[2],indata[3]])
-
- #==================================================================================================
- # Face
- #==================================================================================================
- #read the FACE0000 header
- indata = unpack('20s3i',pskfile.read(32))
- recCount = indata[3]
- printlog( "Nbr of FACE0000 records: "+ str(recCount) + "\n")
- #PSK FACE0000 fields: WdgIdx1|WdgIdx2|WdgIdx3|MatIdx|AuxMatIdx|SmthGrp
- #associate MatIdx to an image, associate SmthGrp to a material
- SGlist = []
- counter = 0
- faces = []
- faceuv = []
- while counter < recCount:
- counter = counter + 1
- indata = unpack('hhhbbi',pskfile.read(12))
- #the psk values are: nWdgIdx1|WdgIdx2|WdgIdx3|MatIdx|AuxMatIdx|SmthGrp
- #indata[0] = index of UVCoords
- #UVCoords[indata[0]]=[index to PNTS, U coord, v coord]
- #UVCoords[indata[0]][0] = index to PNTS
- PNTSA = UVCoords[indata[0]][0]
- PNTSB = UVCoords[indata[1]][0]
- PNTSC = UVCoords[indata[2]][0]
- #print(PNTSA,PNTSB,PNTSC) #face id vertex
- #faces.extend([0,1,2,0])
- faces.extend([PNTSA,PNTSB,PNTSC,0])
- uv = []
- u0 = UVCoords[indata[0]][1]
- v0 = UVCoords[indata[0]][2]
- uv.append([u0,v0])
- u1 = UVCoords[indata[1]][1]
- v1 = UVCoords[indata[1]][2]
- uv.append([u1,v1])
- u2 = UVCoords[indata[2]][1]
- v2 = UVCoords[indata[2]][2]
- uv.append([u2,v2])
- faceuv.append(uv)
- #print("UV: ",u0,v0)
- #update the uv var of the last item in the Tmsh.faces list
- # which is the face just added above
- ##Tmsh.faces[-1].uv = [(u0,v0),(u1,v1),(u2,v2)]
- #print("smooth:",indata[5])
- #collect a list of the smoothing groups
- if SGlist.count(indata[5]) == 0:
- SGlist.append(indata[5])
- print("smooth:",indata[5])
- #assign a material index to the face
- #Tmsh.faces[-1].materialIndex = SGlist.index(indata[5])
- printlog( "Using Materials to represent PSK Smoothing Groups...\n")
- #==========
- # skip something...
- #==========
-
- #==================================================================================================
- # Material
- #==================================================================================================
- ##
- #read the MATT0000 header
- indata = unpack('20s3i',pskfile.read(32))
- recCount = indata[3]
- printlog("Nbr of MATT0000 records: " + str(recCount) + "\n" )
- printlog(" - Not importing any material data now. PSKs are texture wrapped! \n")
- counter = 0
- while counter < recCount:
- counter = counter + 1
- indata = unpack('64s6i',pskfile.read(88))
- ##
-
- #==================================================================================================
- # Bones (Armature)
- #==================================================================================================
- #read the REFSKEL0 header
- indata = unpack('20s3i',pskfile.read(32))
- recCount = indata[3]
- printlog( "Nbr of REFSKEL0 records: " + str(recCount) + "\n")
- Bns = []
- bone = []
- nobone = 0
- #==================================================================================================
- # Bone Data
- #==================================================================================================
- counter = 0
- print ("---PRASE--BONES---")
- while counter < recCount:
- indata = unpack('64s3i11f',pskfile.read(120))
- #print( "DATA",str(indata))
- bone.append(indata)
-
- createbone = md5_bone()
- #temp_name = indata[0][:30]
- temp_name = indata[0]
-
- temp_name = bytes.decode(temp_name)
- temp_name = temp_name.lstrip(" ")
- temp_name = temp_name.rstrip(" ")
- temp_name = temp_name.strip()
- temp_name = temp_name.strip( bytes.decode(b'\x00'))
- print ("temp_name:", temp_name, "||")
- createbone.name = temp_name
- createbone.bone_index = counter
- createbone.parent_index = indata[3]
- createbone.bindpos[0] = indata[8]
- createbone.bindpos[1] = indata[9]
- createbone.bindpos[2] = indata[10]
- createbone.scale[0] = indata[12]
- createbone.scale[1] = indata[13]
- createbone.scale[2] = indata[14]
-
- #w,x,y,z
- if (counter == 0):#main parent
- print("no parent bone")
- createbone.bindmat = mathutils.Quaternion((indata[7],indata[4],indata[5],indata[6]))
- #createbone.bindmat = mathutils.Quaternion((indata[7],-indata[4],-indata[5],-indata[6]))
- else:#parent
- print("parent bone")
- createbone.bindmat = mathutils.Quaternion((indata[7],-indata[4],-indata[5],-indata[6]))
- #createbone.bindmat = mathutils.Quaternion((indata[7],indata[4],indata[5],indata[6]))
-
- md5_bones.append(createbone)
- counter = counter + 1
- bnstr = (str(indata[0]))
- Bns.append(bnstr)
-
- for pbone in md5_bones:
- pbone.parent = md5_bones[pbone.parent_index].name
-
- bonecount = 0
- for armbone in bone:
- temp_name = armbone[0][:30]
- #print ("BONE NAME: ",len(temp_name))
- temp_name=str((temp_name))
- #temp_name = temp_name[1]
- #print ("BONE NAME: ",temp_name)
- bonecount +=1
- print ("-------------------------")
- print ("----Creating--Armature---")
- print ("-------------------------")
-
- #================================================================================================
- #Check armature if exist if so create or update or remove all and addnew bone
- #================================================================================================
- #bpy.ops.object.mode_set(mode='OBJECT')
- meshname ="ArmObject"
- objectname = "armaturedata"
- bfound = False
- arm = None
- for obj in bpy.data.objects:
- if (obj.name == meshname):
- bfound = True
- arm = obj
- break
-
- if bfound == False:
- armdata = bpy.data.armatures.new(objectname)
- ob_new = bpy.data.objects.new(meshname, armdata)
- #ob_new = bpy.data.objects.new(meshname, 'ARMATURE')
- #ob_new.data = armdata
- bpy.context.scene.objects.link(ob_new)
- #bpy.ops.object.mode_set(mode='OBJECT')
- for i in bpy.context.scene.objects: i.select = False #deselect all objects
- ob_new.select = True
- #set current armature to edit the bone
- bpy.context.scene.objects.active = ob_new
- #set mode to able to edit the bone
- bpy.ops.object.mode_set(mode='EDIT')
- #newbone = ob_new.data.edit_bones.new('test')
- #newbone.tail.y = 1
- print("creating bone(s)")
- for bone in md5_bones:
- #print(dir(bone))
- newbone = ob_new.data.edit_bones.new(bone.name)
- #parent the bone
- parentbone = None
- print("bone name:",bone.name)
- #note bone location is set in the real space or global not local
- if bone.name != bone.parent:
-
- pos_x = bone.bindpos[0]
- pos_y = bone.bindpos[1]
- pos_z = bone.bindpos[2]
-
- #print( "LINKING:" , bone.parent ,"j")
- parentbone = ob_new.data.edit_bones[bone.parent]
- newbone.parent = parentbone
- rotmatrix = bone.bindmat.to_matrix().resize4x4().rotation_part()
-
- #parent_head = parentbone.head * parentbone.matrix.to_quat().inverse()
- #parent_tail = parentbone.tail * parentbone.matrix.to_quat().inverse()
- #location=Vector(pos_x,pos_y,pos_z)
- #set_position = (parent_tail - parent_head) + location
- #print("tmp head:",set_position)
-
- #pos_x = set_position.x
- #pos_y = set_position.y
- #pos_z = set_position.z
-
- newbone.head.x = parentbone.head.x + pos_x
- newbone.head.y = parentbone.head.y + pos_y
- newbone.head.z = parentbone.head.z + pos_z
- print("head:",newbone.head)
- newbone.tail.x = parentbone.head.x + (pos_x + bonesize * rotmatrix[1][0])
- newbone.tail.y = parentbone.head.y + (pos_y + bonesize * rotmatrix[1][1])
- newbone.tail.z = parentbone.head.z + (pos_z + bonesize * rotmatrix[1][2])
- else:
- rotmatrix = bone.bindmat.to_matrix().resize4x4().rotation_part()
- newbone.head.x = bone.bindpos[0]
- newbone.head.y = bone.bindpos[1]
- newbone.head.z = bone.bindpos[2]
- newbone.tail.x = bone.bindpos[0] + bonesize * rotmatrix[1][0]
- newbone.tail.y = bone.bindpos[1] + bonesize * rotmatrix[1][1]
- newbone.tail.z = bone.bindpos[2] + bonesize * rotmatrix[1][2]
- #print("no parent")
-
- bpy.context.scene.update()
-
- #==================================================================================================
- #END BONE DATA BUILD
- #==================================================================================================
- VtxCol = []
- for x in range(len(Bns)):
- #change the overall darkness of each material in a range between 0.1 and 0.9
- tmpVal = ((float(x)+1.0)/(len(Bns))*0.7)+0.1
- tmpVal = int(tmpVal * 256)
- tmpCol = [tmpVal,tmpVal,tmpVal,0]
- #Change the color of each material slightly
- if x % 3 == 0:
- if tmpCol[0] < 128: tmpCol[0] += 60
- else: tmpCol[0] -= 60
- if x % 3 == 1:
- if tmpCol[1] < 128: tmpCol[1] += 60
- else: tmpCol[1] -= 60
- if x % 3 == 2:
- if tmpCol[2] < 128: tmpCol[2] += 60
- else: tmpCol[2] -= 60
- #Add the material to the mesh
- VtxCol.append(tmpCol)
-
- #==================================================================================================
- # Bone Weight
- #==================================================================================================
- #read the RAWW0000 header
- indata = unpack('20s3i',pskfile.read(32))
- recCount = indata[3]
- printlog( "Nbr of RAWW0000 records: " + str(recCount) +"\n")
- #RAWW0000 fields: Weight|PntIdx|BoneIdx
- RWghts = []
- counter = 0
- while counter < recCount:
- counter = counter + 1
- indata = unpack('fii',pskfile.read(12))
- RWghts.append([indata[1],indata[2],indata[0]])
- #RWghts fields = PntIdx|BoneIdx|Weight
- RWghts.sort()
- printlog( "len(RWghts)=" + str(len(RWghts)) + "\n")
- #Tmsh.update()
-
- #set the Vertex Colors of the faces
- #face.v[n] = RWghts[0]
- #RWghts[1] = index of VtxCol
- """
- for x in range(len(Tmsh.faces)):
- for y in range(len(Tmsh.faces[x].v)):
- #find v in RWghts[n][0]
- findVal = Tmsh.faces[x].v[y].index
- n = 0
- while findVal != RWghts[n][0]:
- n = n + 1
- TmpCol = VtxCol[RWghts[n][1]]
- #check if a vertex has more than one influence
- if n != len(RWghts)-1:
- if RWghts[n][0] == RWghts[n+1][0]:
- #if there is more than one influence, use the one with the greater influence
- #for simplicity only 2 influences are checked, 2nd and 3rd influences are usually very small
- if RWghts[n][2] < RWghts[n+1][2]:
- TmpCol = VtxCol[RWghts[n+1][1]]
- Tmsh.faces[x].col.append(NMesh.Col(TmpCol[0],TmpCol[1],TmpCol[2],0))
- """
- if (DEBUGLOG):
- logf.close()
- #==================================================================================================
- #Building Mesh
- #==================================================================================================
- print("vertex:",len(verts),"faces:",len(faces))
- me_ob.vertices.add(len(verts))
- me_ob.faces.add(len(faces)//4)
+ global DEBUGLOG
+ print ("--------------------------------------------------")
+ print ("---------SCRIPT EXECUTING PYTHON IMPORTER---------")
+ print ("--------------------------------------------------")
+ print ("Importing file: ", infile)
+
+ md5_bones=[]
+ pskfile = open(infile,'rb')
+ if (DEBUGLOG):
+ logpath = infile.replace(".psk", ".txt")
+ print("logpath:",logpath)
+ logf = open(logpath,'w')
+
+ def printlog(strdata):
+ if (DEBUGLOG):
+ logf.write(strdata)
+
+ objName = infile.split('\\')[-1].split('.')[0]
+
+ me_ob = bpy.data.meshes.new(objName)
+ print("objName:",objName)
+ printlog(("New Mesh = " + me_ob.name + "\n"))
+ #read general header
+ indata = unpack('20s3i',pskfile.read(32))
+ #not using the general header at this time
+ #==================================================================================================
+ # vertex point
+ #==================================================================================================
+ #read the PNTS0000 header
+ indata = unpack('20s3i',pskfile.read(32))
+ recCount = indata[3]
+ printlog(( "Nbr of PNTS0000 records: " + str(recCount) + "\n"))
+ counter = 0
+ verts = []
+ while counter < recCount:
+ counter = counter + 1
+ indata = unpack('3f',pskfile.read(12))
+ #print(indata[0],indata[1],indata[2])
+ verts.extend([(indata[0],indata[1],indata[2])])
+ #Tmsh.vertices.append(NMesh.Vert(indata[0],indata[1],indata[2]))
+
+ #==================================================================================================
+ # UV
+ #==================================================================================================
+ #read the VTXW0000 header
+ indata = unpack('20s3i',pskfile.read(32))
+ recCount = indata[3]
+ printlog( "Nbr of VTXW0000 records: " + str(recCount)+ "\n")
+ counter = 0
+ UVCoords = []
+ #UVCoords record format = [index to PNTS, U coord, v coord]
+ while counter < recCount:
+ counter = counter + 1
+ indata = unpack('hhffhh',pskfile.read(16))
+ UVCoords.append([indata[0],indata[2],indata[3]])
+ #print([indata[0],indata[2],indata[3]])
+ #print([indata[1],indata[2],indata[3]])
+
+ #==================================================================================================
+ # Face
+ #==================================================================================================
+ #read the FACE0000 header
+ indata = unpack('20s3i',pskfile.read(32))
+ recCount = indata[3]
+ printlog( "Nbr of FACE0000 records: "+ str(recCount) + "\n")
+ #PSK FACE0000 fields: WdgIdx1|WdgIdx2|WdgIdx3|MatIdx|AuxMatIdx|SmthGrp
+ #associate MatIdx to an image, associate SmthGrp to a material
+ SGlist = []
+ counter = 0
+ faces = []
+ faceuv = []
+ while counter < recCount:
+ counter = counter + 1
+ indata = unpack('hhhbbi',pskfile.read(12))
+ #the psk values are: nWdgIdx1|WdgIdx2|WdgIdx3|MatIdx|AuxMatIdx|SmthGrp
+ #indata[0] = index of UVCoords
+ #UVCoords[indata[0]]=[index to PNTS, U coord, v coord]
+ #UVCoords[indata[0]][0] = index to PNTS
+ PNTSA = UVCoords[indata[0]][0]
+ PNTSB = UVCoords[indata[1]][0]
+ PNTSC = UVCoords[indata[2]][0]
+ #print(PNTSA,PNTSB,PNTSC) #face id vertex
+ #faces.extend([0,1,2,0])
+ faces.extend([PNTSA,PNTSB,PNTSC,0])
+ uv = []
+ u0 = UVCoords[indata[0]][1]
+ v0 = UVCoords[indata[0]][2]
+ uv.append([u0,v0])
+ u1 = UVCoords[indata[1]][1]
+ v1 = UVCoords[indata[1]][2]
+ uv.append([u1,v1])
+ u2 = UVCoords[indata[2]][1]
+ v2 = UVCoords[indata[2]][2]
+ uv.append([u2,v2])
+ faceuv.append(uv)
+ #print("UV: ",u0,v0)
+ #update the uv var of the last item in the Tmsh.faces list
+ # which is the face just added above
+ ##Tmsh.faces[-1].uv = [(u0,v0),(u1,v1),(u2,v2)]
+ #print("smooth:",indata[5])
+ #collect a list of the smoothing groups
+ if SGlist.count(indata[5]) == 0:
+ SGlist.append(indata[5])
+ print("smooth:",indata[5])
+ #assign a material index to the face
+ #Tmsh.faces[-1].materialIndex = SGlist.index(indata[5])
+ printlog( "Using Materials to represent PSK Smoothing Groups...\n")
+ #==========
+ # skip something...
+ #==========
+
+ #==================================================================================================
+ # Material
+ #==================================================================================================
+ ##
+ #read the MATT0000 header
+ indata = unpack('20s3i',pskfile.read(32))
+ recCount = indata[3]
+ printlog("Nbr of MATT0000 records: " + str(recCount) + "\n" )
+ printlog(" - Not importing any material data now. PSKs are texture wrapped! \n")
+ counter = 0
+ while counter < recCount:
+ counter = counter + 1
+ indata = unpack('64s6i',pskfile.read(88))
+ ##
+
+ #==================================================================================================
+ # Bones (Armature)
+ #==================================================================================================
+ #read the REFSKEL0 header
+ indata = unpack('20s3i',pskfile.read(32))
+ recCount = indata[3]
+ printlog( "Nbr of REFSKEL0 records: " + str(recCount) + "\n")
+ Bns = []
+ bone = []
+ nobone = 0
+ #==================================================================================================
+ # Bone Data
+ #==================================================================================================
+ counter = 0
+ print ("---PRASE--BONES---")
+ while counter < recCount:
+ indata = unpack('64s3i11f',pskfile.read(120))
+ #print( "DATA",str(indata))
+ bone.append(indata)
+
+ createbone = md5_bone()
+ #temp_name = indata[0][:30]
+ temp_name = indata[0]
+
+ temp_name = bytes.decode(temp_name)
+ temp_name = temp_name.lstrip(" ")
+ temp_name = temp_name.rstrip(" ")
+ temp_name = temp_name.strip()
+ temp_name = temp_name.strip( bytes.decode(b'\x00'))
+ print ("temp_name:", temp_name, "||")
+ createbone.name = temp_name
+ createbone.bone_index = counter
+ createbone.parent_index = indata[3]
+ createbone.bindpos[0] = indata[8]
+ createbone.bindpos[1] = indata[9]
+ createbone.bindpos[2] = indata[10]
+ createbone.scale[0] = indata[12]
+ createbone.scale[1] = indata[13]
+ createbone.scale[2] = indata[14]
+
+ #w,x,y,z
+ if (counter == 0):#main parent
+ print("no parent bone")
+ createbone.bindmat = mathutils.Quaternion((indata[7],indata[4],indata[5],indata[6]))
+ #createbone.bindmat = mathutils.Quaternion((indata[7],-indata[4],-indata[5],-indata[6]))
+ else:#parent
+ print("parent bone")
+ createbone.bindmat = mathutils.Quaternion((indata[7],-indata[4],-indata[5],-indata[6]))
+ #createbone.bindmat = mathutils.Quaternion((indata[7],indata[4],indata[5],indata[6]))
+
+ md5_bones.append(createbone)
+ counter = counter + 1
+ bnstr = (str(indata[0]))
+ Bns.append(bnstr)
+
+ for pbone in md5_bones:
+ pbone.parent = md5_bones[pbone.parent_index].name
+
+ bonecount = 0
+ for armbone in bone:
+ temp_name = armbone[0][:30]
+ #print ("BONE NAME: ",len(temp_name))
+ temp_name=str((temp_name))
+ #temp_name = temp_name[1]
+ #print ("BONE NAME: ",temp_name)
+ bonecount +=1
+ print ("-------------------------")
+ print ("----Creating--Armature---")
+ print ("-------------------------")
+
+ #================================================================================================
+ #Check armature if exist if so create or update or remove all and addnew bone
+ #================================================================================================
+ #bpy.ops.object.mode_set(mode='OBJECT')
+ meshname ="ArmObject"
+ objectname = "armaturedata"
+ bfound = False
+ arm = None
+ for obj in bpy.data.objects:
+ if (obj.name == meshname):
+ bfound = True
+ arm = obj
+ break
+
+ if bfound == False:
+ armdata = bpy.data.armatures.new(objectname)
+ ob_new = bpy.data.objects.new(meshname, armdata)
+ #ob_new = bpy.data.objects.new(meshname, 'ARMATURE')
+ #ob_new.data = armdata
+ bpy.context.scene.objects.link(ob_new)
+ #bpy.ops.object.mode_set(mode='OBJECT')
+ for i in bpy.context.scene.objects: i.select = False #deselect all objects
+ ob_new.select = True
+ #set current armature to edit the bone
+ bpy.context.scene.objects.active = ob_new
+ #set mode to able to edit the bone
+ bpy.ops.object.mode_set(mode='EDIT')
+ #newbone = ob_new.data.edit_bones.new('test')
+ #newbone.tail.y = 1
+ print("creating bone(s)")
+ for bone in md5_bones:
+ #print(dir(bone))
+ newbone = ob_new.data.edit_bones.new(bone.name)
+ #parent the bone
+ parentbone = None
+ print("bone name:",bone.name)
+ #note bone location is set in the real space or global not local
+ if bone.name != bone.parent:
+
+ pos_x = bone.bindpos[0]
+ pos_y = bone.bindpos[1]
+ pos_z = bone.bindpos[2]
+
+ #print( "LINKING:" , bone.parent ,"j")
+ parentbone = ob_new.data.edit_bones[bone.parent]
+ newbone.parent = parentbone
+ rotmatrix = bone.bindmat.to_matrix().resize4x4().rotation_part()
+
+ #parent_head = parentbone.head * parentbone.matrix.to_quat().inverse()
+ #parent_tail = parentbone.tail * parentbone.matrix.to_quat().inverse()
+ #location=Vector(pos_x,pos_y,pos_z)
+ #set_position = (parent_tail - parent_head) + location
+ #print("tmp head:",set_position)
+
+ #pos_x = set_position.x
+ #pos_y = set_position.y
+ #pos_z = set_position.z
+
+ newbone.head.x = parentbone.head.x + pos_x
+ newbone.head.y = parentbone.head.y + pos_y
+ newbone.head.z = parentbone.head.z + pos_z
+ print("head:",newbone.head)
+ newbone.tail.x = parentbone.head.x + (pos_x + bonesize * rotmatrix[1][0])
+ newbone.tail.y = parentbone.head.y + (pos_y + bonesize * rotmatrix[1][1])
+ newbone.tail.z = parentbone.head.z + (pos_z + bonesize * rotmatrix[1][2])
+ else:
+ rotmatrix = bone.bindmat.to_matrix().resize4x4().rotation_part()
+ newbone.head.x = bone.bindpos[0]
+ newbone.head.y = bone.bindpos[1]
+ newbone.head.z = bone.bindpos[2]
+ newbone.tail.x = bone.bindpos[0] + bonesize * rotmatrix[1][0]
+ newbone.tail.y = bone.bindpos[1] + bonesize * rotmatrix[1][1]
+ newbone.tail.z = bone.bindpos[2] + bonesize * rotmatrix[1][2]
+ #print("no parent")
+
+ bpy.context.scene.update()
+
+ #==================================================================================================
+ #END BONE DATA BUILD
+ #==================================================================================================
+ VtxCol = []
+ for x in range(len(Bns)):
+ #change the overall darkness of each material in a range between 0.1 and 0.9
+ tmpVal = ((float(x)+1.0)/(len(Bns))*0.7)+0.1
+ tmpVal = int(tmpVal * 256)
+ tmpCol = [tmpVal,tmpVal,tmpVal,0]
+ #Change the color of each material slightly
+ if x % 3 == 0:
+ if tmpCol[0] < 128: tmpCol[0] += 60
+ else: tmpCol[0] -= 60
+ if x % 3 == 1:
+ if tmpCol[1] < 128: tmpCol[1] += 60
+ else: tmpCol[1] -= 60
+ if x % 3 == 2:
+ if tmpCol[2] < 128: tmpCol[2] += 60
+ else: tmpCol[2] -= 60
+ #Add the material to the mesh
+ VtxCol.append(tmpCol)
+
+ #==================================================================================================
+ # Bone Weight
+ #==================================================================================================
+ #read the RAWW0000 header
+ indata = unpack('20s3i',pskfile.read(32))
+ recCount = indata[3]
+ printlog( "Nbr of RAWW0000 records: " + str(recCount) +"\n")
+ #RAWW0000 fields: Weight|PntIdx|BoneIdx
+ RWghts = []
+ counter = 0
+ while counter < recCount:
+ counter = counter + 1
+ indata = unpack('fii',pskfile.read(12))
+ RWghts.append([indata[1],indata[2],indata[0]])
+ #RWghts fields = PntIdx|BoneIdx|Weight
+ RWghts.sort()
+ printlog( "len(RWghts)=" + str(len(RWghts)) + "\n")
+ #Tmsh.update()
+
+ #set the Vertex Colors of the faces
+ #face.v[n] = RWghts[0]
+ #RWghts[1] = index of VtxCol
+ """
+ for x in range(len(Tmsh.faces)):
+ for y in range(len(Tmsh.faces[x].v)):
+ #find v in RWghts[n][0]
+ findVal = Tmsh.faces[x].v[y].index
+ n = 0
+ while findVal != RWghts[n][0]:
+ n = n + 1
+ TmpCol = VtxCol[RWghts[n][1]]
+ #check if a vertex has more than one influence
+ if n != len(RWghts)-1:
+ if RWghts[n][0] == RWghts[n+1][0]:
+ #if there is more than one influence, use the one with the greater influence
+ #for simplicity only 2 influences are checked, 2nd and 3rd influences are usually very small
+ if RWghts[n][2] < RWghts[n+1][2]:
+ TmpCol = VtxCol[RWghts[n+1][1]]
+ Tmsh.faces[x].col.append(NMesh.Col(TmpCol[0],TmpCol[1],TmpCol[2],0))
+ """
+ if (DEBUGLOG):
+ logf.close()
+ #==================================================================================================
+ #Building Mesh
+ #==================================================================================================
+ print("vertex:",len(verts),"faces:",len(faces))
+ me_ob.vertices.add(len(verts))
+ me_ob.faces.add(len(faces)//4)
- me_ob.vertices.foreach_set("co", unpack_list(verts))
-
- me_ob.faces.foreach_set("vertices_raw", faces)
- me_ob.faces.foreach_set("use_smooth", [False] * len(me_ob.faces))
- me_ob.update()
-
- #===================================================================================================
- #UV Setup
- #===================================================================================================
- texture = []
- texturename = "text1"
- #print(dir(bpy.data))
- if (len(faceuv) > 0):
- uvtex = me_ob.uv_textures.new() #add one uv texture
- for i, face in enumerate(me_ob.faces):
- blender_tface= uvtex.data[i] #face
- blender_tface.uv1 = faceuv[i][0] #uv = (0,0)
- blender_tface.uv2 = faceuv[i][1] #uv = (0,0)
- blender_tface.uv3 = faceuv[i][2] #uv = (0,0)
- texture.append(uvtex)
-
- #for tex in me_ob.uv_textures:
- #print("mesh tex:",dir(tex))
- #print((tex.name))
-
- #===================================================================================================
- #Material Setup
- #===================================================================================================
- materialname = "mat"
- materials = []
-
- matdata = bpy.data.materials.new(materialname)
- #color is 0 - 1 not in 0 - 255
- #matdata.mirror_color=(float(0.04),float(0.08),float(0.44))
- matdata.diffuse_color=(float(0.04),float(0.08),float(0.44))#blue color
- #print(dir(me_ob.uv_textures[0].data))
- texdata = None
- texdata = bpy.data.textures[len(bpy.data.textures)-1]
- if (texdata != None):
- #print(texdata.name)
- #print(dir(texdata))
- texdata.name = "texturelist1"
- matdata.active_texture = texdata
- materials.append(matdata)
- #matdata = bpy.data.materials.new(materialname)
- #materials.append(matdata)
- #= make sure the list isnt too big
- for material in materials:
- #add material to the mesh list of materials
- me_ob.materials.link(material)
- #===================================================================================================
- #
- #===================================================================================================
- obmesh = bpy.data.objects.new(objName,me_ob)
- #check if there is a material to set to
- if len(materials) > 0:
- obmesh.active_material = materials[0] #material setup tmp
-
- bpy.context.scene.objects.link(obmesh)
-
- bpy.context.scene.update()
-
- print ("PSK2Blender completed")
+ me_ob.vertices.foreach_set("co", unpack_list(verts))
+
+ me_ob.faces.foreach_set("vertices_raw", faces)
+ me_ob.faces.foreach_set("use_smooth", [False] * len(me_ob.faces))
+ me_ob.update()
+
+ #===================================================================================================
+ #UV Setup
+ #===================================================================================================
+ texture = []
+ texturename = "text1"
+ #print(dir(bpy.data))
+ if (len(faceuv) > 0):
+ uvtex = me_ob.uv_textures.new() #add one uv texture
+ for i, face in enumerate(me_ob.faces):
+ blender_tface= uvtex.data[i] #face
+ blender_tface.uv1 = faceuv[i][0] #uv = (0,0)
+ blender_tface.uv2 = faceuv[i][1] #uv = (0,0)
+ blender_tface.uv3 = faceuv[i][2] #uv = (0,0)
+ texture.append(uvtex)
+
+ #for tex in me_ob.uv_textures:
+ #print("mesh tex:",dir(tex))
+ #print((tex.name))
+
+ #===================================================================================================
+ #Material Setup
+ #===================================================================================================
+ materialname = "mat"
+ materials = []
+
+ matdata = bpy.data.materials.new(materialname)
+ #color is 0 - 1 not in 0 - 255
+ #matdata.mirror_color=(float(0.04),float(0.08),float(0.44))
+ matdata.diffuse_color=(float(0.04),float(0.08),float(0.44))#blue color
+ #print(dir(me_ob.uv_textures[0].data))
+ texdata = None
+ texdata = bpy.data.textures[len(bpy.data.textures)-1]
+ if (texdata != None):
+ #print(texdata.name)
+ #print(dir(texdata))
+ texdata.name = "texturelist1"
+ matdata.active_texture = texdata
+ materials.append(matdata)
+ #matdata = bpy.data.materials.new(materialname)
+ #materials.append(matdata)
+ #= make sure the list isnt too big
+ for material in materials:
+ #add material to the mesh list of materials
+ me_ob.materials.link(material)
+ #===================================================================================================
+ #
+ #===================================================================================================
+ obmesh = bpy.data.objects.new(objName,me_ob)
+ #check if there is a material to set to
+ if len(materials) > 0:
+ obmesh.active_material = materials[0] #material setup tmp
+
+ bpy.context.scene.objects.link(obmesh)
+
+ bpy.context.scene.update()
+
+ print ("PSK2Blender completed")
#End of def pskimport#########################
def getInputFilename(filename):
- checktype = filename.split('\\')[-1].split('.')[1]
- print ("------------",filename)
- if checktype.upper() != 'PSK':
- print (" Selected file = ",filename)
- raise (IOError, "The selected input file is not a *.psk file")
- pskimport(filename)
+ checktype = filename.split('\\')[-1].split('.')[1]
+ print ("------------",filename)
+ if checktype.upper() != 'PSK':
+ print (" Selected file = ",filename)
+ raise (IOError, "The selected input file is not a *.psk file")
+ pskimport(filename)
from bpy.props import *
class IMPORT_OT_psk(bpy.types.Operator):
- '''Load a skeleton mesh psk File'''
- bl_idname = "import_scene.psk"
- bl_label = "Import PSK"
+ '''Load a skeleton mesh psk File'''
+ bl_idname = "import_scene.psk"
+ bl_label = "Import PSK"
- # List of operator properties, the attributes will be assigned
- # to the class instance from the operator settings before calling.
- filepath = StringProperty(name="File Path", description="Filepath used for importing the OBJ file", maxlen= 1024, default= "")
+ # List of operator properties, the attributes will be assigned
+ # to the class instance from the operator settings before calling.
+ filepath = StringProperty(name="File Path", description="Filepath used for importing the OBJ file", maxlen= 1024, default= "")
- def execute(self, context):
- getInputFilename(self.properties.filepath)
- return {'FINISHED'}
+ def execute(self, context):
+ getInputFilename(self.properties.filepath)
+ return {'FINISHED'}
- def invoke(self, context, event):
- wm = context.manager
- wm.add_fileselect(self)
- return {'RUNNING_MODAL'}
+ def invoke(self, context, event):
+ wm = context.manager
+ wm.add_fileselect(self)
+ return {'RUNNING_MODAL'}
def menu_func(self, context):
self.layout.operator(IMPORT_OT_psk.bl_idname, text="Skeleton Mesh (.psk)")
@@ -592,8 +595,8 @@ def unregister():
bpy.types.INFO_MT_file_import.remove(menu_func)
if __name__ == "__main__":
- register()
+ register()
-#note this only read the data and will not be place in the scene
+#note this only read the data and will not be place in the scene
#getInputFilename('C:\\blenderfiles\\BotA.psk')
#getInputFilename('C:\\blenderfiles\\AA.PSK')
diff --git a/io_mesh_raw/__init__.py b/io_mesh_raw/__init__.py
index 2e4cf72f..d52a5808 100644
--- a/io_mesh_raw/__init__.py
+++ b/io_mesh_raw/__init__.py
@@ -17,15 +17,18 @@
# ##### END GPL LICENSE BLOCK #####
bl_addon_info = {
- "name": "Import/Export: Raw mesh",
+ "name": "Raw mesh",
"author": "Anthony D,Agostino (Scorpius), Aurel Wildfellner",
- "version": "0.2",
+ "version": (0,2),
"blender": (2, 5, 3),
+ "api": 31667,
"location": "File > Import/Export > Raw faces ",
"description": "Import Raw Faces (.raw format)",
"warning": "",
- "wiki_url": "http://wiki.blender.org/index.php/Extensions:2.5/Py/Scripts/File_I-O/Raw_Mesh_IO",
- "tracker_url": "https://projects.blender.org/tracker/index.php?func=detail&aid=21733&group_id=153&atid=469",
+ "wiki_url": "http://wiki.blender.org/index.php/Extensions:2.5/Py/"\
+ "Scripts/File_I-O/Raw_Mesh_IO",
+ "tracker_url": "https://projects.blender.org/tracker/index.php?"\
+ "func=detail&aid=21733&group_id=153&atid=469",
"category": "Import/Export"}
import bpy
diff --git a/io_mesh_stl/__init__.py b/io_mesh_stl/__init__.py
index a5450cec..3c2669d9 100644
--- a/io_mesh_stl/__init__.py
+++ b/io_mesh_stl/__init__.py
@@ -17,15 +17,18 @@
# ##### END GPL LICENSE BLOCK #####
bl_addon_info = {
- "name": "Import/Export: STL format",
+ "name": "STL format",
"author": "Guillaume Bouchard (Guillaum)",
- "version": "1",
+ "version": (1,),
"blender": (2, 5, 3),
+ "api": 31667,
"location": "File > Import/Export > Stl",
- "description": "Import/Export Stl files",
+ "description": "Import/Export STL files",
"warning": "",
- "wiki_url": "http://wiki.blender.org/index.php/Extensions:2.5/Py/Scripts/File I-O/STL",
- "tracker_url": "https://projects.blender.org/tracker/index.php?func=detail&aid=22837&group_id=153&atid=469",
+ "wiki_url": "http://wiki.blender.org/index.php/Extensions:2.5/Py/"\
+ "Scripts/File I-O/STL",
+ "tracker_url": "https://projects.blender.org/tracker/index.php?"
+ "func=detail&aid=22837&group_id=153&atid=469",
"category": "Import/Export"}
# @todo write the wiki page
diff --git a/mesh_relax.py b/mesh_relax.py
index 717a1099..1a971a93 100644
--- a/mesh_relax.py
+++ b/mesh_relax.py
@@ -22,15 +22,18 @@
# ***** END GPL LICENCE BLOCK *****
bl_addon_info = {
- "name": "Mesh: Relax",
+ "name": "Relax",
"author": "Fabian Fricke",
- "version": "1.1 2010/04/22",
+ "version": (1,1),
"blender": (2, 5, 3),
+ "api": 31667,
"location": "View3D > Specials > Relax ",
"description": "Relax the selected verts while retaining the shape",
"warning": "",
- "wiki_url": "http://wiki.blender.org/index.php/Extensions:2.5/Py/Scripts/Modeling/Relax",
- "tracker_url": "https://projects.blender.org/tracker/index.php?func=detail&aid=21421&group_id=153&atid=469",
+ "wiki_url": "http://wiki.blender.org/index.php/Extensions:2.5/Py/"\
+ "Scripts/Modeling/Relax",
+ "tracker_url": "https://projects.blender.org/tracker/index.php?"\
+ "func=detail&aid=21421&group_id=153&atid=469",
"category": "Mesh"}
"""
diff --git a/mesh_surface_sketch.py b/mesh_surface_sketch.py
index 64787ef3..5aa7eadf 100644
--- a/mesh_surface_sketch.py
+++ b/mesh_surface_sketch.py
@@ -17,15 +17,18 @@
# ##### END GPL LICENSE BLOCK #####
bl_addon_info = {
- "name": "Mesh: Surface Sketch",
+ "name": "Surface Sketch",
"author": "Eclectiel",
- "version": "0.8 Beta",
+ "version": (0,8),
"blender": (2, 5, 3),
+ "api": 31667,
"location": "View3D > EditMode > ToolShelf",
"description": "Draw meshes and re-topologies with Grease Pencil",
- "warning": "",
- "wiki_url": "http://wiki.blender.org/index.php/Extensions:2.5/Py/Scripts/Mesh/Surface_Sketch",
- "tracker_url": "https://projects.blender.org/tracker/index.php?func=detail&aid=22062&group_id=153&atid=469",
+ "warning": "Beta",
+ "wiki_url": "http://wiki.blender.org/index.php/Extensions:2.5/Py/"\
+ "Scripts/Mesh/Surface_Sketch",
+ "tracker_url": "https://projects.blender.org/tracker/index.php?"\
+ "func=detail&aid=22062&group_id=153&atid=469",
"category": "Mesh"}
diff --git a/object_add_chain.py b/object_add_chain.py
index 25c85080..0cc997b9 100644
--- a/object_add_chain.py
+++ b/object_add_chain.py
@@ -17,15 +17,18 @@
# ##### END GPL LICENSE BLOCK #####
bl_addon_info = {
- "name": "Object: Add Chain",
+ "name": "Add Chain",
"author": "Brian Hinton (Nichod)",
- "version": "0.1",
+ "version": (0,1),
"blender": (2, 5, 3),
+ "api": 31667,
"location": "View3D > Add > Mesh > Chain",
"description": "Adds Chain with curve guide for easy creation",
"warning": "",
- "wiki_url": "http://wiki.blender.org/index.php/Extensions:2.5/Py/Scripts/Object/Add_Chain",
- "tracker_url": "https://projects.blender.org/tracker/index.php?func=detail&aid=22203&group_id=153&atid=469",
+ "wiki_url": "http://wiki.blender.org/index.php/Extensions:2.5/Py/"\
+ "Scripts/Object/Add_Chain",
+ "tracker_url": "https://projects.blender.org/tracker/index.php?"\
+ "func=detail&aid=22203&group_id=153&atid=469",
"category": "Object"}
import bpy
diff --git a/object_cloud_gen.py b/object_cloud_gen.py
index 89c61be9..d5203705 100644
--- a/object_cloud_gen.py
+++ b/object_cloud_gen.py
@@ -17,15 +17,18 @@
# ##### END GPL LICENSE BLOCK #####
bl_addon_info = {
- "name": "Object: Cloud Generator",
+ "name": "Cloud Generator",
"author": "Nick Keeline(nrk)",
- "version": "0.7",
+ "version": (0,7),
"blender": (2, 5, 3),
+ "api": 31667,
"location": "Tool Shelf ",
"description": "Creates Volumetric Clouds",
"warning": "returns error at line 542",
- "wiki_url": "http://wiki.blender.org/index.php/Extensions:2.5/Py/Scripts/Object/Cloud_Gen",
- "tracker_url": "https://projects.blender.org/tracker/index.php?func=detail&aid=22015&group_id=153&atid=469",
+ "wiki_url": "http://wiki.blender.org/index.php/Extensions:2.5/Py/"\
+ "Scripts/Object/Cloud_Gen",
+ "tracker_url": "https://projects.blender.org/tracker/index.php?"\
+ "func=detail&aid=22015&group_id=153&atid=469",
"category": "Object"}
"""
diff --git a/object_fracture/__init__.py b/object_fracture/__init__.py
index 6ab7591a..182298fa 100644
--- a/object_fracture/__init__.py
+++ b/object_fracture/__init__.py
@@ -17,15 +17,18 @@
# ##### END GPL LICENSE BLOCK #####
bl_addon_info = {
- "name": "Object: Fracture Tools",
+ "name": "Fracture Tools",
"author": "pildanovak",
- "version": "2.0",
+ "version": (2,0),
"blender": (2, 5, 3),
+ "api": 31667,
"location": "Fracture tools (Search > Fracture Object & Add -> Fracture Helper Objects",
"description": "Fractured Object, Bomb, Projectile, Recorder",
"warning": "script is returning errors",
- "wiki_url": "http://wiki.blender.org/index.php/Extensions:2.5/Py/Scripts/Object/Fracture",
- "tracker_url": "https://projects.blender.org/tracker/index.php?func=detail&aid=21793&group_id=153&atid=469",
+ "wiki_url": "http://wiki.blender.org/index.php/Extensions:2.5/Py/"\
+ "Scripts/Object/Fracture",
+ "tracker_url": "https://projects.blender.org/tracker/index.php?"\
+ "func=detail&aid=21793&group_id=153&atid=469",
"category": "Object"}
diff --git a/render_povray/__init__.py b/render_povray/__init__.py
index 71f79ac7..983f2eb0 100644
--- a/render_povray/__init__.py
+++ b/render_povray/__init__.py
@@ -17,15 +17,18 @@
# ##### END GPL LICENSE BLOCK #####
bl_addon_info = {
- "name": "Render: PovRay",
+ "name": "PovRay",
"author": "Campbell Barton",
- "version": "0.1",
+ "version": (0,1),
"blender": (2, 5, 4),
+ "api": 31667,
"location": "Info Header (engine dropdown)",
"description": "Basic povray integration for blender",
"warning": "",
- "wiki_url": "http://wiki.blender.org/index.php/Extensions:2.5/Py/Scripts/Render/PovRay",
- "tracker_url": "https://projects.blender.org/tracker/index.php?func=detail&aid=23145&group_id=153&atid=469",
+ "wiki_url": "http://wiki.blender.org/index.php/Extensions:2.5/Py/"\
+ "Scripts/Render/PovRay",
+ "tracker_url": "https://projects.blender.org/tracker/index.php?"\
+ "func=detail&aid=23145&group_id=153&atid=469",
"category": "Render"}
try:
diff --git a/render_renderfarmfi.py b/render_renderfarmfi.py
index 54c05442..5a90e28c 100644
--- a/render_renderfarmfi.py
+++ b/render_renderfarmfi.py
@@ -17,15 +17,18 @@
# ##### END GPL LICENSE BLOCK #####
bl_addon_info = {
- "name": "Render: Renderfarm.fi",
+ "name": "Renderfarm.fi",
"author": "Nathan Letwory <nathan@letworyinteractive.com>, Jesse Kaukonen <jesse.kaukonen@gmail.com>",
- "version": "3",
+ "version": (3,),
"blender": (2, 5, 3),
+ "api": 31667,
"location": "Render > Engine > Renderfarm.fi",
"description": "Send .blend as session to http://www.renderfarm.fi to render",
"warning": "",
- "wiki_url": "http://wiki.blender.org/index.php/Extensions:2.5/Py/Scripts/Render/Renderfarm.fi",
- "tracker_url": "https://projects.blender.org/tracker/index.php?func=detail&aid=22927&group_id=153&atid=469",
+ "wiki_url": "http://wiki.blender.org/index.php/Extensions:2.5/Py/"\
+ "Scripts/Render/Renderfarm.fi",
+ "tracker_url": "https://projects.blender.org/tracker/index.php?"\
+ "func=detail&aid=22927&group_id=153&atid=469",
"category": "Render"}
"""
@@ -40,7 +43,7 @@ import xmlrpc.client
import math
from os.path import abspath, isabs, join, isfile
-bpy.CURRENT_VERSION = int(bl_addon_info["version"])
+bpy.CURRENT_VERSION = bl_addon_info["version"][0]
bpy.found_newer_version = False
bpy.up_to_date = False
bpy.download_location = 'http://www.renderfarm.fi/blender'
diff --git a/space_view3d_align_tools.py b/space_view3d_align_tools.py
index 538b776b..0610920a 100644
--- a/space_view3d_align_tools.py
+++ b/space_view3d_align_tools.py
@@ -20,15 +20,18 @@
# ***** END GPL LICENCE BLOCK *****
bl_addon_info = {
- "name": "3D View: Align Tools",
+ "name": "Align Tools",
"author": "Gabriel Beaudin (gabhead)",
- "version": "0.1",
+ "version": (0,1),
"blender": (2, 5, 3),
+ "api": 31667,
"location": "Tool Shelf",
"description": "Align selected objects to the active object",
"warning": "",
- "wiki_url": "http://wiki.blender.org/index.php/Extensions:2.5/Py/Scripts/3D interaction/Align_Tools",
- "tracker_url": "https://projects.blender.org/tracker/index.php?func=detail&aid==22389&group_id=153&atid=468",
+ "wiki_url": "http://wiki.blender.org/index.php/Extensions:2.5/Py/"\
+ "Scripts/3D interaction/Align_Tools",
+ "tracker_url": "https://projects.blender.org/tracker/index.php?"\
+ "func=detail&aid==22389&group_id=153&atid=468",
"category": "3D View"}
"""Align Selected Objects"""
diff --git a/space_view3d_materials_utils.py b/space_view3d_materials_utils.py
index 88ac72f5..ba5b6818 100644
--- a/space_view3d_materials_utils.py
+++ b/space_view3d_materials_utils.py
@@ -23,15 +23,18 @@
# ##### END GPL LICENSE BLOCK #####
bl_addon_info = {
- "name": "3D View: Material Utils",
+ "name": "Material Utils",
"author": "michaelw",
- "version": "1.3",
+ "version": (1,3),
"blender": (2, 5, 3),
+ "api": 31667,
"location": "View3D > Q key",
"description": "Menu of material tools (assign, select by etc) in the 3D View",
"warning": "",
- "wiki_url": "http://wiki.blender.org/index.php/Extensions:2.5/Py/Scripts/3D interaction/Materials Utils",
- "tracker_url": "https://projects.blender.org/tracker/index.php?func=detail&aid=22140&group_id=153&atid=469",
+ "wiki_url": "http://wiki.blender.org/index.php/Extensions:2.5/Py/"\
+ "Scripts/3D interaction/Materials Utils",
+ "tracker_url": "https://projects.blender.org/tracker/index.php?"\
+ "func=detail&aid=22140&group_id=153&atid=469",
"category": "3D View"}
"""
diff --git a/space_view3d_panel_measure.py b/space_view3d_panel_measure.py
index 194fb066..1b9c5b8a 100644
--- a/space_view3d_panel_measure.py
+++ b/space_view3d_panel_measure.py
@@ -17,10 +17,11 @@
# ##### END GPL LICENSE BLOCK #####
bl_addon_info = {
- "name": "3D View: Measure Panel",
+ "name": "Measure Panel",
"author": "Buerbaum Martin (Pontiac)",
- "version": "0.7.7",
+ "version": (0,7,7),
"blender": (2, 5, 3),
+ "api": 31667,
"location": "View3D > Properties > Measure",
"description": "Measure distances between objects",
"warning": "",
@@ -57,7 +58,7 @@ It's very helpful to use one or two "Empty" objects with
"Snap during transform" enabled for fast measurement.
Version history:
-v0.7.7 - One more change to the callback registration code.
+v0.7.7 - One more change to he callback registration code.
Now it should finally work as intended.
v0.7.6 - API changes (r885, r886) - register & unregister function
v0.7.5.3 - Small fix for bug in v0.7.5.1
diff --git a/space_view3d_property_chart.py b/space_view3d_property_chart.py
index e7203eac..290b2941 100644
--- a/space_view3d_property_chart.py
+++ b/space_view3d_property_chart.py
@@ -19,15 +19,18 @@
# ***** END GPL LICENCE BLOCK *****
bl_addon_info = {
- "name": "3D View: Object Property Chart",
+ "name": "Object Property Chart",
"author": "Campbell Barton (ideasman42)",
- "version": "0.1",
+ "version": (0,1),
"blender": (2, 5, 3),
+ "api": 31667,
"location": "Tool Shelf",
"description": "Edit arbitrary selected properties for objects of the same type",
"warning": "",
- "wiki_url": "http://wiki.blender.org/index.php/Extensions:2.5/Py/Scripts/3D interaction/Object Property Chart",
- "tracker_url": "https://projects.blender.org/tracker/index.php?func=detail&aid=22701&group_id=153&atid=469",
+ "wiki_url": "http://wiki.blender.org/index.php/Extensions:2.5/Py/"\
+ "Scripts/3D interaction/Object Property Chart",
+ "tracker_url": "https://projects.blender.org/tracker/index.php?"\
+ "func=detail&aid=22701&group_id=153&atid=469",
"category": "3D View"}
"""List properties of selected objects"""
diff --git a/space_view3d_spacebar_menu.py b/space_view3d_spacebar_menu.py
index dc4b851e..f41e51d8 100644
--- a/space_view3d_spacebar_menu.py
+++ b/space_view3d_spacebar_menu.py
@@ -23,15 +23,18 @@
# ##### END GPL LICENSE BLOCK #####
bl_addon_info = {
- "name": "3D View: Dynamic Spacebar Menu",
+ "name": "Dynamic Spacebar Menu",
"author": "JayDez, sim88, meta-androcto, sam",
- "version": "1.5",
+ "version": (1,5),
"blender": (2, 5, 3),
+ "api": 31667,
"location": "View3D > Spacebar",
"description": "Context sensitive spacebar menu",
"warning": "",
- "wiki_url": "http://wiki.blender.org/index.php/Extensions:2.5/Py/Scripts/3D_interaction/Dynamic_Spacebar_Menu",
- "tracker_url": "https://projects.blender.org/tracker/index.php?func=detail&aid=22060&group_id=153&atid=469",
+ "wiki_url": "http://wiki.blender.org/index.php/Extensions:2.5/Py/"\
+ "Scripts/3D_interaction/Dynamic_Spacebar_Menu",
+ "tracker_url": "https://projects.blender.org/tracker/index.php?"\
+ "func=detail&aid=22060&group_id=153&atid=469",
"category": "3D View"}
"""
diff --git a/system_blend_info.py b/system_blend_info.py
index 2c186b83..d137e44c 100644
--- a/system_blend_info.py
+++ b/system_blend_info.py
@@ -21,10 +21,11 @@
# ***** END GPL LICENCE BLOCK *****
bl_addon_info = {
- "name": "System: Scene Information",
+ "name": "Scene Information",
"author": "uselessdreamer",
- "version": "0.3",
+ "version": (0,3),
"blender": (2, 5, 3),
+ "api": 31667,
"location": "Properties space > Scene tab > Blend Info panel",
"description": "Show information about the .blend",
"warning": "",