git.blender.org/blender.git
author     Tamito Kajiyama <rd6t-kjym@asahi-net.or.jp>  2011-01-09 20:56:26 +0300
committer  Tamito Kajiyama <rd6t-kjym@asahi-net.or.jp>  2011-01-09 20:56:26 +0300
commit     e6f92e2b91db2c4cccf68e08af991043d3401ae6 (patch)
tree       8b70a4130cf08a45e8afbe4f31f3e98e166dd51f
parent     0ec378f3fea3be9235b5bc8cb976d6126ae808f5 (diff)
parent     7bc46973d2f150faef4b210b19172ddbfd35c6ef (diff)
Merged changes in the trunk up to revision 34201.
-rw-r--r--  CMakeLists.txt | 2
-rw-r--r--  intern/audaspace/Python/AUD_PyAPI.cpp | 6
-rw-r--r--  release/scripts/op/io_anim_bvh/export_bvh.py | 11
-rw-r--r--  release/scripts/op/io_scene_3ds/import_3ds.py | 10
-rw-r--r--  release/scripts/op/io_scene_fbx/export_fbx.py | 859
-rw-r--r--  release/scripts/op/io_shape_mdd/export_mdd.py | 11
-rw-r--r--  release/scripts/op/uvcalc_smart_project.py | 6
-rw-r--r--  source/blender/blenkernel/intern/depsgraph.c | 2
-rw-r--r--  source/blender/blenkernel/intern/key.c | 8
-rw-r--r--  source/blender/blenkernel/intern/object.c | 2
-rw-r--r--  source/blender/blenlib/BLI_utildefines.h | 12
-rw-r--r--  source/blender/editors/interface/interface_icons.c | 2
-rw-r--r--  source/blender/editors/interface/resources.c | 2
-rw-r--r--  source/blender/editors/space_view3d/drawobject.c | 16
-rw-r--r--  source/blender/editors/transform/transform_manipulator.c | 2
-rw-r--r--  source/blender/makesrna/intern/rna_access.c | 4
-rw-r--r--  source/blender/python/generic/IDProp.c | 4
-rw-r--r--  source/blender/python/generic/bgl.c | 6
-rw-r--r--  source/blender/python/generic/mathutils_matrix.c | 94
-rw-r--r--  source/blender/python/generic/mathutils_matrix.h | 2
-rw-r--r--  source/blender/python/generic/mathutils_quat.c | 2
-rw-r--r--  source/blender/python/generic/mathutils_vector.c | 4
-rw-r--r--  source/blender/python/intern/bpy_interface.c | 4
-rw-r--r--  source/blender/python/intern/bpy_props.c | 2
-rw-r--r--  source/blender/python/intern/bpy_rna.c | 16
-rw-r--r--  source/blender/python/intern/bpy_rna_array.c | 10
-rw-r--r--  source/blender/windowmanager/intern/wm_files.c | 2
-rw-r--r--  source/gameengine/VideoTexture/FilterBlueScreen.cpp | 4
-rw-r--r--  source/gameengine/VideoTexture/FilterColor.cpp | 8
-rw-r--r--  source/gameengine/VideoTexture/ImageRender.cpp | 2
-rw-r--r--  source/gameengine/VideoTexture/ImageViewport.cpp | 4
-rw-r--r--  source/gameengine/VideoTexture/VideoBase.cpp | 2
32 files changed, 542 insertions(+), 579 deletions(-)
diff --git a/CMakeLists.txt b/CMakeLists.txt
index 772bca71fb5..3a17eaf3e93 100644
--- a/CMakeLists.txt
+++ b/CMakeLists.txt
@@ -129,7 +129,7 @@ option(WITH_PYTHON_INSTALL "Copy system python into the blender install fo
option(WITH_CXX_GUARDEDALLOC "Enable GuardedAlloc for C++ memory allocation tracking" OFF)
mark_as_advanced(WITH_CXX_GUARDEDALLOC)
-option(WITH_ASSERT_ABORT "Call abort() when raising an assertion through BKE_assert()" OFF)
+option(WITH_ASSERT_ABORT "Call abort() when raising an assertion through BLI_assert()" OFF)
mark_as_advanced(WITH_ASSERT_ABORT)
if(APPLE)
diff --git a/intern/audaspace/Python/AUD_PyAPI.cpp b/intern/audaspace/Python/AUD_PyAPI.cpp
index d3f42419494..9620f26cc33 100644
--- a/intern/audaspace/Python/AUD_PyAPI.cpp
+++ b/intern/audaspace/Python/AUD_PyAPI.cpp
@@ -851,7 +851,7 @@ Factory_filter(Factory* self, PyObject* args)
return NULL;
}
- if(!PySequence_Length(py_b) || (py_a != NULL && !PySequence_Length(py_a)))
+ if(!PySequence_Size(py_b) || (py_a != NULL && !PySequence_Size(py_a)))
{
PyErr_SetString(PyExc_ValueError, "The sequence has to contain at least one value!");
return NULL;
@@ -862,7 +862,7 @@ Factory_filter(Factory* self, PyObject* args)
float value;
int result;
- for(int i = 0; i < PySequence_Length(py_b); i++)
+ for(int i = 0; i < PySequence_Size(py_b); i++)
{
py_value = PySequence_GetItem(py_b, i);
result = PyArg_Parse(py_value, "f:filter", &value);
@@ -876,7 +876,7 @@ Factory_filter(Factory* self, PyObject* args)
if(py_a)
{
- for(int i = 0; i < PySequence_Length(py_a); i++)
+ for(int i = 0; i < PySequence_Size(py_a); i++)
{
py_value = PySequence_GetItem(py_a, i);
result = PyArg_Parse(py_value, "f:filter", &value);
diff --git a/release/scripts/op/io_anim_bvh/export_bvh.py b/release/scripts/op/io_anim_bvh/export_bvh.py
index 334f47ca14a..dc7b4207c73 100644
--- a/release/scripts/op/io_anim_bvh/export_bvh.py
+++ b/release/scripts/op/io_anim_bvh/export_bvh.py
@@ -76,7 +76,10 @@ def write_armature(context, filepath, frame_start, frame_end, global_scale=1.0):
file.write("%s{\n" % indent_str)
file.write("%s\tOFFSET %.6f %.6f %.6f\n" % (indent_str, loc.x * global_scale, loc.y * global_scale, loc.z * global_scale))
- file.write("%s\tCHANNELS 6 Xposition Yposition Zposition Xrotation Yrotation Zrotation\n" % indent_str)
+ if bone.use_connect and bone.parent:
+ file.write("%s\tCHANNELS 3 Xrotation Yrotation Zrotation\n" % indent_str)
+ else:
+ file.write("%s\tCHANNELS 6 Xposition Yposition Zposition Xrotation Yrotation Zrotation\n" % indent_str)
if my_children:
# store the location for the children
@@ -133,6 +136,7 @@ def write_armature(context, filepath, frame_start, frame_end, global_scale=1.0):
"rest_arm_imat", # rest_arm_mat inverted
"rest_local_imat", # rest_local_mat inverted
"prev_euler", # last used euler to preserve euler compability in between keyframes
+ "connected", # is the bone connected to the parent bone?
)
def __init__(self, bone_name):
@@ -153,6 +157,7 @@ def write_armature(context, filepath, frame_start, frame_end, global_scale=1.0):
self.parent = None
self.prev_euler = Euler((0.0, 0.0, 0.0))
+ self.connected = (self.rest_bone.use_connect and self.rest_bone.parent)
def update_posedata(self):
self.pose_mat = self.pose_bone.matrix
@@ -206,7 +211,9 @@ def write_armature(context, filepath, frame_start, frame_end, global_scale=1.0):
# keep eulers compatible, no jumping on interpolation.
rot = mat_final.rotation_part().invert().to_euler('XYZ', dbone.prev_euler)
- file.write("%.6f %.6f %.6f " % (loc * global_scale)[:])
+ if not dbone.connected:
+ file.write("%.6f %.6f %.6f " % (loc * global_scale)[:])
+
file.write("%.6f %.6f %.6f " % (-degrees(rot[0]), -degrees(rot[1]), -degrees(rot[2])))
dbone.prev_euler = rot
diff --git a/release/scripts/op/io_scene_3ds/import_3ds.py b/release/scripts/op/io_scene_3ds/import_3ds.py
index d3f5a5d3b2f..7ffdcd3c853 100644
--- a/release/scripts/op/io_scene_3ds/import_3ds.py
+++ b/release/scripts/op/io_scene_3ds/import_3ds.py
@@ -612,11 +612,11 @@ def process_next_chunk(file, previous_chunk, importedObjects, IMAGE_SEARCH):
data = list( struct.unpack('<ffffffffffff', temp_data) )
new_chunk.bytes_read += STRUCT_SIZE_4x3MAT
- contextMatrix_rot = mathutils.Matrix(\
- data[:3] + [0],\
- data[3:6] + [0],\
- data[6:9] + [0],\
- data[9:] + [1])
+ contextMatrix_rot = mathutils.Matrix((data[:3] + [0], \
+ data[3:6] + [0], \
+ data[6:9] + [0], \
+ data[9:] + [1], \
+ ))
elif (new_chunk.ID == MAT_MAP_FILEPATH):
texture_name, read_str_len = read_string(file)
diff --git a/release/scripts/op/io_scene_fbx/export_fbx.py b/release/scripts/op/io_scene_fbx/export_fbx.py
index 0be6a01aef2..40cac6ddb4d 100644
--- a/release/scripts/op/io_scene_fbx/export_fbx.py
+++ b/release/scripts/op/io_scene_fbx/export_fbx.py
@@ -28,16 +28,17 @@ http://wiki.blender.org/index.php/Scripts/Manual/Export/autodesk_fbx
import os
import time
-import math # math.pi
-import shutil # for file copying
+import math # math.pi
+import shutil # for file copying
import bpy
from mathutils import Vector, Euler, Matrix
+
# XXX not used anymore, images are copied one at a time
def copy_images(dest_dir, textures):
import shutil
-
+
if not dest_dir.endswith(os.sep):
dest_dir += os.sep
@@ -51,16 +52,17 @@ def copy_images(dest_dir, textures):
if Blender.sys.exists(image_path):
# Make a name for the target path.
dest_image_path = dest_dir + image_path.split('\\')[-1].split('/')[-1]
- if not Blender.sys.exists(dest_image_path): # Image isnt already there
+ if not Blender.sys.exists(dest_image_path): # Image isnt already there
print("\tCopying %r > %r" % (image_path, dest_image_path))
try:
shutil.copy(image_path, dest_image_path)
- copyCount+=1
+ copyCount += 1
except:
print("\t\tWarning, file failed to copy, skipping.")
print('\tCopied %d images' % copyCount)
+
# I guess FBX uses degrees instead of radians (Arystan).
# Call this function just before writing to FBX.
# 180 / math.pi == 57.295779513
@@ -81,30 +83,31 @@ sane_name_mapping_group = {}
sane_name_mapping_ob['Scene'] = 'Scene_'
sane_name_mapping_ob['blend_root'] = 'blend_root_'
+
def increment_string(t):
name = t
num = ''
while name and name[-1].isdigit():
num = name[-1] + num
name = name[:-1]
- if num: return '%s%d' % (name, int(num)+1)
- else: return name + '_0'
-
+ if num:
+ return '%s%d' % (name, int(num) + 1)
+ else:
+ return name + '_0'
# todo - Disallow the name 'Scene' and 'blend_root' - it will bugger things up.
def sane_name(data, dct):
#if not data: return None
- if type(data)==tuple: # materials are paired up with images
+ if type(data) == tuple: # materials are paired up with images
data, other = data
use_other = True
else:
other = None
use_other = False
- if data: name = data.name
- else: name = None
+ name = data.name if data else None
orig_name = name
if other:
@@ -119,25 +122,40 @@ def sane_name(data, dct):
#except: pass
if not name:
- name = 'unnamed' # blank string, ASKING FOR TROUBLE!
+ name = 'unnamed' # blank string, ASKING FOR TROUBLE!
else:
- name = bpy.path.clean_name(name) # use our own
+ name = bpy.path.clean_name(name) # use our own
- while name in iter(dct.values()): name = increment_string(name)
+ while name in iter(dct.values()):
+ name = increment_string(name)
- if use_other: # even if other is None - orig_name_other will be a string or None
+ if use_other: # even if other is None - orig_name_other will be a string or None
dct[orig_name, orig_name_other] = name
else:
dct[orig_name] = name
return name
-def sane_obname(data): return sane_name(data, sane_name_mapping_ob)
-def sane_matname(data): return sane_name(data, sane_name_mapping_mat)
-def sane_texname(data): return sane_name(data, sane_name_mapping_tex)
-def sane_takename(data): return sane_name(data, sane_name_mapping_take)
-def sane_groupname(data): return sane_name(data, sane_name_mapping_group)
+
+def sane_obname(data):
+ return sane_name(data, sane_name_mapping_ob)
+
+
+def sane_matname(data):
+ return sane_name(data, sane_name_mapping_mat)
+
+
+def sane_texname(data):
+ return sane_name(data, sane_name_mapping_tex)
+
+
+def sane_takename(data):
+ return sane_name(data, sane_name_mapping_take)
+
+
+def sane_groupname(data):
+ return sane_name(data, sane_name_mapping_group)
# def derived_paths(fname_orig, basepath, FORCE_CWD=False):
# '''
@@ -160,7 +178,8 @@ def sane_groupname(data): return sane_name(data, sane_name_mapping_group)
def mat4x4str(mat):
- return '%.15f,%.15f,%.15f,%.15f,%.15f,%.15f,%.15f,%.15f,%.15f,%.15f,%.15f,%.15f,%.15f,%.15f,%.15f,%.15f' % tuple([ f for v in mat for f in v ])
+ return '%.15f,%.15f,%.15f,%.15f,%.15f,%.15f,%.15f,%.15f,%.15f,%.15f,%.15f,%.15f,%.15f,%.15f,%.15f,%.15f' % tuple([f for v in mat for f in v])
+
# XXX not used
# duplicated in OBJ exporter
@@ -174,6 +193,7 @@ def getVertsFromGroup(me, group_index):
return ret
+
# ob must be OB_MESH
def BPyMesh_meshWeight2List(ob, me):
''' Takes a mesh and return its group names and a list of lists, one list per vertex.
@@ -182,14 +202,14 @@ def BPyMesh_meshWeight2List(ob, me):
'''
# Clear the vert group.
- groupNames= [g.name for g in ob.vertex_groups]
- len_groupNames= len(groupNames)
+ groupNames = [g.name for g in ob.vertex_groups]
+ len_groupNames = len(groupNames)
if not len_groupNames:
# no verts? return a vert aligned empty list
return [[] for i in range(len(me.vertices))], []
else:
- vWeightList= [[0.0]*len_groupNames for i in range(len(me.vertices))]
+ vWeightList = [[0.0] * len_groupNames for i in range(len(me.vertices))]
for i, v in enumerate(me.vertices):
for g in v.groups:
@@ -197,24 +217,24 @@ def BPyMesh_meshWeight2List(ob, me):
return groupNames, vWeightList
+
def meshNormalizedWeights(ob, me):
- try: # account for old bad BPyMesh
+ try: # account for old bad BPyMesh
groupNames, vWeightList = BPyMesh_meshWeight2List(ob, me)
-# groupNames, vWeightList = BPyMesh.meshWeight2List(me)
except:
- return [],[]
+ return [], []
if not groupNames:
- return [],[]
+ return [], []
for i, vWeights in enumerate(vWeightList):
tot = 0.0
for w in vWeights:
- tot+=w
+ tot += w
if tot:
for j, w in enumerate(vWeights):
- vWeights[j] = w/tot
+ vWeights[j] = w / tot
return groupNames, vWeightList
@@ -226,33 +246,34 @@ header_comment = \
'''
+
# This func can be called with just the filepath
def save(operator, context, filepath="",
- GLOBAL_MATRIX = None,
- EXP_OBS_SELECTED = True,
- EXP_MESH = True,
- EXP_MESH_APPLY_MOD = True,
- EXP_ARMATURE = True,
- EXP_LAMP = True,
- EXP_CAMERA = True,
- EXP_EMPTY = True,
- EXP_IMAGE_COPY = False,
- ANIM_ENABLE = True,
- ANIM_OPTIMIZE = True,
- ANIM_OPTIMIZE_PRECISSION = 6,
- ANIM_ACTION_ALL = False,
- BATCH_ENABLE = False,
- BATCH_GROUP = True,
- BATCH_FILE_PREFIX = '',
- BATCH_OWN_DIR = False
+ GLOBAL_MATRIX=None,
+ EXP_OBS_SELECTED=True,
+ EXP_MESH=True,
+ EXP_MESH_APPLY_MOD=True,
+ EXP_ARMATURE=True,
+ EXP_LAMP=True,
+ EXP_CAMERA=True,
+ EXP_EMPTY=True,
+ EXP_IMAGE_COPY=False,
+ ANIM_ENABLE=True,
+ ANIM_OPTIMIZE=True,
+ ANIM_OPTIMIZE_PRECISSION=6,
+ ANIM_ACTION_ALL=False,
+ BATCH_ENABLE=False,
+ BATCH_GROUP=True,
+ BATCH_FILE_PREFIX='',
+ BATCH_OWN_DIR=False
):
- #XXX, missing arg
+ #XXX, missing arg
batch_objects = None
# testing
- mtx_x90 = Matrix.Rotation( math.pi/2.0, 3, 'X') # used
- mtx4_z90 = Matrix.Rotation( math.pi/2.0, 4, 'Z')
+ mtx_x90 = Matrix.Rotation(math.pi / 2.0, 3, 'X') # used
+ mtx4_z90 = Matrix.Rotation(math.pi / 2.0, 4, 'Z')
if GLOBAL_MATRIX is None:
GLOBAL_MATRIX = Matrix()
@@ -266,21 +287,18 @@ def save(operator, context, filepath="",
# get the path component of filepath
tmp_exists = bpy.utils.exists(fbxpath)
-# tmp_exists = Blender.sys.exists(fbxpath)
- if tmp_exists != 2: # a file, we want a path
+ if tmp_exists != 2: # a file, we want a path
fbxpath = os.path.dirname(fbxpath)
# while fbxpath and fbxpath[-1] not in ('/', '\\'):
# fbxpath = fbxpath[:-1]
if not fbxpath:
-# if not filepath:
# XXX
print('Error%t|Directory does not exist!')
# Draw.PupMenu('Error%t|Directory does not exist!')
return
tmp_exists = bpy.utils.exists(fbxpath)
-# tmp_exists = Blender.sys.exists(fbxpath)
if tmp_exists != 2:
# XXX
@@ -292,7 +310,6 @@ def save(operator, context, filepath="",
fbxpath += os.sep
del tmp_exists
-
if BATCH_GROUP:
data_seq = bpy.data.groups
else:
@@ -300,13 +317,11 @@ def save(operator, context, filepath="",
# call this function within a loop with BATCH_ENABLE == False
orig_sce = context.scene
-# orig_sce = bpy.data.scenes.active
- new_fbxpath = fbxpath # own dir option modifies, we need to keep an original
- for data in data_seq: # scene or group
+ new_fbxpath = fbxpath # own dir option modifies, we need to keep an original
+ for data in data_seq: # scene or group
newname = BATCH_FILE_PREFIX + bpy.path.clean_name(data.name)
-
if BATCH_OWN_DIR:
new_fbxpath = fbxpath + newname + os.sep
# path may already exist
@@ -316,16 +331,15 @@ def save(operator, context, filepath="",
# if Blender.sys.exists(new_fbxpath) == 0:
os.mkdir(new_fbxpath)
-
filepath = new_fbxpath + newname + '.fbx'
print('\nBatch exporting %s as...\n\t%r' % (data, filepath))
# XXX don't know what to do with this, probably do the same? (Arystan)
- if BATCH_GROUP: #group
+ if BATCH_GROUP: # group
# group, so objects update properly, add a dummy scene.
scene = bpy.data.scenes.new()
- scene.Layers = (1<<20) -1
+ scene.Layers = (1 << 20) - 1
bpy.data.scenes.active = scene
for ob_base in data.objects:
scene.objects.link(ob_base)
@@ -334,12 +348,9 @@ def save(operator, context, filepath="",
# TODO - BUMMER! Armatures not in the group wont animate the mesh
- else:# scene
-
-
+ else: # scene
data_seq.active = data
-
# Call self with modified args
# Dont pass batch options since we already usedt them
write(filepath, data.objects,
@@ -365,7 +376,7 @@ def save(operator, context, filepath="",
bpy.data.scenes.active = orig_sce
- return # so the script wont run after we have batch exported.
+ return # so the script wont run after we have batch exported.
# end batch support
@@ -377,39 +388,38 @@ def save(operator, context, filepath="",
# ----------------------------------------------
# storage classes
class my_bone_class(object):
- __slots__ =("blenName",
- "blenBone",
- "blenMeshes",
- "restMatrix",
- "parent",
- "blenName",
- "fbxName",
- "fbxArm",
- "__pose_bone",
- "__anim_poselist")
+ __slots__ = ("blenName",
+ "blenBone",
+ "blenMeshes",
+ "restMatrix",
+ "parent",
+ "blenName",
+ "fbxName",
+ "fbxArm",
+ "__pose_bone",
+ "__anim_poselist")
def __init__(self, blenBone, fbxArm):
# This is so 2 armatures dont have naming conflicts since FBX bones use object namespace
self.fbxName = sane_obname(blenBone)
- self.blenName = blenBone.name
- self.blenBone = blenBone
- self.blenMeshes = {} # fbxMeshObName : mesh
- self.fbxArm = fbxArm
- self.restMatrix = blenBone.matrix_local
+ self.blenName = blenBone.name
+ self.blenBone = blenBone
+ self.blenMeshes = {} # fbxMeshObName : mesh
+ self.fbxArm = fbxArm
+ self.restMatrix = blenBone.matrix_local
# self.restMatrix = blenBone.matrix['ARMATURESPACE']
# not used yet
# self.restMatrixInv = self.restMatrix.copy().invert()
# self.restMatrixLocal = None # set later, need parent matrix
- self.parent = None
+ self.parent = None
# not public
pose = fbxArm.blenObject.pose
-# pose = fbxArm.blenObject.getPose()
- self.__pose_bone = pose.bones[self.blenName]
+ self.__pose_bone = pose.bones[self.blenName]
# store a list if matricies here, (poseMatrix, head, tail)
# {frame:posematrix, frame:posematrix, ...}
@@ -434,10 +444,9 @@ def save(operator, context, filepath="",
'''
self.__anim_poselist[f] = self.__pose_bone.matrix.copy()
-# self.__anim_poselist[f] = self.__pose_bone.poseMatrix.copy()
# get pose from frame.
- def getPoseMatrix(self, f):# ----------------------------------------------
+ def getPoseMatrix(self, f): # ----------------------------------------------
return self.__anim_poselist[f]
'''
def getPoseHead(self, f):
@@ -466,36 +475,37 @@ def save(operator, context, filepath="",
def flushAnimData(self):
self.__anim_poselist.clear()
-
class my_object_generic(object):
- __slots__ =("fbxName",
- "blenObject",
- "blenData",
- "origData",
- "blenTextures",
- "blenMaterials",
- "blenMaterialList",
- "blenAction",
- "blenActionList",
- "fbxGroupNames",
- "fbxParent",
- "fbxBoneParent",
- "fbxBones",
- "fbxArm",
- "matrixWorld",
- "__anim_poselist",
- )
+ __slots__ = ("fbxName",
+ "blenObject",
+ "blenData",
+ "origData",
+ "blenTextures",
+ "blenMaterials",
+ "blenMaterialList",
+ "blenAction",
+ "blenActionList",
+ "fbxGroupNames",
+ "fbxParent",
+ "fbxBoneParent",
+ "fbxBones",
+ "fbxArm",
+ "matrixWorld",
+ "__anim_poselist",
+ )
# Other settings can be applied for each type - mesh, armature etc.
- def __init__(self, ob, matrixWorld = None):
+ def __init__(self, ob, matrixWorld=None):
self.fbxName = sane_obname(ob)
self.blenObject = ob
self.fbxGroupNames = []
- self.fbxParent = None # set later on IF the parent is in the selection.
- if matrixWorld: self.matrixWorld = GLOBAL_MATRIX * matrixWorld
- else: self.matrixWorld = GLOBAL_MATRIX * ob.matrix_world
-# else: self.matrixWorld = ob.matrixWorld * GLOBAL_MATRIX
- self.__anim_poselist = {} # we should only access this
+ self.fbxParent = None # set later on IF the parent is in the selection.
+ if matrixWorld:
+ self.matrixWorld = GLOBAL_MATRIX * matrixWorld
+ else:
+ self.matrixWorld = GLOBAL_MATRIX * ob.matrix_world
+
+ self.__anim_poselist = {} # we should only access this
def parRelMatrix(self):
if self.fbxParent:
@@ -506,9 +516,9 @@ def save(operator, context, filepath="",
def setPoseFrame(self, f, fake=False):
if fake:
# annoying, have to clear GLOBAL_MATRIX
- self.__anim_poselist[f] = self.matrixWorld * GLOBAL_MATRIX.copy().invert()
+ self.__anim_poselist[f] = self.matrixWorld * GLOBAL_MATRIX.copy().invert()
else:
- self.__anim_poselist[f] = self.blenObject.matrix_world.copy()
+ self.__anim_poselist[f] = self.blenObject.matrix_world.copy()
def getAnimParRelMatrix(self, frame):
if self.fbxParent:
@@ -525,20 +535,16 @@ def save(operator, context, filepath="",
matrix_rot = (GLOBAL_MATRIX * self.__anim_poselist[frame]).rotation_part()
# Lamps need to be rotated
- if obj_type =='LAMP':
+ if obj_type == 'LAMP':
matrix_rot = matrix_rot * mtx_x90
- elif obj_type =='CAMERA':
+ elif obj_type == 'CAMERA':
y = Vector((0.0, 1.0, 0.0)) * matrix_rot
- matrix_rot = Matrix.Rotation(math.pi/2.0, 3, y) * matrix_rot
+ matrix_rot = Matrix.Rotation(math.pi / 2.0, 3, y) * matrix_rot
return matrix_rot
# ----------------------------------------------
-
-
-
-
print('\nFBX export starting... %r' % filepath)
start_time = time.clock()
try:
@@ -549,13 +555,12 @@ def save(operator, context, filepath="",
scene = context.scene
world = scene.world
-
# ---------------------------- Write the header first
file.write(header_comment)
if time:
curtime = time.localtime()[0:6]
else:
- curtime = (0,0,0,0,0,0)
+ curtime = (0, 0, 0, 0, 0, 0)
#
file.write(\
'''FBXHeaderExtension: {
@@ -580,11 +585,10 @@ def save(operator, context, filepath="",
file.write('\nCreationTime: "%.4i-%.2i-%.2i %.2i:%.2i:%.2i:000"' % curtime)
file.write('\nCreator: "Blender version %s"' % bpy.app.version_string)
-
- pose_items = [] # list of (fbxName, matrix) to write pose data for, easier to collect allong the way
+ pose_items = [] # list of (fbxName, matrix) to write pose data for, easier to collect allong the way
# --------------- funcs for exporting
- def object_tx(ob, loc, matrix, matrix_mod = None):
+ def object_tx(ob, loc, matrix, matrix_mod=None):
'''
Matrix mod is so armature objects can modify their bone matricies
'''
@@ -593,26 +597,27 @@ def save(operator, context, filepath="",
# we know we have a matrix
# matrix = mtx4_z90 * (ob.matrix['ARMATURESPACE'] * matrix_mod)
- matrix = ob.matrix_local * mtx4_z90 # dont apply armature matrix anymore
+ matrix = ob.matrix_local * mtx4_z90 # dont apply armature matrix anymore
# matrix = mtx4_z90 * ob.matrix['ARMATURESPACE'] # dont apply armature matrix anymore
parent = ob.parent
if parent:
#par_matrix = mtx4_z90 * (parent.matrix['ARMATURESPACE'] * matrix_mod)
- par_matrix = parent.matrix_local * mtx4_z90 # dont apply armature matrix anymore
+ par_matrix = parent.matrix_local * mtx4_z90 # dont apply armature matrix anymore
# par_matrix = mtx4_z90 * parent.matrix['ARMATURESPACE'] # dont apply armature matrix anymore
matrix = par_matrix.copy().invert() * matrix
loc, rot, scale = matrix.decompose()
matrix_rot = rot.to_matrix()
-
+
loc = tuple(loc)
- rot = tuple(rot.to_euler()) # quat -> euler
+ rot = tuple(rot.to_euler()) # quat -> euler
scale = tuple(scale)
else:
# This is bad because we need the parent relative matrix from the fbx parent (if we have one), dont use anymore
#if ob and not matrix: matrix = ob.matrix_world * GLOBAL_MATRIX
- if ob and not matrix: raise Exception("error: this should never happen!")
+ if ob and not matrix:
+ raise Exception("error: this should never happen!")
matrix_rot = matrix
#if matrix:
@@ -623,11 +628,11 @@ def save(operator, context, filepath="",
matrix_rot = rot.to_matrix()
# Lamps need to be rotated
- if ob and ob.type =='LAMP':
+ if ob and ob.type == 'LAMP':
matrix_rot = matrix_rot * mtx_x90
- elif ob and ob.type =='CAMERA':
+ elif ob and ob.type == 'CAMERA':
y = Vector((0.0, 1.0, 0.0)) * matrix_rot
- matrix_rot = Matrix.Rotation(math.pi/2, 3, y) * matrix_rot
+ matrix_rot = Matrix.Rotation(math.pi / 2.0, 3, y) * matrix_rot
# else do nothing.
loc = tuple(loc)
@@ -635,13 +640,13 @@ def save(operator, context, filepath="",
scale = tuple(scale)
else:
if not loc:
- loc = 0,0,0
- scale = 1,1,1
- rot = 0,0,0
+ loc = 0.0, 0.0, 0.0
+ scale = 1.0, 1.0, 1.0
+ rot = 0.0, 0.0, 0.0
return loc, rot, scale, matrix, matrix_rot
- def write_object_tx(ob, loc, matrix, matrix_mod= None):
+ def write_object_tx(ob, loc, matrix, matrix_mod=None):
'''
We have loc to set the location if non blender objects that have a location
@@ -746,7 +751,6 @@ def save(operator, context, filepath="",
return loc, rot, scale, matrix, matrix_rot
-
# -------------------------------------------- Armatures
#def write_bone(bone, name, matrix_mod):
def write_bone(my_bone):
@@ -754,9 +758,8 @@ def save(operator, context, filepath="",
file.write('\n\t\tVersion: 232')
#poseMatrix = write_object_props(my_bone.blenBone, None, None, my_bone.fbxArm.parRelMatrix())[3]
- poseMatrix = write_object_props(my_bone.blenBone)[3] # dont apply bone matricies anymore
- pose_items.append( (my_bone.fbxName, poseMatrix) )
-
+ poseMatrix = write_object_props(my_bone.blenBone)[3] # dont apply bone matricies anymore
+ pose_items.append((my_bone.fbxName, poseMatrix))
# file.write('\n\t\t\tProperty: "Size", "double", "",%.6f' % ((my_bone.blenData.head['ARMATURESPACE'] - my_bone.blenData.tail['ARMATURESPACE']) * my_bone.fbxArm.parRelMatrix()).length)
file.write('\n\t\t\tProperty: "Size", "double", "",1')
@@ -806,7 +809,7 @@ def save(operator, context, filepath="",
}''')
def write_camera_dummy(name, loc, near, far, proj_type, up):
- file.write('\n\tModel: "Model::%s", "Camera" {' % name )
+ file.write('\n\tModel: "Model::%s", "Camera" {' % name)
file.write('\n\t\tVersion: 232')
write_object_props(None, loc)
@@ -897,26 +900,26 @@ def save(operator, context, filepath="",
def write_camera_default():
# This sucks but to match FBX converter its easier to
# write the cameras though they are not needed.
- write_camera_dummy('Producer Perspective', (0,71.3,287.5), 10, 4000, 0, (0,1,0))
- write_camera_dummy('Producer Top', (0,4000,0), 1, 30000, 1, (0,0,-1))
- write_camera_dummy('Producer Bottom', (0,-4000,0), 1, 30000, 1, (0,0,-1))
- write_camera_dummy('Producer Front', (0,0,4000), 1, 30000, 1, (0,1,0))
- write_camera_dummy('Producer Back', (0,0,-4000), 1, 30000, 1, (0,1,0))
- write_camera_dummy('Producer Right', (4000,0,0), 1, 30000, 1, (0,1,0))
- write_camera_dummy('Producer Left', (-4000,0,0), 1, 30000, 1, (0,1,0))
+ write_camera_dummy('Producer Perspective', (0, 71.3, 287.5), 10, 4000, 0, (0, 1, 0))
+ write_camera_dummy('Producer Top', (0, 4000, 0), 1, 30000, 1, (0, 0, -1))
+ write_camera_dummy('Producer Bottom', (0, -4000, 0), 1, 30000, 1, (0, 0, -1))
+ write_camera_dummy('Producer Front', (0, 0, 4000), 1, 30000, 1, (0, 1, 0))
+ write_camera_dummy('Producer Back', (0, 0, -4000), 1, 30000, 1, (0, 1, 0))
+ write_camera_dummy('Producer Right', (4000, 0, 0), 1, 30000, 1, (0, 1, 0))
+ write_camera_dummy('Producer Left', (-4000, 0, 0), 1, 30000, 1, (0, 1, 0))
def write_camera(my_cam):
'''
Write a blender camera
'''
render = scene.render
- width = render.resolution_x
- height = render.resolution_y
- aspect = width / height
+ width = render.resolution_x
+ height = render.resolution_y
+ aspect = width / height
data = my_cam.blenObject.data
- file.write('\n\tModel: "Model::%s", "Camera" {' % my_cam.fbxName )
+ file.write('\n\tModel: "Model::%s", "Camera" {' % my_cam.fbxName)
file.write('\n\t\tVersion: 232')
loc, rot, scale, matrix, matrix_rot = write_object_props(my_cam.blenObject, None, my_cam.parRelMatrix())
@@ -925,8 +928,8 @@ def save(operator, context, filepath="",
file.write('\n\t\t\tProperty: "FieldOfViewX", "FieldOfView", "A+",1')
file.write('\n\t\t\tProperty: "FieldOfViewY", "FieldOfView", "A+",1')
# file.write('\n\t\t\tProperty: "FocalLength", "Real", "A+",14.0323972702026')
- file.write('\n\t\t\tProperty: "OpticalCenterX", "Real", "A+",%.6f' % data.shift_x) # not sure if this is in the correct units?
- file.write('\n\t\t\tProperty: "OpticalCenterY", "Real", "A+",%.6f' % data.shift_y) # ditto
+ file.write('\n\t\t\tProperty: "OpticalCenterX", "Real", "A+",%.6f' % data.shift_x) # not sure if this is in the correct units?
+ file.write('\n\t\t\tProperty: "OpticalCenterY", "Real", "A+",%.6f' % data.shift_y) # ditto
file.write('\n\t\t\tProperty: "BackgroundColor", "Color", "A+",0,0,0')
file.write('\n\t\t\tProperty: "TurnTable", "Real", "A+",0')
file.write('\n\t\t\tProperty: "DisplayTurnTableIcon", "bool", "",1')
@@ -1035,7 +1038,8 @@ def save(operator, context, filepath="",
light_type_items = {'POINT': 0, 'SUN': 1, 'SPOT': 2, 'HEMI': 3, 'AREA': 4}
light_type = light_type_items[light.type]
- if light_type > 2: light_type = 1 # hemi and area lights become directional
+ if light_type > 2:
+ light_type = 1 # hemi and area lights become directional
# mode = light.mode
if light.shadow_method == 'RAY_SHADOW' or light.shadow_method == 'BUFFER_SHADOW':
@@ -1050,7 +1054,7 @@ def save(operator, context, filepath="",
else:
do_light = 1
- scale = abs(GLOBAL_MATRIX.scale_part()[0]) # scale is always uniform in this case
+ scale = abs(GLOBAL_MATRIX.scale_part()[0]) # scale is always uniform in this case
file.write('\n\t\t\tProperty: "LightType", "enum", "",%i' % light_type)
file.write('\n\t\t\tProperty: "CastLightOnObject", "bool", "",1')
@@ -1059,13 +1063,13 @@ def save(operator, context, filepath="",
file.write('\n\t\t\tProperty: "DrawFrontFacingVolumetricLight", "bool", "",0')
file.write('\n\t\t\tProperty: "GoboProperty", "object", ""')
file.write('\n\t\t\tProperty: "Color", "Color", "A+",1,1,1')
- file.write('\n\t\t\tProperty: "Intensity", "Intensity", "A+",%.2f' % (min(light.energy*100, 200))) # clamp below 200
+ file.write('\n\t\t\tProperty: "Intensity", "Intensity", "A+",%.2f' % (min(light.energy * 100.0, 200.0))) # clamp below 200
if light.type == 'SPOT':
file.write('\n\t\t\tProperty: "Cone angle", "Cone angle", "A+",%.2f' % math.degrees(light.spot_size))
file.write('\n\t\t\tProperty: "Fog", "Fog", "A+",50')
file.write('\n\t\t\tProperty: "Color", "Color", "A",%.2f,%.2f,%.2f' % tuple(light.color))
- file.write('\n\t\t\tProperty: "Intensity", "Intensity", "A+",%.2f' % (min(light.energy*100, 200))) # clamp below 200
+ file.write('\n\t\t\tProperty: "Intensity", "Intensity", "A+",%.2f' % (min(light.energy * 100.0, 200.0))) # clamp below 200
file.write('\n\t\t\tProperty: "Fog", "Fog", "A+",50')
file.write('\n\t\t\tProperty: "LightType", "enum", "",%i' % light_type)
@@ -1094,9 +1098,10 @@ def save(operator, context, filepath="",
file.write('\n\t}')
# matrixOnly is not used at the moment
- def write_null(my_null = None, fbxName = None, matrixOnly = None):
+ def write_null(my_null=None, fbxName=None, matrixOnly=None):
# ob can be null
- if not fbxName: fbxName = my_null.fbxName
+ if not fbxName:
+ fbxName = my_null.fbxName
file.write('\n\tModel: "Model::%s", "Null" {' % fbxName)
file.write('\n\t\tVersion: 232')
@@ -1105,9 +1110,11 @@ def save(operator, context, filepath="",
if matrixOnly:
poseMatrix = write_object_props(None, None, matrixOnly)[3]
- else: # all other Null's
- if my_null: poseMatrix = write_object_props(my_null.blenObject, None, my_null.parRelMatrix())[3]
- else: poseMatrix = write_object_props()[3]
+ else: # all other Null's
+ if my_null:
+ poseMatrix = write_object_props(my_null.blenObject, None, my_null.parRelMatrix())[3]
+ else:
+ poseMatrix = write_object_props()[3]
pose_items.append((fbxName, poseMatrix))
@@ -1121,9 +1128,10 @@ def save(operator, context, filepath="",
}''')
# Material Settings
- if world: world_amb = tuple(world.ambient_color)
-# if world: world_amb = world.getAmb()
- else: world_amb = (0,0,0) # Default value
+ if world:
+ world_amb = world.ambient_color[:]
+ else:
+ world_amb = 0.0, 0.0, 0.0 # default value
def write_material(matname, mat):
file.write('\n\tMaterial: "Material::%s", "" {' % matname)
@@ -1137,8 +1145,8 @@ def save(operator, context, filepath="",
mat_dif = mat.diffuse_intensity
mat_amb = mat.ambient
- mat_hard = (float(mat.specular_hardness)-1)/5.10
- mat_spec = mat.specular_intensity/2.0
+ mat_hard = (float(mat.specular_hardness) - 1.0) / 5.10
+ mat_spec = mat.specular_intensity / 2.0
mat_alpha = mat.alpha
mat_emit = mat.emit
mat_shadeless = mat.use_shadeless
@@ -1151,7 +1159,7 @@ def save(operator, context, filepath="",
mat_shader = 'Phong'
else:
mat_cols = mat_cold = 0.8, 0.8, 0.8
- mat_colamb = 0.0,0.0,0.0
+ mat_colamb = 0.0, 0.0, 0.0
# mat_colm
mat_dif = 1.0
mat_amb = 0.5
@@ -1169,7 +1177,7 @@ def save(operator, context, filepath="",
file.write('\n\t\tProperties60: {')
file.write('\n\t\t\tProperty: "ShadingModel", "KString", "", "%s"' % mat_shader)
file.write('\n\t\t\tProperty: "MultiLayer", "bool", "",0')
- file.write('\n\t\t\tProperty: "EmissiveColor", "ColorRGB", "",%.4f,%.4f,%.4f' % mat_cold) # emit and diffuse color are he same in blender
+ file.write('\n\t\t\tProperty: "EmissiveColor", "ColorRGB", "",%.4f,%.4f,%.4f' % mat_cold) # emit and diffuse color are he same in blender
file.write('\n\t\t\tProperty: "EmissiveFactor", "double", "",%.4f' % mat_emit)
file.write('\n\t\t\tProperty: "AmbientColor", "ColorRGB", "",%.4f,%.4f,%.4f' % mat_colamb)
@@ -1234,7 +1242,6 @@ def save(operator, context, filepath="",
file.write('\n\t\t\tProperty: "Path", "charptr", "", "%s"' % fname_strip)
-
file.write('''
Property: "StartFrame", "int", "",0
Property: "StopFrame", "int", "",0
@@ -1248,11 +1255,11 @@ def save(operator, context, filepath="",
UseMipMap: 0''')
file.write('\n\t\tFilename: "%s"' % fname_strip)
- if fname_strip: fname_strip = '/' + fname_strip
- file.write('\n\t\tRelativeFilename: "%s"' % fname_rel) # make relative
+ if fname_strip:
+ fname_strip = '/' + fname_strip
+ file.write('\n\t\tRelativeFilename: "%s"' % fname_rel) # make relative
file.write('\n\t}')
-
def write_texture(texname, tex, num):
# if tex is None then this is a dummy tex
file.write('\n\tTexture: "Texture::%s", "TextureVideoClip" {' % texname)
@@ -1268,7 +1275,6 @@ def save(operator, context, filepath="",
Property: "Scaling", "Vector", "A+",1,1,1''')
file.write('\n\t\t\tProperty: "Texture alpha", "Number", "A+",%i' % num)
-
# WrapModeU/V 0==rep, 1==clamp, TODO add support
file.write('''
Property: "TextureTypeUse", "enum", "",0
@@ -1296,7 +1302,7 @@ def save(operator, context, filepath="",
fname = fname_strip = fname_rel = ''
file.write('\n\t\tFileName: "%s"' % fname_strip)
- file.write('\n\t\tRelativeFilename: "%s"' % fname_rel) # need some make relative command
+ file.write('\n\t\tRelativeFilename: "%s"' % fname_rel) # need some make relative command
file.write('''
ModelUVTranslation: 0,0
@@ -1367,27 +1373,27 @@ def save(operator, context, filepath="",
i = -1
for vg in vgroup_data:
if i == -1:
- file.write('%i' % vg[0])
- i=0
+ file.write('%i' % vg[0])
+ i = 0
else:
- if i==23:
+ if i == 23:
file.write('\n\t\t')
- i=0
+ i = 0
file.write(',%i' % vg[0])
- i+=1
+ i += 1
file.write('\n\t\tWeights: ')
i = -1
for vg in vgroup_data:
if i == -1:
- file.write('%.8f' % vg[1])
- i=0
+ file.write('%.8f' % vg[1])
+ i = 0
else:
- if i==38:
+ if i == 38:
file.write('\n\t\t')
- i=0
+ i = 0
file.write(',%.8f' % vg[1])
- i+=1
+ i += 1
if my_mesh.fbxParent:
# TODO FIXME, this case is broken in some cases. skinned meshes just shouldnt have parents where possible!
@@ -1400,7 +1406,7 @@ def save(operator, context, filepath="",
matstr = mat4x4str(m)
matstr_i = mat4x4str(m.invert())
- file.write('\n\t\tTransform: %s' % matstr_i) # THIS IS __NOT__ THE GLOBAL MATRIX AS DOCUMENTED :/
+ file.write('\n\t\tTransform: %s' % matstr_i) # THIS IS __NOT__ THE GLOBAL MATRIX AS DOCUMENTED :/
file.write('\n\t\tTransformLink: %s' % matstr)
file.write('\n\t}')
@@ -1414,7 +1420,7 @@ def save(operator, context, filepath="",
do_uvs = bool(me.uv_textures)
file.write('\n\tModel: "Model::%s", "Mesh" {' % my_mesh.fbxName)
- file.write('\n\t\tVersion: 232') # newline is added in write_object_props
+ file.write('\n\t\tVersion: 232') # newline is added in write_object_props
# convert into lists once.
me_vertices = me.vertices[:]
@@ -1430,10 +1436,9 @@ def save(operator, context, filepath="",
file.write('\n\t\tShading: Y')
file.write('\n\t\tCulling: "CullingOff"')
-
# Write the Real Mesh data here
file.write('\n\t\tVertices: ')
- i=-1
+ i = -1
for v in me_vertices:
if i == -1:
@@ -1443,11 +1448,11 @@ def save(operator, context, filepath="",
if i == 7:
file.write('\n\t\t')
i = 0
- file.write(',%.6f,%.6f,%.6f'% v.co[:])
- i+=1
+ file.write(',%.6f,%.6f,%.6f' % v.co[:])
+ i += 1
file.write('\n\t\tPolygonVertexIndex: ')
- i=-1
+ i = -1
for f in me_faces:
fi = f.vertices[:]
@@ -1474,29 +1479,28 @@ def save(operator, context, filepath="",
ed_val = ed.vertices[:]
ed_val = ed_val[0], ed_val[-1] ^ -1
- if i==-1:
+ if i == -1:
file.write('%i,%i' % ed_val)
- i=0
+ i = 0
else:
- if i==13:
+ if i == 13:
file.write('\n\t\t')
- i=0
+ i = 0
file.write(',%i,%i' % ed_val)
- i+=1
-
+ i += 1
file.write('\n\t\tEdges: ')
- i=-1
+ i = -1
for ed in me_edges:
- if i==-1:
+ if i == -1:
file.write('%i,%i' % (ed.vertices[0], ed.vertices[1]))
- i=0
+ i = 0
else:
- if i==13:
+ if i == 13:
file.write('\n\t\t')
- i=0
+ i = 0
file.write(',%i,%i' % (ed.vertices[0], ed.vertices[1]))
- i+=1
+ i += 1
file.write('\n\t\tGeometryVersion: 124')
@@ -1508,15 +1512,17 @@ def save(operator, context, filepath="",
ReferenceInformationType: "Direct"
Normals: ''')
- i=-1
+ i = -1
for v in me_vertices:
- if i==-1:
- file.write('%.15f,%.15f,%.15f' % v.normal[:]); i=0
+ if i == -1:
+ file.write('%.15f,%.15f,%.15f' % v.normal[:])
+ i = 0
else:
- if i==2:
- file.write('\n '); i=0
+ if i == 2:
+ file.write('\n\t\t\t ')
+ i = 0
file.write(',%.15f,%.15f,%.15f' % v.normal[:])
- i+=1
+ i += 1
file.write('\n\t\t}')
# Write Face Smoothing
@@ -1528,15 +1534,17 @@ def save(operator, context, filepath="",
ReferenceInformationType: "Direct"
Smoothing: ''')
- i=-1
+ i = -1
for f in me_faces:
- if i==-1:
- file.write('%i' % f.use_smooth); i=0
+ if i == -1:
+ file.write('%i' % f.use_smooth)
+ i = 0
else:
- if i==54:
- file.write('\n '); i=0
+ if i == 54:
+ file.write('\n\t\t\t ')
+ i = 0
file.write(',%i' % f.use_smooth)
- i+=1
+ i += 1
file.write('\n\t\t}')
@@ -1549,19 +1557,20 @@ def save(operator, context, filepath="",
ReferenceInformationType: "Direct"
Smoothing: ''')
- i=-1
+ i = -1
for ed in me_edges:
- if i==-1:
- file.write('%i' % (ed.use_edge_sharp)); i=0
+ if i == -1:
+ file.write('%i' % (ed.use_edge_sharp))
+ i = 0
else:
- if i==54:
- file.write('\n '); i=0
+ if i == 54:
+ file.write('\n\t\t\t ')
+ i = 0
file.write(',%i' % (ed.use_edge_sharp))
- i+=1
+ i += 1
file.write('\n\t\t}')
-
# Write VertexColor Layers
# note, no programs seem to use this info :/
collayers = []
@@ -1578,7 +1587,7 @@ def save(operator, context, filepath="",
Colors: ''')
i = -1
- ii = 0 # Count how many Colors we write
+ ii = 0 # Count how many Colors we write
for fi, cf in enumerate(collayer.data):
if len(me_faces[fi].vertices) == 4:
@@ -1587,34 +1596,32 @@ def save(operator, context, filepath="",
colors = cf.color1[:], cf.color2[:], cf.color3[:]
for col in colors:
- if i==-1:
+ if i == -1:
file.write('%.4f,%.4f,%.4f,1' % col)
- i=0
+ i = 0
else:
- if i==7:
+ if i == 7:
file.write('\n\t\t\t\t')
- i=0
+ i = 0
file.write(',%.4f,%.4f,%.4f,1' % col)
- i+=1
- ii+=1 # One more Color
+ i += 1
+ ii += 1 # One more Color
file.write('\n\t\t\tColorIndex: ')
i = -1
for j in range(ii):
if i == -1:
file.write('%i' % j)
- i=0
+ i = 0
else:
- if i==55:
+ if i == 55:
file.write('\n\t\t\t\t')
- i=0
+ i = 0
file.write(',%i' % j)
- i+=1
+ i += 1
file.write('\n\t\t}')
-
-
# Write UV and texture layers.
uvlayers = []
if do_uvs:
@@ -1631,34 +1638,34 @@ def save(operator, context, filepath="",
UV: ''')
i = -1
- ii = 0 # Count how many UVs we write
+ ii = 0 # Count how many UVs we write
for uf in uvlayer.data:
# workaround, since uf.uv iteration is wrong atm
for uv in uf.uv:
- if i==-1:
+ if i == -1:
file.write('%.6f,%.6f' % uv[:])
- i=0
+ i = 0
else:
- if i==7:
+ if i == 7:
file.write('\n\t\t\t ')
- i=0
+ i = 0
file.write(',%.6f,%.6f' % uv[:])
- i+=1
- ii+=1 # One more UV
+ i += 1
+ ii += 1 # One more UV
file.write('\n\t\t\tUVIndex: ')
i = -1
for j in range(ii):
if i == -1:
- file.write('%i' % j)
- i=0
+ file.write('%i' % j)
+ i = 0
else:
- if i==55:
+ if i == 55:
file.write('\n\t\t\t\t')
- i=0
+ i = 0
file.write(',%i' % j)
- i+=1
+ i += 1
file.write('\n\t\t}')
@@ -1680,28 +1687,28 @@ def save(operator, context, filepath="",
if len(my_mesh.blenTextures) == 1:
file.write('0')
else:
- texture_mapping_local = {None:-1}
+ texture_mapping_local = {None: -1}
- i = 0 # 1 for dummy
+ i = 0 # 1 for dummy
for tex in my_mesh.blenTextures:
- if tex: # None is set above
+ if tex: # None is set above
texture_mapping_local[tex] = i
- i+=1
+ i += 1
- i=-1
+ i = -1
for f in uvlayer.data:
img_key = f.image
- if i==-1:
- i=0
- file.write( '%s' % texture_mapping_local[img_key])
+ if i == -1:
+ i = 0
+ file.write('%s' % texture_mapping_local[img_key])
else:
- if i==55:
+ if i == 55:
file.write('\n ')
- i=0
+ i = 0
file.write(',%s' % texture_mapping_local[img_key])
- i+=1
+ i += 1
else:
file.write('''
@@ -1715,9 +1722,7 @@ def save(operator, context, filepath="",
TextureId: ''')
file.write('\n\t\t}')
-
# Done with UV/textures.
-
if do_materials:
file.write('\n\t\tLayerElementMaterial: 0 {')
file.write('\n\t\t\tVersion: 101')
@@ -1735,7 +1740,7 @@ def save(operator, context, filepath="",
file.write('0')
else:
# Build a material mapping for this
- material_mapping_local = {} # local-mat & tex : global index.
+ material_mapping_local = {} # local-mat & tex : global index.
for j, mat_tex_pair in enumerate(my_mesh.blenMaterials):
material_mapping_local[mat_tex_pair] = j
@@ -1749,25 +1754,29 @@ def save(operator, context, filepath="",
else:
uv_faces = [None] * len(me_faces)
- i=-1
+ i = -1
for f, uf in zip(me_faces, uv_faces):
# for f in me_faces:
- try: mat = mats[f.material_index]
- except:mat = None
+ try:
+ mat = mats[f.material_index]
+ except:
+ mat = None
- if do_uvs: tex = uf.image # WARNING - MULTI UV LAYER IMAGES NOT SUPPORTED :/
- else: tex = None
+ if do_uvs:
+ tex = uf.image # WARNING - MULTI UV LAYER IMAGES NOT SUPPORTED :/
+ else:
+ tex = None
- if i==-1:
- i=0
- file.write( '%s' % (material_mapping_local[mat, tex])) # None for mat or tex is ok
+ if i == -1:
+ i = 0
+ file.write('%s' % (material_mapping_local[mat, tex])) # None for mat or tex is ok
else:
- if i==55:
+ if i == 55:
file.write('\n\t\t\t\t')
- i=0
+ i = 0
file.write(',%s' % (material_mapping_local[mat, tex]))
- i+=1
+ i += 1
file.write('\n\t\t}')
@@ -1801,14 +1810,13 @@ def save(operator, context, filepath="",
TypedIndex: 0
}''')
- if do_uvs: # same as me.faceUV
+ if do_uvs: # same as me.faceUV
file.write('''
LayerElement: {
Type: "LayerElementUV"
TypedIndex: 0
}''')
-
file.write('\n\t\t}')
if len(uvlayers) > 1:
@@ -1838,9 +1846,10 @@ def save(operator, context, filepath="",
if len(collayers) > 1:
# Take into account any UV layers
layer_offset = 0
- if uvlayers: layer_offset = len(uvlayers)-1
+ if uvlayers:
+ layer_offset = len(uvlayers) - 1
- for i in range(layer_offset, len(collayers)+layer_offset):
+ for i in range(layer_offset, len(collayers) + layer_offset):
file.write('\n\t\tLayer: %i {' % i)
file.write('\n\t\t\tVersion: 100')
@@ -1866,7 +1875,6 @@ def save(operator, context, filepath="",
MultiLayer: 0
}''')
-
# add meshes here to clear because they are not used anywhere.
meshes_to_clear = []
@@ -1877,21 +1885,23 @@ def save(operator, context, filepath="",
# armatures not a part of a mesh, will be added to ob_arms
ob_bones = []
ob_arms = []
- ob_null = [] # emptys
+ ob_null = [] # emptys
# List of types that have blender objects (not bones)
ob_all_typegroups = [ob_meshes, ob_lights, ob_cameras, ob_arms, ob_null]
- groups = [] # blender groups, only add ones that have objects in the selections
- materials = {} # (mat, image) keys, should be a set()
- textures = {} # should be a set()
+ groups = [] # blender groups, only add ones that have objects in the selections
+ materials = {} # (mat, image) keys, should be a set()
+ textures = {} # should be a set()
- tmp_ob_type = ob_type = None # incase no objects are exported, so as not to raise an error
+ tmp_ob_type = ob_type = None # incase no objects are exported, so as not to raise an error
# if EXP_OBS_SELECTED is false, use sceens objects
if not batch_objects:
- if EXP_OBS_SELECTED: tmp_objects = context.selected_objects
- else: tmp_objects = scene.objects
+ if EXP_OBS_SELECTED:
+ tmp_objects = context.selected_objects
+ else:
+ tmp_objects = scene.objects
else:
tmp_objects = batch_objects
@@ -1912,7 +1922,6 @@ def save(operator, context, filepath="",
# This causes the makeDisplayList command to effect the mesh
scene.frame_set(scene.frame_current)
-
for ob_base in tmp_objects:
# ignore dupli children
@@ -1936,7 +1945,8 @@ def save(operator, context, filepath="",
elif tmp_ob_type == 'ARMATURE':
if EXP_ARMATURE:
# TODO - armatures dont work in dupligroups!
- if ob not in ob_arms: ob_arms.append(ob)
+ if ob not in ob_arms:
+ ob_arms.append(ob)
# ob_arms.append(ob) # replace later. was "ob_arms.append(sane_obname(ob), ob)"
elif tmp_ob_type == 'EMPTY':
if EXP_EMPTY:
@@ -1944,28 +1954,26 @@ def save(operator, context, filepath="",
elif EXP_MESH:
origData = True
if tmp_ob_type != 'MESH':
-# me = bpy.data.meshes.new()
- try: me = ob.create_mesh(scene, True, 'PREVIEW')
-# try: me.getFromObject(ob)
- except: me = None
+ try:
+ me = ob.create_mesh(scene, True, 'PREVIEW')
+ except:
+ me = None
+
if me:
- meshes_to_clear.append( me )
+ meshes_to_clear.append(me)
mats = me.materials
origData = False
else:
# Mesh Type!
if EXP_MESH_APPLY_MOD:
-# me = bpy.data.meshes.new()
me = ob.create_mesh(scene, True, 'PREVIEW')
-# me.getFromObject(ob)
# print ob, me, me.getVertGroupNames()
- meshes_to_clear.append( me )
+ meshes_to_clear.append(me)
origData = False
mats = me.materials
else:
me = ob.data
-# me = ob.getData(mesh=1)
mats = me.materials
# # Support object colors
@@ -1978,7 +1986,6 @@ def save(operator, context, filepath="",
# del tmp_ob_mats
# del tmp_colbits
-
if me:
# # This WILL modify meshes in blender if EXP_MESH_APPLY_MOD is disabled.
# # so strictly this is bad. but only in rare cases would it have negative results
@@ -1994,10 +2001,12 @@ def save(operator, context, filepath="",
tex = uf.image
textures[tex] = texture_mapping_local[tex] = None
- try: mat = mats[f.material_index]
- except: mat = None
+ try:
+ mat = mats[f.material_index]
+ except:
+ mat = None
- materials[mat, tex] = material_mapping_local[mat, tex] = None # should use sets, wait for blender 2.5
+ materials[mat, tex] = material_mapping_local[mat, tex] = None # should use sets, wait for blender 2.5
else:
for mat in mats:
@@ -2016,10 +2025,9 @@ def save(operator, context, filepath="",
armob = ob.parent
blenParentBoneName = ob.parent_bone
-
if armob and armob not in ob_arms:
ob_arms.append(armob)
-
+
# Warning for scaled, mesh objects with armatures
if abs(ob.scale[0] - 1.0) > 0.05 or abs(ob.scale[1] - 1.0) > 0.05 or abs(ob.scale[1] - 1.0) > 0.05:
operator.report('WARNING', "Object '%s' has a scale of (%.3f, %.3f, %.3f), Armature deformation will not work as expected!, Apply Scale to fix." % ((ob.name,) + tuple(ob.scale)))
@@ -2028,24 +2036,24 @@ def save(operator, context, filepath="",
blenParentBoneName = armob = None
my_mesh = my_object_generic(ob, mtx)
- my_mesh.blenData = me
- my_mesh.origData = origData
- my_mesh.blenMaterials = list(material_mapping_local.keys())
+ my_mesh.blenData = me
+ my_mesh.origData = origData
+ my_mesh.blenMaterials = list(material_mapping_local.keys())
my_mesh.blenMaterialList = mats
- my_mesh.blenTextures = list(texture_mapping_local.keys())
+ my_mesh.blenTextures = list(texture_mapping_local.keys())
# if only 1 null texture then empty the list
if len(my_mesh.blenTextures) == 1 and my_mesh.blenTextures[0] is None:
my_mesh.blenTextures = []
- my_mesh.fbxArm = armob # replace with my_object_generic armature instance later
- my_mesh.fbxBoneParent = blenParentBoneName # replace with my_bone instance later
+ my_mesh.fbxArm = armob # replace with my_object_generic armature instance later
+ my_mesh.fbxBoneParent = blenParentBoneName # replace with my_bone instance later
- ob_meshes.append( my_mesh )
+ ob_meshes.append(my_mesh)
# not forgetting to free dupli_list
- if ob_base.dupli_list: ob_base.free_dupli_list()
-
+ if ob_base.dupli_list:
+ ob_base.free_dupli_list()
if EXP_ARMATURE:
# now we have the meshes, restore the rest arm position
@@ -2066,22 +2074,22 @@ def save(operator, context, filepath="",
ob_arms[i] = my_arm = my_object_generic(ob)
- my_arm.fbxBones = []
- my_arm.blenData = ob.data
+ my_arm.fbxBones = []
+ my_arm.blenData = ob.data
if ob.animation_data:
- my_arm.blenAction = ob.animation_data.action
+ my_arm.blenAction = ob.animation_data.action
else:
my_arm.blenAction = None
# my_arm.blenAction = ob.action
- my_arm.blenActionList = []
+ my_arm.blenActionList = []
# fbxName, blenderObject, my_bones, blenderActions
#ob_arms[i] = fbxArmObName, ob, arm_my_bones, (ob.action, [])
for bone in my_arm.blenData.bones:
my_bone = my_bone_class(bone, my_arm)
- my_arm.fbxBones.append( my_bone )
- ob_bones.append( my_bone )
+ my_arm.fbxBones.append(my_bone)
+ ob_bones.append(my_bone)
# add the meshes to the bones and replace the meshes armature with own armature class
#for obname, ob, mtx, me, mats, arm, armname in ob_meshes:
@@ -2102,13 +2110,12 @@ def save(operator, context, filepath="",
if my_bone.blenBone.use_deform:
my_bone.blenMeshes[my_mesh.fbxName] = me
-
# parent bone: replace bone names with our class instances
# my_mesh.fbxBoneParent is None or a blender bone name initialy, replacing if the names match.
if my_mesh.fbxBoneParent == my_bone.blenName:
my_mesh.fbxBoneParent = my_bone
- bone_deformer_count = 0 # count how many bones deform a mesh
+ bone_deformer_count = 0 # count how many bones deform a mesh
my_bone_blenParent = None
for my_bone in ob_bones:
my_bone_blenParent = my_bone.blenBone.parent
@@ -2125,10 +2132,9 @@ def save(operator, context, filepath="",
del my_bone_blenParent
-
# Build blenObject -> fbxObject mapping
# this is needed for groups as well as fbxParenting
- for ob in bpy.data.objects: ob.tag = False
+ bpy.data.objects.tag(False)
# using a list of object names for tagging (Arystan)
@@ -2147,25 +2153,23 @@ def save(operator, context, filepath="",
fbxGroupName = sane_groupname(blenGroup)
groups.append((fbxGroupName, blenGroup))
- tmp_obmapping[ob].fbxGroupNames.append(fbxGroupName) # also adds to the objects fbxGroupNames
+ tmp_obmapping[ob].fbxGroupNames.append(fbxGroupName) # also adds to the objects fbxGroupNames
- groups.sort() # not really needed
+ groups.sort() # not really needed
# Assign parents using this mapping
for ob_generic in ob_all_typegroups:
for my_ob in ob_generic:
parent = my_ob.blenObject.parent
- if parent and parent.tag: # does it exist and is it in the mapping
+ if parent and parent.tag: # does it exist and is it in the mapping
my_ob.fbxParent = tmp_obmapping[parent]
-
del tmp_obmapping
# Finished finding groups we use
-
- materials = [(sane_matname(mat_tex_pair), mat_tex_pair) for mat_tex_pair in materials.keys()]
- textures = [(sane_texname(tex), tex) for tex in textures.keys() if tex]
- materials.sort() # sort by name
+ materials = [(sane_matname(mat_tex_pair), mat_tex_pair) for mat_tex_pair in materials.keys()]
+ textures = [(sane_texname(tex), tex) for tex in textures.keys() if tex]
+ materials.sort() # sort by name
textures.sort()
camera_count = 8
@@ -2177,16 +2181,16 @@ def save(operator, context, filepath="",
Definitions: {
Version: 100
Count: %i''' % (\
- 1+1+camera_count+\
- len(ob_meshes)+\
- len(ob_lights)+\
- len(ob_cameras)+\
- len(ob_arms)+\
- len(ob_null)+\
- len(ob_bones)+\
- bone_deformer_count+\
- len(materials)+\
- (len(textures)*2))) # add 1 for the root model 1 for global settings
+ 1 + 1 + camera_count + \
+ len(ob_meshes) + \
+ len(ob_lights) + \
+ len(ob_cameras) + \
+ len(ob_arms) + \
+ len(ob_null) + \
+ len(ob_bones) + \
+ bone_deformer_count + \
+ len(materials) + \
+ (len(textures) * 2))) # add 1 for the root model 1 for global settings
del bone_deformer_count
@@ -2194,13 +2198,13 @@ Definitions: {
ObjectType: "Model" {
Count: %i
}''' % (\
- 1+camera_count+\
- len(ob_meshes)+\
- len(ob_lights)+\
- len(ob_cameras)+\
- len(ob_arms)+\
- len(ob_null)+\
- len(ob_bones))) # add 1 for the root model
+ 1 + camera_count + \
+ len(ob_meshes) + \
+ len(ob_lights) + \
+ len(ob_cameras) + \
+ len(ob_arms) + \
+ len(ob_null) + \
+ len(ob_bones))) # add 1 for the root model
file.write('''
ObjectType: "Geometry" {
@@ -2217,17 +2221,17 @@ Definitions: {
file.write('''
ObjectType: "Texture" {
Count: %i
- }''' % len(textures)) # add 1 for an empty tex
+ }''' % len(textures)) # add 1 for an empty tex
file.write('''
ObjectType: "Video" {
Count: %i
- }''' % len(textures)) # add 1 for an empty tex
+ }''' % len(textures)) # add 1 for an empty tex
tmp = 0
# Add deformer nodes
for my_mesh in ob_meshes:
if my_mesh.fbxArm:
- tmp+=1
+ tmp += 1
# Add subdeformers
for my_bone in ob_bones:
@@ -2270,7 +2274,7 @@ Objects: {''')
write_camera_switch()
# Write the null object
- write_null(None, 'blend_root')# , GLOBAL_MATRIX)
+ write_null(None, 'blend_root') # , GLOBAL_MATRIX)
for my_null in ob_null:
write_null(my_null)
@@ -2294,7 +2298,7 @@ Objects: {''')
write_camera_default()
for matname, (mat, tex) in materials:
- write_material(matname, mat) # We only need to have a material per image pair, but no need to write any image info into the material (dumb fbx standard)
+ write_material(matname, mat) # We only need to have a material per image pair, but no need to write any image info into the material (dumb fbx standard)
# each texture uses a video, odd
for texname, tex in textures:
@@ -2302,7 +2306,7 @@ Objects: {''')
i = 0
for texname, tex in textures:
write_texture(texname, tex, i)
- i+=1
+ i += 1
for groupname, group in groups:
write_group(groupname)
@@ -2338,16 +2342,14 @@ Objects: {''')
NbPoseNodes: ''')
file.write(str(len(pose_items)))
-
for fbxName, matrix in pose_items:
file.write('\n\t\tPoseNode: {')
- file.write('\n\t\t\tNode: "Model::%s"' % fbxName )
+ file.write('\n\t\t\tNode: "Model::%s"' % fbxName)
file.write('\n\t\t\tMatrix: %s' % mat4x4str(matrix if matrix else Matrix()))
file.write('\n\t\t}')
file.write('\n\t}')
-
# Finish Writing Objects
# Write global settings
file.write('''
@@ -2429,7 +2431,7 @@ Relations: {''')
#for bonename, bone, obname, me, armob in ob_bones:
for my_bone in ob_bones:
- for fbxMeshObName in my_bone.blenMeshes: # .keys() - fbxMeshObName
+ for fbxMeshObName in my_bone.blenMeshes: # .keys() - fbxMeshObName
# is this bone effecting a mesh?
file.write('\n\tDeformer: "SubDeformer::Cluster %s %s", "Cluster" {\n\t}' % (fbxMeshObName, my_bone.fbxName))
@@ -2451,11 +2453,10 @@ Connections: {''')
# for instance, defining the material->mesh connection
# before the mesh->blend_root crashes cinema4d
-
# write the fake root node
file.write('\n\tConnect: "OO", "Model::blend_root", "Model::Scene"')
- for ob_generic in ob_all_typegroups: # all blender 'Object's we support
+ for ob_generic in ob_all_typegroups: # all blender 'Object's we support
for my_ob in ob_generic:
if my_ob.fbxParent:
file.write('\n\tConnect: "OO", "Model::%s", "Model::%s"' % (my_ob.fbxName, my_ob.fbxParent.fbxName))
@@ -2466,11 +2467,8 @@ Connections: {''')
for my_mesh in ob_meshes:
# Connect all materials to all objects, not good form but ok for now.
for mat, tex in my_mesh.blenMaterials:
- if mat: mat_name = mat.name
- else: mat_name = None
-
- if tex: tex_name = tex.name
- else: tex_name = None
+ mat_name = mat.name if mat else None
+ tex_name = tex.name if tex else None
file.write('\n\tConnect: "OO", "Material::%s", "Model::%s"' % (sane_name_mapping_mat[mat_name, tex_name], my_mesh.fbxName))
@@ -2491,24 +2489,23 @@ Connections: {''')
#for bonename, bone, obname, me, armob in ob_bones:
for my_bone in ob_bones:
- for fbxMeshObName in my_bone.blenMeshes: # .keys()
+ for fbxMeshObName in my_bone.blenMeshes: # .keys()
file.write('\n\tConnect: "OO", "SubDeformer::Cluster %s %s", "Deformer::Skin %s"' % (fbxMeshObName, my_bone.fbxName, fbxMeshObName))
# limbs -> deformers
# for bonename, bone, obname, me, armob in ob_bones:
for my_bone in ob_bones:
- for fbxMeshObName in my_bone.blenMeshes: # .keys()
+ for fbxMeshObName in my_bone.blenMeshes: # .keys()
file.write('\n\tConnect: "OO", "Model::%s", "SubDeformer::Cluster %s %s"' % (my_bone.fbxName, fbxMeshObName, my_bone.fbxName))
-
#for bonename, bone, obname, me, armob in ob_bones:
for my_bone in ob_bones:
# Always parent to armature now
if my_bone.parent:
- file.write('\n\tConnect: "OO", "Model::%s", "Model::%s"' % (my_bone.fbxName, my_bone.parent.fbxName) )
+ file.write('\n\tConnect: "OO", "Model::%s", "Model::%s"' % (my_bone.fbxName, my_bone.parent.fbxName))
else:
# the armature object is written as an empty and all root level bones connect to it
- file.write('\n\tConnect: "OO", "Model::%s", "Model::%s"' % (my_bone.fbxName, my_bone.fbxArm.fbxName) )
+ file.write('\n\tConnect: "OO", "Model::%s", "Model::%s"' % (my_bone.fbxName, my_bone.fbxArm.fbxName))
# groups
if groups:
@@ -2522,7 +2519,6 @@ Connections: {''')
file.write('\n}')
-
# Needed for scene footer as well as animation
render = scene.render
@@ -2530,12 +2526,13 @@ Connections: {''')
#define KTIME_ONE_SECOND KTime (K_LONGLONG(46186158000))
def fbx_time(t):
# 0.5 + val is the same as rounding.
- return int(0.5 + ((t/fps) * 46186158000))
+ return int(0.5 + ((t / fps) * 46186158000))
fps = float(render.fps)
- start = scene.frame_start
- end = scene.frame_end
- if end < start: start, end = end, st
+ start = scene.frame_start
+ end = scene.frame_end
+ if end < start:
+ start, end = end, st
# comment the following line, otherwise we don't get the pose
# if start==end: ANIM_ENABLE = False
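For context, the fbx_time() helper reformatted above converts a frame offset into FBX KTime ticks, where one second is 46186158000 ticks (the KTIME_ONE_SECOND constant quoted in the comment). A minimal standalone sketch of that arithmetic, with a hypothetical fps in place of scene.render.fps:

    # Standalone sketch of the frame-to-KTime conversion used above.
    KTIME_ONE_SECOND = 46186158000  # taken from the exporter comment
    fps = 25.0  # hypothetical; the exporter reads scene.render.fps

    def fbx_time(t):
        # 0.5 + val rounds to the nearest tick without calling round()
        return int(0.5 + ((t / fps) * KTIME_ONE_SECOND))

    assert fbx_time(0) == 0
    assert fbx_time(fps) == KTIME_ONE_SECOND  # one second of frames -> one KTime second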
@@ -2562,7 +2559,6 @@ Connections: {''')
# bpy.data.actions.tag = False
tmp_actions = bpy.data.actions[:]
-
# find which actions are compatible with the armatures
# blenActions is not yet initialized so do it now.
tmp_act_count = 0
@@ -2576,10 +2572,10 @@ Connections: {''')
for action in tmp_actions:
- action_chan_names = arm_bone_names.intersection( set([g.name for g in action.groups]) )
+ action_chan_names = arm_bone_names.intersection(set([g.name for g in action.groups]))
# action_chan_names = arm_bone_names.intersection( set(action.getChannelNames()) )
- if action_chan_names: # at least one channel matches.
+ if action_chan_names: # at least one channel matches.
my_arm.blenActionList.append(action)
tagged_actions.append(action.name)
# action.tag = True
@@ -2595,7 +2591,7 @@ Connections: {''')
del action_lastcompat
- tmp_actions.insert(0, None) # None is the default action
+ tmp_actions.insert(0, None) # None is the default action
file.write('''
;Takes and animation section
@@ -2621,11 +2617,11 @@ Takes: {''')
if blenAction is None:
# Warning, this only accounts for tmp_actions being [None]
file.write('\n\tTake: "Default Take" {')
- act_start = start
- act_end = end
+ act_start = start
+ act_end = end
else:
# use existing name
- if blenAction == blenActionDefault: # have we already got the name
+ if blenAction == blenActionDefault: # have we already got the name
file.write('\n\tTake: "%s" {' % sane_name_mapping_take[blenAction.name])
else:
file.write('\n\tTake: "%s" {' % sane_takename(blenAction))
@@ -2641,16 +2637,15 @@ Takes: {''')
# print('\t\tSetting Action!', blenAction)
# scene.update(1)
- file.write('\n\t\tFileName: "Default_Take.tak"') # ??? - not sure why this is needed
- file.write('\n\t\tLocalTime: %i,%i' % (fbx_time(act_start-1), fbx_time(act_end-1))) # ??? - not sure why this is needed
- file.write('\n\t\tReferenceTime: %i,%i' % (fbx_time(act_start-1), fbx_time(act_end-1))) # ??? - not sure why this is needed
+ file.write('\n\t\tFileName: "Default_Take.tak"') # ??? - not sure why this is needed
+ file.write('\n\t\tLocalTime: %i,%i' % (fbx_time(act_start - 1), fbx_time(act_end - 1))) # ??? - not sure why this is needed
+ file.write('\n\t\tReferenceTime: %i,%i' % (fbx_time(act_start - 1), fbx_time(act_end - 1))) # ??? - not sure why this is needed
file.write('''
;Models animation
;----------------------------------------------------''')
-
# set pose data for all bones
# do this here in case the action changes
'''
@@ -2669,8 +2664,7 @@ Takes: {''')
else:
my_ob.setPoseFrame(i)
- i+=1
-
+ i += 1
#for bonename, bone, obname, me, armob in ob_bones:
for ob_generic in (ob_bones, ob_meshes, ob_null, ob_cameras, ob_lights, ob_arms):
@@ -2682,19 +2676,21 @@ Takes: {''')
pass
else:
- file.write('\n\t\tModel: "Model::%s" {' % my_ob.fbxName) # ??? - not sure why this is needed
+ file.write('\n\t\tModel: "Model::%s" {' % my_ob.fbxName) # ??? - not sure why this is needed
file.write('\n\t\t\tVersion: 1.1')
file.write('\n\t\t\tChannel: "Transform" {')
- context_bone_anim_mats = [ (my_ob.getAnimParRelMatrix(frame), my_ob.getAnimParRelMatrixRot(frame)) for frame in range(act_start, act_end+1) ]
+ context_bone_anim_mats = [(my_ob.getAnimParRelMatrix(frame), my_ob.getAnimParRelMatrixRot(frame)) for frame in range(act_start, act_end + 1)]
# ----------------
# ----------------
- for TX_LAYER, TX_CHAN in enumerate('TRS'): # transform, rotate, scale
+ for TX_LAYER, TX_CHAN in enumerate('TRS'): # transform, rotate, scale
- if TX_CHAN=='T': context_bone_anim_vecs = [mtx[0].translation_part() for mtx in context_bone_anim_mats]
- elif TX_CHAN=='S': context_bone_anim_vecs = [mtx[0].scale_part() for mtx in context_bone_anim_mats]
- elif TX_CHAN=='R':
+ if TX_CHAN == 'T':
+ context_bone_anim_vecs = [mtx[0].translation_part() for mtx in context_bone_anim_mats]
+ elif TX_CHAN == 'S':
+ context_bone_anim_vecs = [mtx[0].scale_part() for mtx in context_bone_anim_mats]
+ elif TX_CHAN == 'R':
# Was....
# elif TX_CHAN=='R': context_bone_anim_vecs = [mtx[1].to_euler() for mtx in context_bone_anim_mats]
#
@@ -2702,16 +2698,18 @@ Takes: {''')
context_bone_anim_vecs = []
prev_eul = None
for mtx in context_bone_anim_mats:
- if prev_eul: prev_eul = mtx[1].to_euler('XYZ', prev_eul)
- else: prev_eul = mtx[1].to_euler()
+ if prev_eul:
+ prev_eul = mtx[1].to_euler('XYZ', prev_eul)
+ else:
+ prev_eul = mtx[1].to_euler()
context_bone_anim_vecs.append(tuple_rad_to_deg(prev_eul))
- file.write('\n\t\t\t\tChannel: "%s" {' % TX_CHAN) # translation
+ file.write('\n\t\t\t\tChannel: "%s" {' % TX_CHAN) # translation
for i in range(3):
# Loop on each axis of the bone
- file.write('\n\t\t\t\t\tChannel: "%s" {'% ('XYZ'[i])) # translation
- file.write('\n\t\t\t\t\t\tDefault: %.15f' % context_bone_anim_vecs[0][i] )
+ file.write('\n\t\t\t\t\tChannel: "%s" {' % ('XYZ'[i])) # translation
+ file.write('\n\t\t\t\t\t\tDefault: %.15f' % context_bone_anim_vecs[0][i])
file.write('\n\t\t\t\t\t\tKeyVer: 4005')
if not ANIM_OPTIMIZE:
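The 'R' branch above passes the previous frame's Euler as the compatibility argument to to_euler(), which keeps consecutive rotations on the same branch instead of jumping between equivalent angle sets. A minimal sketch of that pattern, assuming Blender's mathutils is importable and using hypothetical per-frame rotation matrices:

    # Euler-compatibility loop, mirroring the 'R' channel handling above.
    from math import radians
    from mathutils import Matrix

    rot_mats = [Matrix.Rotation(radians(5 * i), 4, 'Z') for i in range(10)]  # hypothetical frames

    eulers = []
    prev_eul = None
    for mtx in rot_mats:
        if prev_eul:
            prev_eul = mtx.to_euler('XYZ', prev_eul)  # stay compatible with the previous frame
        else:
            prev_eul = mtx.to_euler()
        eulers.append(prev_eul)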
@@ -2720,45 +2718,45 @@ Takes: {''')
file.write('\n\t\t\t\t\t\tKey: ')
frame = act_start
while frame <= act_end:
- if frame!=act_start:
+ if frame != act_start:
file.write(',')
# Curve types are 'C,n' for constant, 'L' for linear
# C,n is for bezier? - linear is best for now so we can do simple keyframe removal
- file.write('\n\t\t\t\t\t\t\t%i,%.15f,L' % (fbx_time(frame-1), context_bone_anim_vecs[frame-act_start][i] ))
- frame+=1
+ file.write('\n\t\t\t\t\t\t\t%i,%.15f,L' % (fbx_time(frame - 1), context_bone_anim_vecs[frame - act_start][i]))
+ frame += 1
else:
# remove unneeded keys, j is the frame, needed when some frames are removed.
- context_bone_anim_keys = [ (vec[i], j) for j, vec in enumerate(context_bone_anim_vecs) ]
+ context_bone_anim_keys = [(vec[i], j) for j, vec in enumerate(context_bone_anim_vecs)]
# last frame to first frame, missing 1 frame on either side.
# removing in a backwards loop is faster
#for j in xrange( (act_end-act_start)-1, 0, -1 ):
# j = (act_end-act_start)-1
- j = len(context_bone_anim_keys)-2
+ j = len(context_bone_anim_keys) - 2
while j > 0 and len(context_bone_anim_keys) > 2:
# print j, len(context_bone_anim_keys)
# Is this key the same as the ones next to it?
# co-linear horizontal...
- if abs(context_bone_anim_keys[j][0] - context_bone_anim_keys[j-1][0]) < ANIM_OPTIMIZE_PRECISSION_FLOAT and\
- abs(context_bone_anim_keys[j][0] - context_bone_anim_keys[j+1][0]) < ANIM_OPTIMIZE_PRECISSION_FLOAT:
+ if abs(context_bone_anim_keys[j][0] - context_bone_anim_keys[j - 1][0]) < ANIM_OPTIMIZE_PRECISSION_FLOAT and \
+ abs(context_bone_anim_keys[j][0] - context_bone_anim_keys[j + 1][0]) < ANIM_OPTIMIZE_PRECISSION_FLOAT:
del context_bone_anim_keys[j]
else:
- frame_range = float(context_bone_anim_keys[j+1][1] - context_bone_anim_keys[j-1][1])
- frame_range_fac1 = (context_bone_anim_keys[j+1][1] - context_bone_anim_keys[j][1]) / frame_range
+ frame_range = float(context_bone_anim_keys[j + 1][1] - context_bone_anim_keys[j - 1][1])
+ frame_range_fac1 = (context_bone_anim_keys[j + 1][1] - context_bone_anim_keys[j][1]) / frame_range
frame_range_fac2 = 1.0 - frame_range_fac1
- if abs(((context_bone_anim_keys[j-1][0]*frame_range_fac1 + context_bone_anim_keys[j+1][0]*frame_range_fac2)) - context_bone_anim_keys[j][0]) < ANIM_OPTIMIZE_PRECISSION_FLOAT:
+ if abs(((context_bone_anim_keys[j - 1][0] * frame_range_fac1 + context_bone_anim_keys[j + 1][0] * frame_range_fac2)) - context_bone_anim_keys[j][0]) < ANIM_OPTIMIZE_PRECISSION_FLOAT:
del context_bone_anim_keys[j]
else:
- j-=1
+ j -= 1
# keep the index below the list length
- if j > len(context_bone_anim_keys)-2:
- j = len(context_bone_anim_keys)-2
+ if j > len(context_bone_anim_keys) - 2:
+ j = len(context_bone_anim_keys) - 2
if len(context_bone_anim_keys) == 2 and context_bone_anim_keys[0][0] == context_bone_anim_keys[1][0]:
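The ANIM_OPTIMIZE branch above drops keys that are effectively co-linear with their neighbours, walking the list backwards so deletions do not disturb the indices still to be visited. A dependency-free sketch of that reduction, with the tolerance standing in for ANIM_OPTIMIZE_PRECISSION_FLOAT:

    # Backwards co-linear key removal, as in the optimization branch above.
    # keys is a list of (value, frame) pairs.
    def reduce_keys(keys, tol=0.001):
        keys = list(keys)
        j = len(keys) - 2
        while j > 0 and len(keys) > 2:
            val, frame = keys[j]
            prev_val, prev_frame = keys[j - 1]
            next_val, next_frame = keys[j + 1]
            if abs(val - prev_val) < tol and abs(val - next_val) < tol:
                del keys[j]  # flat run: key matches both neighbours
            else:
                frame_range = float(next_frame - prev_frame)
                fac1 = (next_frame - frame) / frame_range
                fac2 = 1.0 - fac1
                if abs((prev_val * fac1 + next_val * fac2) - val) < tol:
                    del keys[j]  # key sits on the line between its neighbours
                else:
                    j -= 1
            if j > len(keys) - 2:  # keep the index inside the shrinking list
                j = len(keys) - 2
        return keys

    # a constant curve collapses to its two end keys
    assert reduce_keys([(1.0, f) for f in range(10)]) == [(1.0, 0), (1.0, 9)]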
@@ -2768,23 +2766,26 @@ Takes: {''')
# better write one, otherwise we lose poses with no animation
file.write('\n\t\t\t\t\t\tKeyCount: 1')
file.write('\n\t\t\t\t\t\tKey: ')
- file.write('\n\t\t\t\t\t\t\t%i,%.15f,L' % (fbx_time(start), context_bone_anim_keys[0][0]))
+ file.write('\n\t\t\t\t\t\t\t%i,%.15f,L' % (fbx_time(start), context_bone_anim_keys[0][0]))
else:
# We only need to write these if there is at least one
file.write('\n\t\t\t\t\t\tKeyCount: %i' % len(context_bone_anim_keys))
file.write('\n\t\t\t\t\t\tKey: ')
for val, frame in context_bone_anim_keys:
- if frame != context_bone_anim_keys[0][1]: # not the first
+ if frame != context_bone_anim_keys[0][1]: # not the first
file.write(',')
# frame is already one less than Blender's frame
- file.write('\n\t\t\t\t\t\t\t%i,%.15f,L' % (fbx_time(frame), val ))
+ file.write('\n\t\t\t\t\t\t\t%i,%.15f,L' % (fbx_time(frame), val))
- if i==0: file.write('\n\t\t\t\t\t\tColor: 1,0,0')
- elif i==1: file.write('\n\t\t\t\t\t\tColor: 0,1,0')
- elif i==2: file.write('\n\t\t\t\t\t\tColor: 0,0,1')
+ if i == 0:
+ file.write('\n\t\t\t\t\t\tColor: 1,0,0')
+ elif i == 1:
+ file.write('\n\t\t\t\t\t\tColor: 0,1,0')
+ elif i == 2:
+ file.write('\n\t\t\t\t\t\tColor: 0,0,1')
file.write('\n\t\t\t\t\t}')
- file.write('\n\t\t\t\t\tLayerType: %i' % (TX_LAYER+1) )
+ file.write('\n\t\t\t\t\tLayerType: %i' % (TX_LAYER + 1))
file.write('\n\t\t\t\t}')
# ---------------
@@ -2814,11 +2815,9 @@ Takes: {''')
file.write('\n\tCurrent: ""')
file.write('\n}')
-
# write meshes animation
#for obname, ob, mtx, me, mats, arm, armname in ob_meshes:
-
# Clear mesh data Only when writing with modifiers applied
for me in meshes_to_clear:
bpy.data.meshes.remove(me)
@@ -2857,8 +2856,8 @@ Takes: {''')
file.write('\n\t\tTimeFormat: 1')
file.write('\n\t\tSnapOnFrames: 0')
file.write('\n\t\tReferenceTimeIndex: -1')
- file.write('\n\t\tTimeLineStartTime: %i' % fbx_time(start-1))
- file.write('\n\t\tTimeLineStopTime: %i' % fbx_time(end-1))
+ file.write('\n\t\tTimeLineStartTime: %i' % fbx_time(start - 1))
+ file.write('\n\t\tTimeLineStopTime: %i' % fbx_time(end - 1))
file.write('\n\t}')
file.write('\n\tRendererSetting: {')
file.write('\n\t\tDefaultCamera: "Producer Perspective"')
@@ -2874,19 +2873,17 @@ Takes: {''')
sane_name_mapping_take.clear()
sane_name_mapping_group.clear()
- ob_arms[:] = []
- ob_bones[:] = []
- ob_cameras[:] = []
- ob_lights[:] = []
- ob_meshes[:] = []
- ob_null[:] = []
-
+ ob_arms[:] = []
+ ob_bones[:] = []
+ ob_cameras[:] = []
+ ob_lights[:] = []
+ ob_meshes[:] = []
+ ob_null[:] = []
# copy images if enabled
# if EXP_IMAGE_COPY:
# # copy_images( basepath, [ tex[1] for tex in textures if tex[1] != None ])
# bpy.util.copy_images( [ tex[1] for tex in textures if tex[1] != None ], basepath)
-
file.close()
print('export finished in %.4f sec.' % (time.clock() - start_time))
diff --git a/release/scripts/op/io_shape_mdd/export_mdd.py b/release/scripts/op/io_shape_mdd/export_mdd.py
index 7fb11facf58..42795d450ab 100644
--- a/release/scripts/op/io_shape_mdd/export_mdd.py
+++ b/release/scripts/op/io_shape_mdd/export_mdd.py
@@ -73,12 +73,11 @@ def save(operator, context, filepath="", frame_start=1, frame_end=300, fps=25):
me = obj.create_mesh(scene, True, 'PREVIEW')
#Flip y and z
- mat_flip = mathutils.Matrix(\
- [1.0, 0.0, 0.0, 0.0],\
- [0.0, 0.0, 1.0, 0.0],\
- [0.0, 1.0, 0.0, 0.0],\
- [0.0, 0.0, 0.0, 1.0],\
- )
+ mat_flip = mathutils.Matrix(((1.0, 0.0, 0.0, 0.0), \
+ (0.0, 0.0, 1.0, 0.0), \
+ (0.0, 1.0, 0.0, 0.0), \
+ (0.0, 0.0, 0.0, 1.0), \
+ ))
numverts = len(me.vertices)
diff --git a/release/scripts/op/uvcalc_smart_project.py b/release/scripts/op/uvcalc_smart_project.py
index d4b5a38e11d..3a835ae671e 100644
--- a/release/scripts/op/uvcalc_smart_project.py
+++ b/release/scripts/op/uvcalc_smart_project.py
@@ -78,11 +78,7 @@ def pointInTri2D(v, v1, v2, v3):
nor = side1.cross(side2)
- l1 = [side1[0], side1[1], side1[2]]
- l2 = [side2[0], side2[1], side2[2]]
- l3 = [nor[0], nor[1], nor[2]]
-
- mtx = Matrix(l1, l2, l3)
+ mtx = Matrix((side1, side2, nor))
# Zero area 2d tri, even though we throw away zero area faces
# the projection UV can result in a zero area UV.
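Both script changes above track the same mathutils change: Matrix() no longer takes 2-4 separate row arguments, it takes one sequence of row sequences. A small sketch of the new call style, assuming Blender's mathutils is importable:

    # New-style Matrix construction: one nested sequence, one inner sequence per row.
    from mathutils import Matrix, Vector

    side1 = Vector((1.0, 0.0, 0.0))
    side2 = Vector((0.0, 1.0, 0.0))
    nor = side1.cross(side2)

    mtx = Matrix((side1, side2, nor))  # 3x3, rows taken from the nested sequence

    mat_flip = Matrix(((1.0, 0.0, 0.0, 0.0),
                       (0.0, 0.0, 1.0, 0.0),
                       (0.0, 1.0, 0.0, 0.0),
                       (0.0, 0.0, 0.0, 1.0)))  # 4x4, as in export_mdd above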
diff --git a/source/blender/blenkernel/intern/depsgraph.c b/source/blender/blenkernel/intern/depsgraph.c
index 61865f5579c..71c3e76c207 100644
--- a/source/blender/blenkernel/intern/depsgraph.c
+++ b/source/blender/blenkernel/intern/depsgraph.c
@@ -2452,7 +2452,7 @@ void DAG_id_tag_update(ID *id, short flag)
}
}
else {
- BKE_assert(!"invalid flag for this 'idtype'");
+ BLI_assert(!"invalid flag for this 'idtype'");
}
}
}
diff --git a/source/blender/blenkernel/intern/key.c b/source/blender/blenkernel/intern/key.c
index 328fb1b6a49..0955ccd170e 100644
--- a/source/blender/blenkernel/intern/key.c
+++ b/source/blender/blenkernel/intern/key.c
@@ -552,7 +552,7 @@ static short key_pointer_size(const Key *key, const int mode, int *poinsize, int
break;
default:
- BKE_assert(!"invalid 'key->from' ID type");
+ BLI_assert(!"invalid 'key->from' ID type");
return FALSE;
}
@@ -641,7 +641,7 @@ static void cp_key(const int start, int end, const int tot, char *poin, Key *key
/* should never happen */
if(freek1) MEM_freeN(freek1);
if(freekref) MEM_freeN(freekref);
- BKE_assert(!"invalid 'cp[1]'");
+ BLI_assert(!"invalid 'cp[1]'");
return;
}
@@ -777,7 +777,7 @@ void do_rel_key(const int start, int end, const int tot, char *basispoin, Key *k
/* should never happen */
if(freefrom) MEM_freeN(freefrom);
if(freereffrom) MEM_freeN(freereffrom);
- BKE_assert(!"invalid 'cp[1]'");
+ BLI_assert(!"invalid 'cp[1]'");
return;
}
@@ -944,7 +944,7 @@ static void do_key(const int start, int end, const int tot, char *poin, Key *key
if(freek2) MEM_freeN(freek2);
if(freek3) MEM_freeN(freek3);
if(freek4) MEM_freeN(freek4);
- BKE_assert(!"invalid 'cp[1]'");
+ BLI_assert(!"invalid 'cp[1]'");
return;
}
diff --git a/source/blender/blenkernel/intern/object.c b/source/blender/blenkernel/intern/object.c
index 1ebbdf78f61..b71338aaa70 100644
--- a/source/blender/blenkernel/intern/object.c
+++ b/source/blender/blenkernel/intern/object.c
@@ -2562,7 +2562,7 @@ void object_handle_update(Scene *scene, Object *ob)
{
#if 0 // XXX, comment for 2.56a release, background won't set 'scene->customdata_mask'
EditMesh *em = (ob == scene->obedit)? BKE_mesh_get_editmesh(ob->data): NULL;
- BKE_assert((scene->customdata_mask & CD_MASK_BAREMESH) == CD_MASK_BAREMESH);
+ BLI_assert((scene->customdata_mask & CD_MASK_BAREMESH) == CD_MASK_BAREMESH);
if(em) {
makeDerivedMesh(scene, ob, em, scene->customdata_mask); /* was CD_MASK_BAREMESH */
BKE_mesh_end_editmesh(ob->data, em);
diff --git a/source/blender/blenlib/BLI_utildefines.h b/source/blender/blenlib/BLI_utildefines.h
index 468c146a65d..ccda9c17d43 100644
--- a/source/blender/blenlib/BLI_utildefines.h
+++ b/source/blender/blenlib/BLI_utildefines.h
@@ -181,7 +181,7 @@
#endif
-/* BKE_assert(), default only to print
+/* BLI_assert(), default only to print
* for aborting need to define WITH_ASSERT_ABORT
*/
#if !defined NDEBUG
@@ -191,28 +191,28 @@
# define _dummy_abort() (void)0
# endif
# ifdef __GNUC__ /* just want to check if __func__ is available */
-# define BKE_assert(a) \
+# define BLI_assert(a) \
do { \
if (0 == (a)) { \
fprintf(stderr, \
- "BKE_assert failed: %s, %s(), %d at \'%s\'\n", \
+ "BLI_assert failed: %s, %s(), %d at \'%s\'\n", \
__FILE__, __func__, __LINE__, STRINGIFY(a)); \
_dummy_abort(); \
} \
} while (0)
# else
-# define BKE_assert(a) \
+# define BLI_assert(a) \
do { \
if (0 == (a)) { \
fprintf(stderr, \
- "BKE_assert failed: %s, %d at \'%s\'\n", \
+ "BLI_assert failed: %s, %d at \'%s\'\n", \
__FILE__, __LINE__, STRINGIFY(a)); \
_dummy_abort(); \
} \
} while (0)
# endif
#else
-# define BKE_assert(a) (void)0
+# define BLI_assert(a) (void)0
#endif
#endif // BLI_UTILDEFINES_H
diff --git a/source/blender/editors/interface/interface_icons.c b/source/blender/editors/interface/interface_icons.c
index 7d2f8e0d32a..ff041c18b00 100644
--- a/source/blender/editors/interface/interface_icons.c
+++ b/source/blender/editors/interface/interface_icons.c
@@ -851,7 +851,7 @@ static void icon_draw_rect(float x, float y, int w, int h, float UNUSED(aspect),
/* sanity check */
if(w<=0 || h<=0 || w>2000 || h>2000) {
printf("icon_draw_rect: icons are %i x %i pixels?\n", w, h);
- BKE_assert(!"invalid icon size");
+ BLI_assert(!"invalid icon size");
return;
}
diff --git a/source/blender/editors/interface/resources.c b/source/blender/editors/interface/resources.c
index 9c5f4067204..b2f74e435b1 100644
--- a/source/blender/editors/interface/resources.c
+++ b/source/blender/editors/interface/resources.c
@@ -981,7 +981,7 @@ void UI_make_axis_color(const unsigned char src_col[3], unsigned char dst_col[3]
dst_col[2]= src_col[2]>209?255:src_col[2]+46;
break;
default:
- BKE_assert(!"invalid axis arg");
+ BLI_assert(!"invalid axis arg");
}
}
diff --git a/source/blender/editors/space_view3d/drawobject.c b/source/blender/editors/space_view3d/drawobject.c
index 17e8b09ad68..8b77ff9bbdc 100644
--- a/source/blender/editors/space_view3d/drawobject.c
+++ b/source/blender/editors/space_view3d/drawobject.c
@@ -3326,13 +3326,15 @@ static void draw_particle(ParticleKey *state, int draw_as, short draw, float pix
vec[1]=vec[2]=0.0;
mul_qt_v3(state->rot,vec);
if(draw_as==PART_DRAW_AXIS) {
- cd[1]=cd[2]=cd[4]=cd[5]=0.0;
- cd[0]=cd[3]=1.0;
- cd[6]=cd[8]=cd[9]=cd[11]=0.0;
- cd[7]=cd[10]=1.0;
- cd[13]=cd[12]=cd[15]=cd[16]=0.0;
- cd[14]=cd[17]=1.0;
- pdd->cd+=18;
+ if(cd) {
+ cd[1]=cd[2]=cd[4]=cd[5]=0.0;
+ cd[0]=cd[3]=1.0;
+ cd[6]=cd[8]=cd[9]=cd[11]=0.0;
+ cd[7]=cd[10]=1.0;
+ cd[13]=cd[12]=cd[15]=cd[16]=0.0;
+ cd[14]=cd[17]=1.0;
+ pdd->cd+=18;
+ }
copy_v3_v3(vec2,state->co);
}
diff --git a/source/blender/editors/transform/transform_manipulator.c b/source/blender/editors/transform/transform_manipulator.c
index b4fe38b41ac..d69ba22585b 100644
--- a/source/blender/editors/transform/transform_manipulator.c
+++ b/source/blender/editors/transform/transform_manipulator.c
@@ -734,7 +734,7 @@ static void manipulator_setcolor(View3D *v3d, char axis, int colcode, unsigned c
col[2]= 220;
break;
default:
- BKE_assert(!"invalid axis arg");
+ BLI_assert(!"invalid axis arg");
}
}
diff --git a/source/blender/makesrna/intern/rna_access.c b/source/blender/makesrna/intern/rna_access.c
index d509efbb927..bf5beb05b06 100644
--- a/source/blender/makesrna/intern/rna_access.c
+++ b/source/blender/makesrna/intern/rna_access.c
@@ -3348,7 +3348,7 @@ static char *rna_idp_path(PointerRNA *ptr, IDProperty *haystack, IDProperty *nee
IDProperty *iter;
int i;
- BKE_assert(haystack->type == IDP_GROUP);
+ BLI_assert(haystack->type == IDP_GROUP);
link.up= parent_link;
link.name= NULL;
@@ -3408,7 +3408,7 @@ static char *rna_path_from_ID_to_idpgroup(PointerRNA *ptr)
IDProperty *haystack;
IDProperty *needle;
- BKE_assert(ptr->id.data != NULL);
+ BLI_assert(ptr->id.data != NULL);
RNA_id_pointer_create(ptr->id.data, &id_ptr);
haystack= RNA_struct_idprops(&id_ptr, FALSE);
diff --git a/source/blender/python/generic/IDProp.c b/source/blender/python/generic/IDProp.c
index 7bf7f06f989..9e4aa88354d 100644
--- a/source/blender/python/generic/IDProp.c
+++ b/source/blender/python/generic/IDProp.c
@@ -261,7 +261,7 @@ static int idp_sequence_type(PyObject *seq)
PyObject *item;
int type= IDP_INT;
- int i, len = PySequence_Length(seq);
+ int i, len = PySequence_Size(seq);
for (i=0; i < len; i++) {
item = PySequence_GetItem(seq, i);
if (PyFloat_Check(item)) {
@@ -331,7 +331,7 @@ const char *BPy_IDProperty_Map_ValidateAndCreate(const char *name, IDProperty *g
we assume IDP_INT unless we hit a float
number; then we assume it's */
- val.array.len = PySequence_Length(ob);
+ val.array.len = PySequence_Size(ob);
switch(val.array.type) {
case IDP_DOUBLE:
diff --git a/source/blender/python/generic/bgl.c b/source/blender/python/generic/bgl.c
index e72f2136ee5..3d525b08ecc 100644
--- a/source/blender/python/generic/bgl.c
+++ b/source/blender/python/generic/bgl.c
@@ -207,7 +207,7 @@ static PyObject *Method_Buffer (PyObject *UNUSED(self), PyObject *args)
ndimensions= 1;
dimensions[0]= PyLong_AsLong(length_ob);
} else if (PySequence_Check(length_ob)) {
- ndimensions= PySequence_Length(length_ob);
+ ndimensions= PySequence_Size(length_ob);
if (ndimensions > MAX_DIMENSIONS) {
PyErr_SetString(PyExc_AttributeError, "too many dimensions, max is 256");
return NULL;
@@ -360,8 +360,8 @@ static int Buffer_ass_slice(PyObject *self, int begin, int end, PyObject *seq)
return -1;
}
- if (PySequence_Length(seq)!=(end-begin)) {
- int seq_len = PySequence_Length(seq);
+ if (PySequence_Size(seq)!=(end-begin)) {
+ int seq_len = PySequence_Size(seq);
char err_str[128];
sprintf(err_str, "size mismatch in assignment. Expected size: %d (size provided: %d)", seq_len, (end-begin));
PyErr_SetString(PyExc_TypeError, err_str);
diff --git a/source/blender/python/generic/mathutils_matrix.c b/source/blender/python/generic/mathutils_matrix.c
index 35d58ceb255..2418f746dd0 100644
--- a/source/blender/python/generic/mathutils_matrix.c
+++ b/source/blender/python/generic/mathutils_matrix.c
@@ -31,7 +31,7 @@
#include "BLI_blenlib.h"
#include "BLI_utildefines.h"
-
+static int Matrix_ass_slice(MatrixObject * self, int begin, int end, PyObject *value);
/* matrix vector callbacks */
int mathutils_matrix_vector_cb_index= -1;
@@ -109,80 +109,42 @@ Mathutils_Callback mathutils_matrix_vector_cb = {
//create a new matrix type
static PyObject *Matrix_new(PyTypeObject *type, PyObject *args, PyObject *kwds)
{
- PyObject *argObject, *m, *s;
- MatrixObject *mat;
- int argSize, seqSize = 0, i, j;
- float matrix[16] = {0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f,
- 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 1.0f};
- float scalar;
-
if(kwds && PyDict_Size(kwds)) {
PyErr_SetString(PyExc_TypeError, "mathutils.Matrix(): takes no keyword args");
return NULL;
}
- argSize = PyTuple_GET_SIZE(args);
- if(argSize > MATRIX_MAX_DIM) { //bad arg nums
- PyErr_SetString(PyExc_AttributeError, "mathutils.Matrix(): expects 0-4 numeric sequences of the same size");
- return NULL;
- } else if (argSize == 0) { //return empty 4D matrix
- return (PyObject *) newMatrixObject(NULL, 4, 4, Py_NEW, type);
- }else if (argSize == 1){
- //copy constructor for matrix objects
- argObject = PyTuple_GET_ITEM(args, 0);
- if(MatrixObject_Check(argObject)){
- mat = (MatrixObject*)argObject;
- if(!BaseMath_ReadCallback(mat))
- return NULL;
+ switch(PyTuple_GET_SIZE(args)) {
+ case 0:
+ return (PyObject *) newMatrixObject(NULL, 4, 4, Py_NEW, type);
+ case 1:
+ {
+ PyObject *arg= PyTuple_GET_ITEM(args, 0);
- memcpy(matrix, mat->contigPtr, sizeof(float) * mat->rowSize * mat->colSize);
- argSize = mat->rowSize;
- seqSize = mat->colSize;
- }
- }else{ //2-4 arguments (all seqs? all same size?)
- for(i =0; i < argSize; i++){
- argObject = PyTuple_GET_ITEM(args, i);
- if (PySequence_Check(argObject)) { //seq?
- if(seqSize){ //0 at first
- if(PySequence_Length(argObject) != seqSize){ //seq size not same
- PyErr_SetString(PyExc_AttributeError, "mathutils.Matrix(): expects 0-4 numeric sequences of the same size");
- return NULL;
- }
- }
- seqSize = PySequence_Length(argObject);
- }else{ //arg not a sequence
- PyErr_SetString(PyExc_TypeError, "mathutils.Matrix(): expects 0-4 numeric sequences of the same size");
- return NULL;
- }
- }
- //all is well... let's continue parsing
- for (i = 0; i < argSize; i++){
- m = PyTuple_GET_ITEM(args, i);
- if (m == NULL) { // Failed to read sequence
- PyErr_SetString(PyExc_RuntimeError, "mathutils.Matrix(): failed to parse arguments");
- return NULL;
- }
+ const unsigned short row_size= PySequence_Size(arg); /* -1 is an error, size checks will account for this */
- for (j = 0; j < seqSize; j++) {
- s = PySequence_GetItem(m, j);
- if (s == NULL) { // Failed to read sequence
- PyErr_SetString(PyExc_RuntimeError, "mathutils.Matrix(): failed to parse arguments");
- return NULL;
- }
-
- scalar= (float)PyFloat_AsDouble(s);
- Py_DECREF(s);
-
- if(scalar==-1 && PyErr_Occurred()) { // parsed item is not a number
- PyErr_SetString(PyExc_AttributeError, "mathutils.Matrix(): expects 0-4 numeric sequences of the same size");
- return NULL;
- }
+ if(IN_RANGE_INCL(row_size, 2, 4)) {
+ PyObject *item= PySequence_GetItem(arg, 0);
+ const unsigned short col_size= PySequence_Size(item);
+ Py_XDECREF(item);
- matrix[(seqSize*i)+j]= scalar;
+ if(IN_RANGE_INCL(col_size, 2, 4)) {
+ /* sane row & col size, new matrix and assign as slice */
+ PyObject *matrix= newMatrixObject(NULL, row_size, col_size, Py_NEW, type);
+ if(Matrix_ass_slice((MatrixObject *)matrix, 0, INT_MAX, arg) == 0) {
+ return matrix;
+ }
+ else { /* matrix ok, slice assignment not */
+ Py_DECREF(matrix);
+ }
+ }
}
}
}
- return newMatrixObject(matrix, argSize, seqSize, Py_NEW, type);
+
+ /* will overwrite error */
+ PyErr_SetString(PyExc_TypeError, "mathutils.Matrix(): expects no args or 2-4 numeric sequences");
+ return NULL;
}
/*-----------------------CLASS-METHODS----------------------------*/
@@ -1554,7 +1516,7 @@ static PyObject *Matrix_mul(PyObject * m1, PyObject * m2)
}
}
else {
- BKE_assert(!"internal error");
+ BLI_assert(!"internal error");
}
PyErr_Format(PyExc_TypeError, "Matrix multiplication: not supported between '%.200s' and '%.200s' types", Py_TYPE(m1)->tp_name, Py_TYPE(m2)->tp_name);
@@ -1847,7 +1809,7 @@ self->matrix[1][1] = self->contigPtr[4] */
(i.e. it was allocated elsewhere by MEM_mallocN())
pass Py_NEW - if vector is not a WRAPPER and managed by PYTHON
(i.e. it must be created here with PyMEM_malloc())*/
-PyObject *newMatrixObject(float *mat, int rowSize, int colSize, int type, PyTypeObject *base_type)
+PyObject *newMatrixObject(float *mat, const unsigned short rowSize, const unsigned short colSize, int type, PyTypeObject *base_type)
{
MatrixObject *self;
int x, row, col;
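Seen from Python, the rewritten constructor above accepts either no arguments (a 4x4 matrix) or a single nested sequence with 2-4 rows of 2-4 items each, and raises TypeError for anything else. A brief sketch of that behaviour, to be run where mathutils is available (e.g. inside Blender):

    from mathutils import Matrix

    m4 = Matrix()                             # no arguments: 4x4 matrix
    m2 = Matrix(((1.0, 0.0), (0.0, 1.0)))     # one nested sequence: 2x2 matrix

    try:
        Matrix((1.0, 0.0), (0.0, 1.0))        # old multi-argument form is rejected
    except TypeError as ex:
        print("rejected:", ex)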
diff --git a/source/blender/python/generic/mathutils_matrix.h b/source/blender/python/generic/mathutils_matrix.h
index f1cce3a45a8..863cfeb5e05 100644
--- a/source/blender/python/generic/mathutils_matrix.h
+++ b/source/blender/python/generic/mathutils_matrix.h
@@ -49,7 +49,7 @@ be stored in py_data) or be a wrapper for data allocated through
blender (stored in blend_data). This is an either/or struct not both*/
/*prototypes*/
-PyObject *newMatrixObject(float *mat, int rowSize, int colSize, int type, PyTypeObject *base_type);
+PyObject *newMatrixObject(float *mat, const unsigned short rowSize, const unsigned short colSize, int type, PyTypeObject *base_type);
PyObject *newMatrixObject_cb(PyObject *user, int rowSize, int colSize, int cb_type, int cb_subtype);
extern int mathutils_matrix_vector_cb_index;
diff --git a/source/blender/python/generic/mathutils_quat.c b/source/blender/python/generic/mathutils_quat.c
index 1de249205b2..18eabcb77a1 100644
--- a/source/blender/python/generic/mathutils_quat.c
+++ b/source/blender/python/generic/mathutils_quat.c
@@ -684,7 +684,7 @@ static PyObject *Quaternion_mul(PyObject * q1, PyObject * q2)
}
}
else {
- BKE_assert(!"internal error");
+ BLI_assert(!"internal error");
}
PyErr_Format(PyExc_TypeError, "Quaternion multiplication: not supported between '%.200s' and '%.200s' types", Py_TYPE(q1)->tp_name, Py_TYPE(q2)->tp_name);
diff --git a/source/blender/python/generic/mathutils_vector.c b/source/blender/python/generic/mathutils_vector.c
index 2208046e6c0..cc9f9d32819 100644
--- a/source/blender/python/generic/mathutils_vector.c
+++ b/source/blender/python/generic/mathutils_vector.c
@@ -844,7 +844,7 @@ static int Vector_ass_slice(VectorObject *self, int begin, int end,
CLAMP(end, 0, self->size);
begin = MIN2(begin,end);
- size = PySequence_Length(seq);
+ size = PySequence_Size(seq);
if(size != (end - begin)){
PyErr_SetString(PyExc_TypeError, "vector[begin:end] = []: size mismatch in slice assignment");
return -1;
@@ -1129,7 +1129,7 @@ static PyObject *Vector_mul(PyObject * v1, PyObject * v2)
}
}
else {
- BKE_assert(!"internal error");
+ BLI_assert(!"internal error");
}
PyErr_Format(PyExc_TypeError, "Vector multiplication: not supported between '%.200s' and '%.200s' types", Py_TYPE(v1)->tp_name, Py_TYPE(v2)->tp_name);
diff --git a/source/blender/python/intern/bpy_interface.c b/source/blender/python/intern/bpy_interface.c
index 3c76e1a703f..f2ba9f604be 100644
--- a/source/blender/python/intern/bpy_interface.c
+++ b/source/blender/python/intern/bpy_interface.c
@@ -269,7 +269,7 @@ void BPY_python_start( int argc, char **argv )
Py_DECREF(mod);
}
else {
- BKE_assert(!"unable to load 'imp' module.");
+ BLI_assert(!"unable to load 'imp' module.");
}
}
@@ -319,7 +319,7 @@ static int python_script_exec(bContext *C, const char *fn, struct Text *text, st
PyObject *py_dict= NULL, *py_result= NULL;
PyGILState_STATE gilstate;
- BKE_assert(fn || text);
+ BLI_assert(fn || text);
if (fn==NULL && text==NULL) {
return 0;
diff --git a/source/blender/python/intern/bpy_props.c b/source/blender/python/intern/bpy_props.c
index dd3a49ad8b1..bdd1f0575bd 100644
--- a/source/blender/python/intern/bpy_props.c
+++ b/source/blender/python/intern/bpy_props.c
@@ -552,7 +552,7 @@ static EnumPropertyItem *enum_items_from_py(PyObject *value, PyObject *def, int
return NULL;
}
- seq_len= PySequence_Length(value);
+ seq_len= PySequence_Size(value);
if(is_enum_flag) {
if(seq_len > RNA_ENUM_BITFLAG_SIZE) {
diff --git a/source/blender/python/intern/bpy_rna.c b/source/blender/python/intern/bpy_rna.c
index 663c82e07ad..10b029084c8 100644
--- a/source/blender/python/intern/bpy_rna.c
+++ b/source/blender/python/intern/bpy_rna.c
@@ -75,7 +75,7 @@ static int rna_id_write_error(PointerRNA *ptr, PyObject *key)
else pyname= "<UNKNOWN>";
/* make a nice string error */
- BKE_assert(idtype != NULL);
+ BLI_assert(idtype != NULL);
PyErr_Format(PyExc_RuntimeError, "Writing to ID classes in this context is not allowed: %.200s, %.200s datablock, error setting %.200s.%.200s", id->name+2, idtype, RNA_struct_identifier(ptr->type), pyname);
return TRUE;
@@ -1250,7 +1250,7 @@ static int pyrna_py_to_prop(PointerRNA *ptr, PropertyRNA *prop, void *data, PyOb
return -1;
}
- seq_len = PySequence_Length(value);
+ seq_len = PySequence_Size(value);
for(i=0; i<seq_len; i++) {
item= PySequence_GetItem(value, i);
@@ -1572,7 +1572,7 @@ static PyObject *pyrna_prop_array_subscript_slice(BPy_PropertyArrayRNA *self, Po
break;
}
default:
- BKE_assert(!"Invalid array type");
+ BLI_assert(!"Invalid array type");
PyErr_SetString(PyExc_TypeError, "not an array type");
Py_DECREF(tuple);
@@ -2779,7 +2779,7 @@ static PyObject *pyrna_struct_getattro( BPy_StructRNA *self, PyObject *pyname )
break;
default:
/* should never happen */
- BKE_assert(!"Invalid context type");
+ BLI_assert(!"Invalid context type");
PyErr_Format(PyExc_AttributeError, "bpy_struct: Context type invalid %d, can't get \"%.200s\" from context", newtype, name);
ret= NULL;
@@ -3279,7 +3279,7 @@ static int foreach_parse_args(
return -1;
}
- *tot= PySequence_Length(*seq); // TODO - buffer may not be a sequence! array.array() is tho.
+ *tot= PySequence_Size(*seq); // TODO - buffer may not be a sequence! array.array() is tho.
if(*tot>0) {
foreach_attr_type(self, *attr, raw_type, attr_tot, attr_signed);
@@ -3400,7 +3400,7 @@ static PyObject *foreach_getset(BPy_PropertyRNA *self, PyObject *args, int set)
break;
case PROP_RAW_UNSET:
/* should never happen */
- BKE_assert(!"Invalid array type - set");
+ BLI_assert(!"Invalid array type - set");
break;
}
@@ -3455,7 +3455,7 @@ static PyObject *foreach_getset(BPy_PropertyRNA *self, PyObject *args, int set)
break;
case PROP_RAW_UNSET:
/* should never happen */
- BKE_assert(!"Invalid array type - get");
+ BLI_assert(!"Invalid array type - get");
break;
}
@@ -3813,7 +3813,7 @@ static PyObject * pyrna_func_call(PyObject *self, PyObject *args, PyObject *kw)
void *retdata_single= NULL;
/* Should never happen but it does in rare cases */
- BKE_assert(self_ptr != NULL);
+ BLI_assert(self_ptr != NULL);
if(self_ptr==NULL) {
PyErr_SetString(PyExc_RuntimeError, "rna functions internal rna pointer is NULL, this is a bug. aborting");
diff --git a/source/blender/python/intern/bpy_rna_array.c b/source/blender/python/intern/bpy_rna_array.c
index 8e815e34cbc..a0dbc2d5ff2 100644
--- a/source/blender/python/intern/bpy_rna_array.c
+++ b/source/blender/python/intern/bpy_rna_array.c
@@ -57,7 +57,7 @@ static int validate_array_type(PyObject *seq, int dim, int totdim, int dimsize[]
if (dim + 1 < totdim) {
/* check that a sequence contains dimsize[dim] items */
- for (i= 0; i < PySequence_Length(seq); i++) {
+ for (i= 0; i < PySequence_Size(seq); i++) {
PyObject *item;
int ok= 1;
item= PySequence_GetItem(seq, i);
@@ -72,7 +72,7 @@ static int validate_array_type(PyObject *seq, int dim, int totdim, int dimsize[]
dimsize[2]=5
dim=0 */
- else if (PySequence_Length(item) != dimsize[dim + 1]) {
+ else if (PySequence_Size(item) != dimsize[dim + 1]) {
/* BLI_snprintf(error_str, error_str_size, "sequences of dimension %d should contain %d items", (int)dim + 1, (int)dimsize[dim + 1]); */
PyErr_Format(PyExc_ValueError, "%s sequences of dimension %d should contain %d items", error_prefix, (int)dim + 1, (int)dimsize[dim + 1]);
ok= 0;
@@ -89,7 +89,7 @@ static int validate_array_type(PyObject *seq, int dim, int totdim, int dimsize[]
}
else {
/* check that items are of correct type */
- for (i= 0; i < PySequence_Length(seq); i++) {
+ for (i= 0; i < PySequence_Size(seq); i++) {
PyObject *item= PySequence_GetItem(seq, i);
if (!check_item_type(item)) {
@@ -114,7 +114,7 @@ static int count_items(PyObject *seq)
if (PySequence_Check(seq)) {
int i;
- for (i= 0; i < PySequence_Length(seq); i++) {
+ for (i= 0; i < PySequence_Size(seq); i++) {
PyObject *item= PySequence_GetItem(seq, i);
totitem += count_items(item);
Py_DECREF(item);
@@ -211,7 +211,7 @@ static char *copy_values(PyObject *seq, PointerRNA *ptr, PropertyRNA *prop, int
unsigned int i;
int totdim= RNA_property_array_dimension(ptr, prop, NULL);
- for (i= 0; i < PySequence_Length(seq); i++) {
+ for (i= 0; i < PySequence_Size(seq); i++) {
PyObject *item= PySequence_GetItem(seq, i);
if (dim + 1 < totdim) {
diff --git a/source/blender/windowmanager/intern/wm_files.c b/source/blender/windowmanager/intern/wm_files.c
index 518f7c4997a..0b88f0f42bf 100644
--- a/source/blender/windowmanager/intern/wm_files.c
+++ b/source/blender/windowmanager/intern/wm_files.c
@@ -341,7 +341,7 @@ void WM_read_file(bContext *C, const char *name, ReportList *reports)
}
else {
BKE_reportf(reports, RPT_ERROR, "Unknown error loading: \"%s\".", name);
- BKE_assert(!"invalid 'retval'");
+ BLI_assert(!"invalid 'retval'");
}
WM_cursor_wait(0);
diff --git a/source/gameengine/VideoTexture/FilterBlueScreen.cpp b/source/gameengine/VideoTexture/FilterBlueScreen.cpp
index 2a624b2ccaa..73b104d4585 100644
--- a/source/gameengine/VideoTexture/FilterBlueScreen.cpp
+++ b/source/gameengine/VideoTexture/FilterBlueScreen.cpp
@@ -80,7 +80,7 @@ static PyObject * getColor (PyFilter * self, void * closure)
static int setColor (PyFilter * self, PyObject * value, void * closure)
{
// check validity of parameter
- if (value == NULL || !PySequence_Check(value) || PySequence_Length(value) != 3
+ if (value == NULL || !PySequence_Check(value) || PySequence_Size(value) != 3
|| !PyLong_Check(PySequence_Fast_GET_ITEM(value, 0))
|| !PyLong_Check(PySequence_Fast_GET_ITEM(value, 1))
|| !PyLong_Check(PySequence_Fast_GET_ITEM(value, 2)))
@@ -107,7 +107,7 @@ static PyObject * getLimits (PyFilter * self, void * closure)
static int setLimits (PyFilter * self, PyObject * value, void * closure)
{
// check validity of parameter
- if (value == NULL || !PySequence_Check(value) || PySequence_Length(value) != 2
+ if (value == NULL || !PySequence_Check(value) || PySequence_Size(value) != 2
|| !PyLong_Check(PySequence_Fast_GET_ITEM(value, 0))
|| !PyLong_Check(PySequence_Fast_GET_ITEM(value, 1)))
{
diff --git a/source/gameengine/VideoTexture/FilterColor.cpp b/source/gameengine/VideoTexture/FilterColor.cpp
index e5d479747c4..7bbbc86e35f 100644
--- a/source/gameengine/VideoTexture/FilterColor.cpp
+++ b/source/gameengine/VideoTexture/FilterColor.cpp
@@ -129,14 +129,14 @@ static int setMatrix (PyFilter * self, PyObject * value, void * closure)
ColorMatrix mat;
// check validity of parameter
bool valid = value != NULL && PySequence_Check(value)
- && PySequence_Length(value) == 4;
+ && PySequence_Size(value) == 4;
// check rows
for (int r = 0; valid && r < 4; ++r)
{
// get row object
PyObject * row = PySequence_Fast_GET_ITEM(value, r);
// check sequence
- valid = PySequence_Check(row) && PySequence_Length(row) == 5;
+ valid = PySequence_Check(row) && PySequence_Size(row) == 5;
// check items
for (int c = 0; valid && c < 5; ++c)
{
@@ -262,14 +262,14 @@ static int setLevels (PyFilter * self, PyObject * value, void * closure)
ColorLevel lev;
// check validity of parameter
bool valid = value != NULL && PySequence_Check(value)
- && PySequence_Length(value) == 4;
+ && PySequence_Size(value) == 4;
// check rows
for (int r = 0; valid && r < 4; ++r)
{
// get row object
PyObject * row = PySequence_Fast_GET_ITEM(value, r);
// check sequence
- valid = PySequence_Check(row) && PySequence_Length(row) == 2;
+ valid = PySequence_Check(row) && PySequence_Size(row) == 2;
// check items
for (int c = 0; valid && c < 2; ++c)
{
diff --git a/source/gameengine/VideoTexture/ImageRender.cpp b/source/gameengine/VideoTexture/ImageRender.cpp
index 0aba4be1121..2325329d55b 100644
--- a/source/gameengine/VideoTexture/ImageRender.cpp
+++ b/source/gameengine/VideoTexture/ImageRender.cpp
@@ -336,7 +336,7 @@ PyObject * getBackground (PyImage * self, void * closure)
static int setBackground (PyImage * self, PyObject * value, void * closure)
{
// check validity of parameter
- if (value == NULL || !PySequence_Check(value) || PySequence_Length(value) != 4
+ if (value == NULL || !PySequence_Check(value) || PySequence_Size(value) != 4
|| !PyLong_Check(PySequence_Fast_GET_ITEM(value, 0))
|| !PyLong_Check(PySequence_Fast_GET_ITEM(value, 1))
|| !PyLong_Check(PySequence_Fast_GET_ITEM(value, 2))
diff --git a/source/gameengine/VideoTexture/ImageViewport.cpp b/source/gameengine/VideoTexture/ImageViewport.cpp
index 5a4e8af1b0c..50f5244138a 100644
--- a/source/gameengine/VideoTexture/ImageViewport.cpp
+++ b/source/gameengine/VideoTexture/ImageViewport.cpp
@@ -225,7 +225,7 @@ static PyObject * ImageViewport_getPosition (PyImage * self, void * closure)
static int ImageViewport_setPosition (PyImage * self, PyObject * value, void * closure)
{
// check validity of parameter
- if (value == NULL || !PySequence_Check(value) || PySequence_Length(value) != 2
+ if (value == NULL || !PySequence_Check(value) || PySequence_Size(value) != 2
|| !PyLong_Check(PySequence_Fast_GET_ITEM(value, 0))
|| !PyLong_Check(PySequence_Fast_GET_ITEM(value, 1)))
{
@@ -253,7 +253,7 @@ PyObject * ImageViewport_getCaptureSize (PyImage * self, void * closure)
int ImageViewport_setCaptureSize (PyImage * self, PyObject * value, void * closure)
{
// check validity of parameter
- if (value == NULL || !PySequence_Check(value) || PySequence_Length(value) != 2
+ if (value == NULL || !PySequence_Check(value) || PySequence_Size(value) != 2
|| !PyLong_Check(PySequence_Fast_GET_ITEM(value, 0))
|| !PyLong_Check(PySequence_Fast_GET_ITEM(value, 1)))
{
diff --git a/source/gameengine/VideoTexture/VideoBase.cpp b/source/gameengine/VideoTexture/VideoBase.cpp
index 322ede7004d..bd620309926 100644
--- a/source/gameengine/VideoTexture/VideoBase.cpp
+++ b/source/gameengine/VideoTexture/VideoBase.cpp
@@ -147,7 +147,7 @@ PyObject * Video_getRange (PyImage * self, void * closure)
int Video_setRange (PyImage * self, PyObject * value, void * closure)
{
// check validity of parameter
- if (value == NULL || !PySequence_Check(value) || PySequence_Length(value) != 2
+ if (value == NULL || !PySequence_Check(value) || PySequence_Size(value) != 2
|| !PyFloat_Check(PySequence_Fast_GET_ITEM(value, 0))
|| !PyFloat_Check(PySequence_Fast_GET_ITEM(value, 1)))
{