git.blender.org/blender.git

-rw-r--r--  release/io/export_obj.py                            978
-rw-r--r--  release/io/export_ply.py                              6
-rw-r--r--  release/scripts/3ds_export.py                        24
-rw-r--r--  release/scripts/export_obj-2.5.py                  1217
-rw-r--r--  release/ui/space_script.py                            4
-rw-r--r--  source/blender/makesrna/intern/rna_mesh_api.c        15
-rw-r--r--  source/blender/makesrna/intern/rna_object_api.c      60
-rw-r--r--  source/blender/python/intern/bpy_interface.c          2
-rw-r--r--  source/blender/python/intern/bpy_operator_wrap.c      2
-rw-r--r--  source/blender/python/intern/bpy_rna.c                8
-rw-r--r--  source/blender/python/intern/bpy_sys.c              460
-rw-r--r--  source/blender/python/intern/bpy_sys.h               41
12 files changed, 1497 insertions, 1320 deletions
diff --git a/release/io/export_obj.py b/release/io/export_obj.py
index 8b3bcfb26b3..d139e872251 100644
--- a/release/io/export_obj.py
+++ b/release/io/export_obj.py
@@ -1,83 +1,961 @@
+#!BPY
+
+"""
+Name: 'Wavefront (.obj)...'
+Blender: 248
+Group: 'Export'
+Tooltip: 'Save a Wavefront OBJ File'
+"""
+
+__author__ = "Campbell Barton, Jiri Hnidek, Paolo Ciccone"
+__url__ = ['http://wiki.blender.org/index.php/Scripts/Manual/Export/wavefront_obj', 'www.blender.org', 'blenderartists.org']
+__version__ = "1.21"
+
+__bpydoc__ = """\
+This script is an exporter to OBJ file format.
+
+Usage:
+
+Select the objects you wish to export and run this script from "File->Export" menu.
+Selecting the default options from the popup box will be good in most cases.
+All objects that can be represented as a mesh (mesh, curve, metaball, surface, text3d)
+will be exported as mesh data.
+"""
+
+
+# --------------------------------------------------------------------------
+# OBJ Export v1.1 by Campbell Barton (AKA Ideasman)
+# --------------------------------------------------------------------------
+# ***** BEGIN GPL LICENSE BLOCK *****
+#
+# This program is free software; you can redistribute it and/or
+# modify it under the terms of the GNU General Public License
+# as published by the Free Software Foundation; either version 2
+# of the License, or (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this program; if not, write to the Free Software Foundation,
+# Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
+#
+# ***** END GPL LICENCE BLOCK *****
+# --------------------------------------------------------------------------
+
+
import bpy
+import os # os.sep
+import Mathutils
+
+# Returns a tuple - path,extension.
+# 'hello.obj' > ('hello', '.obj')
+def splitExt(path):
+ dotidx = path.rfind('.')
+ if dotidx == -1:
+ return path, ''
+ else:
+ return path[:dotidx], path[dotidx:]
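For ordinary 'name.ext' filenames this gives the same split as the standard library; a quick comparison, shown only as a sketch:

    import os
    # splitExt('hello.obj') and os.path.splitext('hello.obj') both return ('hello', '.obj')
    assert os.path.splitext('hello.obj') == ('hello', '.obj')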
+
+def fixName(name):
+ if name == None:
+ return 'None'
+ else:
+ return name.replace(' ', '_')
+
+
+# this used to be in BPySys module
+# frankly, I don't understand how it works
+def BPySys_cleanName(name):
+
+ v = [0,1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16,17,18,19,20,21,22,23,24,25,26,27,28,29,30,31,32,33,34,35,36,37,38,39,40,41,42,43,44,46,47,58,59,60,61,62,63,64,91,92,93,94,96,123,124,125,126,127,128,129,130,131,132,133,134,135,136,137,138,139,140,141,142,143,144,145,146,147,148,149,150,151,152,153,154,155,156,157,158,159,160,161,162,163,164,165,166,167,168,169,170,171,172,173,174,175,176,177,178,179,180,181,182,183,184,185,186,187,188,189,190,191,192,193,194,195,196,197,198,199,200,201,202,203,204,205,206,207,208,209,210,211,212,213,214,215,216,217,218,219,220,221,222,223,224,225,226,227,228,229,230,231,232,233,234,235,236,237,238,239,240,241,242,243,244,245,246,247,248,249,250,251,252,253,254]
+
+ invalid = ''.join([chr(i) for i in v])
+
+ for ch in invalid:
+ name = name.replace(ch, '_')
+ return name
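The table above is a deny-list of byte values; a more readable, roughly equivalent sketch (a hypothetical helper, not used by the script) keeps ASCII letters, digits, '-' and '_' and replaces everything else (the original table additionally lets chr(255) through):

    import string

    def clean_name_sketch(name):
        # anything outside a small safe set becomes '_'
        allowed = set(string.ascii_letters + string.digits + '-_')
        return ''.join(c if c in allowed else '_' for c in name)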
+
+# A Dict of Materials
+# (material.name, image.name):matname_imagename # matname_imagename has gaps removed.
+MTL_DICT = {}
-def write_obj(filepath, scene, ob):
- out = open(filepath, 'w')
+def write_mtl(scene, filename):
- # create a temporary mesh
- mesh = ob.create_render_mesh(scene)
+ world = scene.world
+ worldAmb = world.ambient_color
- # for vert in mesh.verts:
- # ^ iterating that way doesn't work atm for some reason
+ file = open(filename, "w")
+ # XXX
+# file.write('# Blender3D MTL File: %s\n' % Blender.Get('filename').split('\\')[-1].split('/')[-1])
+ file.write('# Material Count: %i\n' % len(MTL_DICT))
+ # Write material/image combinations we have used.
+ for key, (mtl_mat_name, mat, img) in MTL_DICT.items():
+
+ # Get the Blender data for the material and the image.
+ # Having an image named None will cause a bug, don't do it :)
+
+ file.write('newmtl %s\n' % mtl_mat_name) # Define a new material: matname_imgname
+
+ if mat:
+ file.write('Ns %.6f\n' % ((mat.specular_hardness-1) * 1.9607843137254901) ) # Hardness, convert blenders 1-511 to MTL's
+ file.write('Ka %.6f %.6f %.6f\n' % tuple([c*mat.ambient for c in worldAmb]) ) # Ambient, uses mirror colour,
+ file.write('Kd %.6f %.6f %.6f\n' % tuple([c*mat.diffuse_reflection for c in mat.diffuse_color]) ) # Diffuse
+ file.write('Ks %.6f %.6f %.6f\n' % tuple([c*mat.specular_reflection for c in mat.specular_color]) ) # Specular
+ file.write('Ni %.6f\n' % mat.ior) # Refraction index
+ file.write('d %.6f\n' % mat.alpha) # Alpha (obj uses 'd' for dissolve)
- for i in range(len(mesh.verts)):
- vert = mesh.verts[i]
- out.write('v {0} {1} {2}\n'.format(vert.co[0], vert.co[1], vert.co[2]))
+ # 0 to disable lighting, 1 for ambient & diffuse only (specular color set to black), 2 for full lighting.
+ if mat.shadeless:
+ file.write('illum 0\n') # ignore lighting
+ elif mat.specular_reflection == 0:
+ file.write('illum 1\n') # no specular.
+ else:
+ file.write('illum 2\n') # light normally
+
+ else:
+ #write a dummy material here?
+ file.write('Ns 0\n')
+ file.write('Ka %.6f %.6f %.6f\n' % tuple([c for c in worldAmb]) ) # Ambient, uses mirror colour,
+ file.write('Kd 0.8 0.8 0.8\n')
+ file.write('Ks 0.8 0.8 0.8\n')
+ file.write('d 1\n') # No alpha
+ file.write('illum 2\n') # light normally
+
+ # Write images!
+ if img: # We have an image on the face!
+ file.write('map_Kd %s\n' % img.filename.split('\\')[-1].split('/')[-1]) # Diffuse mapping image
+
+ elif mat: # No face image; if we have a material, search for an MTex image.
+ for mtex in mat.textures:
+ if mtex and mtex.texture.type == 'IMAGE':
+ try:
+ filename = mtex.texture.image.filename.split('\\')[-1].split('/')[-1]
+ file.write('map_Kd %s\n' % filename) # Diffuse mapping image
+ break
+ except:
+ # Texture has no image though it's an image type, best ignore.
+ pass
+
+ file.write('\n\n')
- for i in range(len(mesh.faces)):
- face = mesh.faces[i]
- out.write('f')
+ file.close()
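A note on the constant used for Ns above: Blender's specular_hardness runs 1..511 and an MTL Ns value conventionally runs 0..1000 (that range is an assumption here), so the factor is 1000/510 = 1.9607843137254901...:

    # endpoints of the 1..511 hardness range map to 0 and 1000
    assert abs((1 - 1) * (1000 / 510.0) - 0.0) < 1e-9
    assert abs((511 - 1) * (1000 / 510.0) - 1000.0) < 1e-9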
- # but this works
- for index in face.verts:
- out.write(' {0}'.format(index + 1))
- out.write('\n')
+def copy_file(source, dest):
+ file = open(source, 'rb')
+ data = file.read()
+ file.close()
+
+ file = open(dest, 'wb')
+ file.write(data)
+ file.close()
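copy_file is a plain byte-for-byte copy; a standard-library sketch doing the same thing (shown only for comparison) would be:

    import shutil

    def copy_file_sketch(source, dest):
        # byte-for-byte copy, like copy_file() above
        shutil.copyfile(source, dest)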
- # delete mesh gain
- bpy.data.remove_mesh(mesh)
- out.close()
+def copy_images(dest_dir):
+ if dest_dir[-1] != os.sep:
+ dest_dir += os.sep
+# if dest_dir[-1] != sys.sep:
+# dest_dir += sys.sep
-class SCRIPT_OT_export_obj(bpy.types.Operator):
- '''A very basic OBJ exporter, writes only active object's mesh.'''
+ # Get unique image names
+ uniqueImages = {}
+ for matname, mat, image in MTL_DICT.values(): # Only use image name
+ # Get Texface images
+ if image:
+ uniqueImages[image] = image # Should use sets here. wait until Python 2.4 is default.
+
+ # Get MTex images
+ if mat:
+ for mtex in mat.textures:
+ if mtex and mtex.texture.type == 'IMAGE':
+ image_tex = mtex.texture.image
+ if image_tex:
+ try:
+ uniqueImages[image_tex] = image_tex
+ except:
+ pass
+
+ # Now copy images
+ copyCount = 0
+
+ for bImage in uniqueImages.values():
+ image_path = bpy.sys.expandpath(bImage.filename)
+ if bpy.sys.exists(image_path):
+ # Make a name for the target path.
+ dest_image_path = dest_dir + image_path.split('\\')[-1].split('/')[-1]
+ if not bpy.sys.exists(dest_image_path): # Image isn't already there
+ print('\tCopying "%s" > "%s"' % (image_path, dest_image_path))
+ copy_file(image_path, dest_image_path)
+ copyCount+=1
- __label__ = 'Export OBJ'
+ print('\tCopied %d images' % copyCount)
+
+# XXX not converted
+def test_nurbs_compat(ob):
+ if ob.type != 'CURVE':
+ return False
- # List of operator properties, the attributes will be assigned
- # to the class instance from the operator settings before calling.
- __props__ = [
- bpy.props.StringProperty(attr="filename", name="filename")
- ]
+ for nu in ob.data.curves:
+ if (not nu.knotsV) and nu.type != 1: # not a surface and not bezier
+ return True
- def debug(self, message):
- print("{0}: {1}".format(self.__class__.__name__, message))
+# for nu in ob.data:
+# if (not nu.knotsV) and nu.type != 1: # not a surface and not bezier
+# return True
+
+ return False
- def execute_(self, context):
- self.debug("exec")
- self.debug("filename = " + self.filename)
+# XXX not converted
+def write_nurb(file, ob, ob_mat):
+ tot_verts = 0
+ cu = ob.data
+
+ # use negative indices
+ Vector = Blender.Mathutils.Vector
+ for nu in cu:
+
+ if nu.type==0: DEG_ORDER_U = 1
+ else: DEG_ORDER_U = nu.orderU-1 # Tested to be correct
+
+ if nu.type==1:
+ print("\tWarning, bezier curve:", ob.name, "only poly and nurbs curves supported")
+ continue
+
+ if nu.knotsV:
+ print("\tWarning, surface:", ob.name, "only poly and nurbs curves supported")
+ continue
+
+ if len(nu) <= DEG_ORDER_U:
+ print("\tWarning, orderU is lower then vert count, skipping:", ob.name)
+ continue
+
+ pt_num = 0
+ do_closed = (nu.flagU & 1)
+ do_endpoints = (do_closed==0) and (nu.flagU & 2)
+
+ for pt in nu:
+ pt = Vector(pt[0], pt[1], pt[2]) * ob_mat
+ file.write('v %.6f %.6f %.6f\n' % (pt[0], pt[1], pt[2]))
+ pt_num += 1
+ tot_verts += pt_num
+
+ file.write('g %s\n' % (fixName(ob.name))) # fixName(ob.getData(1)) could use the data name too
+ file.write('cstype bspline\n') # not ideal, hard coded
+ file.write('deg %d\n' % DEG_ORDER_U) # not used for curves but most files have it still
+
+ curve_ls = [-(i+1) for i in range(pt_num)]
+
+ # 'curv' keyword
+ if do_closed:
+ if DEG_ORDER_U == 1:
+ pt_num += 1
+ curve_ls.append(-1)
+ else:
+ pt_num += DEG_ORDER_U
+ curve_ls = curve_ls + curve_ls[0:DEG_ORDER_U]
+
+ file.write('curv 0.0 1.0 %s\n' % (' '.join( [str(i) for i in curve_ls] ))) # Blender has no U and V values for the curve
+
+ # 'parm' keyword
+ tot_parm = (DEG_ORDER_U + 1) + pt_num
+ tot_parm_div = float(tot_parm-1)
+ parm_ls = [(i/tot_parm_div) for i in range(tot_parm)]
+
+ if do_endpoints: # end points, force param
+ for i in range(DEG_ORDER_U+1):
+ parm_ls[i] = 0.0
+ parm_ls[-(1+i)] = 1.0
+
+ file.write('parm u %s\n' % ' '.join( [str(i) for i in parm_ls] ))
- act = context.active_object
+ file.write('end\n')
+
+ return tot_verts
- if act.type == 'MESH':
- write_obj(self.filename, context.scene, act)
+def write(filename, objects, scene,
+ EXPORT_TRI=False,
+ EXPORT_EDGES=False,
+ EXPORT_NORMALS=False,
+ EXPORT_NORMALS_HQ=False,
+ EXPORT_UV=True,
+ EXPORT_MTL=True,
+ EXPORT_COPY_IMAGES=False,
+ EXPORT_APPLY_MODIFIERS=True,
+ EXPORT_ROTX90=True,
+ EXPORT_BLEN_OBS=True,
+ EXPORT_GROUP_BY_OB=False,
+ EXPORT_GROUP_BY_MAT=False,
+ EXPORT_KEEP_VERT_ORDER=False,
+ EXPORT_POLYGROUPS=False,
+ EXPORT_CURVE_AS_NURBS=True):
+ '''
+ Basic write function. The context and options must already be set.
+ This can be accessed externally
+ eg.
+ write( 'c:\\test\\foobar.obj', Blender.Object.GetSelected() ) # Using default options.
+ '''
+
+ def veckey3d(v):
+ return round(v.x, 6), round(v.y, 6), round(v.z, 6)
+
+ def veckey2d(v):
+ return round(v.x, 6), round(v.y, 6)
+
+ def findVertexGroupName(face, vWeightMap):
+ """
+ Searches the vertex weight map to see which group is assigned to a given face.
+ We use a frequency system to decide the name, because a given vertex can
+ belong to two or more groups at the same time. To find the right name for the
+ face we list all the possible vertex group names with their frequencies, sort
+ by frequency in descending order, and take the top element: the group shared
+ by the highest number of vertices is the face's group.
+ """
+ weightDict = {}
+ for vert_index in face.verts:
+# for vert in face:
+ vWeights = vWeightMap[vert_index]
+# vWeights = vWeightMap[vert]
+ for vGroupName, weight in vWeights:
+ weightDict[vGroupName] = weightDict.get(vGroupName, 0) + weight
+
+ if weightDict:
+ alist = [(weight,vGroupName) for vGroupName, weight in weightDict.items()] # sort least to greatest amount of weight
+ alist.sort()
+ return(alist[-1][1]) # highest value last
else:
- self.debug("Active object is not a MESH.")
+ return '(null)'
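A standalone sketch of the majority-by-weight vote described in the docstring above (the real findVertexGroupName is nested inside write() and reads mesh data; the names and numbers here are hypothetical):

    def pick_group_sketch(vert_indices, vweight_map):
        # tally each group's total weight over the face's vertices,
        # then return the heaviest group, or '(null)' if there is none
        totals = {}
        for vi in vert_indices:
            for group_name, weight in vweight_map[vi]:
                totals[group_name] = totals.get(group_name, 0.0) + weight
        if not totals:
            return '(null)'
        return max(totals.items(), key=lambda item: item[1])[0]

    # pick_group_sketch((0, 1, 2), {0: [('Arm', 0.9)],
    #                               1: [('Arm', 0.2), ('Torso', 0.8)],
    #                               2: [('Torso', 0.1)]})  ->  'Arm'  (1.1 vs 0.9)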
- # XXX errors are silenced for some reason
-# raise Exception("oops!")
+ # TODO: implement this in C? dunno how it should be called...
+ def getVertsFromGroup(me, group_index):
+ ret = []
- return ('FINISHED',)
+ for i, v in enumerate(me.verts):
+ for g in v.groups:
+ if g.group == group_index:
+ ret.append((i, g.weight))
- def execute(self, context):
- self.debug("exec")
+ return ret
+
+
+ print('OBJ Export path: "%s"' % filename)
+ temp_mesh_name = '~tmp-mesh'
+
+ time1 = bpy.sys.time()
+# time1 = sys.time()
+# scn = Scene.GetCurrent()
+
+ file = open(filename, "w")
+
+ # Write Header
+ version = "2.5"
+ file.write('# Blender3D v%s OBJ File: %s\n' % (version, bpy.data.filename.split('/')[-1].split('\\')[-1] ))
+ file.write('# www.blender3d.org\n')
+
+ # Tell the obj file what material file to use.
+ if EXPORT_MTL:
+ mtlfilename = '%s.mtl' % '.'.join(filename.split('.')[:-1])
+ file.write('mtllib %s\n' % ( mtlfilename.split('\\')[-1].split('/')[-1] ))
+
+ if EXPORT_ROTX90:
+ mat_xrot90= Mathutils.RotationMatrix(-90, 4, 'x')
+
+ # Initialize totals, these are updated each object
+ totverts = totuvco = totno = 1
+
+ face_vert_index = 1
+
+ globalNormals = {}
+
+ # Get all meshes
+ for ob_main in objects:
+
+ if ob_main.dupli_type != 'NONE':
+ # XXX
+ print('creating dupli_list on', ob_main.name)
+ ob_main.create_dupli_list()
+
+ # ignore dupli children
+ if ob_main.parent and ob_main.parent.dupli_type != 'NONE':
+ # XXX
+ print(ob_main.name, 'is a dupli child - ignoring')
+ continue
+
+ obs = []
+ if ob_main.dupli_type != 'NONE':
+ obs = [(dob.object, dob.matrix) for dob in ob_main.dupli_list]
+
+ # XXX
+ print(ob_main.name, 'has', len(obs), 'dupli children')
+ else:
+ obs = [(ob_main, ob_main.matrix)]
+
+ for ob, ob_mat in obs:
+
+ if EXPORT_ROTX90:
+ ob_mat = ob_mat * mat_xrot90
+
+ # XXX postponed
+# # Nurbs curve support
+# if EXPORT_CURVE_AS_NURBS and test_nurbs_compat(ob):
+# if EXPORT_ROTX90:
+# ob_mat = ob_mat * mat_xrot90
+
+# totverts += write_nurb(file, ob, ob_mat)
+
+# continue
+# end nurbs
+
+ if ob.type != 'MESH':
+ continue
+
+ if EXPORT_APPLY_MODIFIERS:
+ me = ob.create_mesh('PREVIEW')
+ else:
+ me = ob.data.create_copy()
+
+ me.transform(ob_mat)
+
+# # Will work for non meshes now! :)
+# me= BPyMesh.getMeshFromObject(ob, containerMesh, EXPORT_APPLY_MODIFIERS, EXPORT_POLYGROUPS, scn)
+# if not me:
+# continue
+
+ if EXPORT_UV:
+ faceuv = len(me.uv_layers) > 0
+ else:
+ faceuv = False
+
+ # We have a valid mesh
+ if EXPORT_TRI and me.faces:
+ # Add a dummy object to it.
+ has_quads = False
+ for f in me.faces:
+ if f.verts[3] != 0:
+ has_quads = True
+ break
+
+ if has_quads:
+ newob = bpy.data.add_object('MESH', 'temp_object')
+ newob.data = me
+ # if we forget to set Object.data - crash
+ scene.add_object(newob)
+ newob.convert_to_triface(scene)
+ # mesh will still be there
+ scene.remove_object(newob)
+
+ # Make our own list so it can be sorted to reduce context switching
+ face_index_pairs = [ (face, index) for index, face in enumerate(me.faces)]
+ # faces = [ f for f in me.faces ]
+
+ if EXPORT_EDGES:
+ edges = me.edges
+ else:
+ edges = []
+
+ if not (len(face_index_pairs)+len(edges)+len(me.verts)): # Make sure there is something to write
+
+ # clean up
+ bpy.data.remove_mesh(me)
+
+ continue # don't bother with this mesh.
+
+ # XXX
+ # High Quality Normals
+ if EXPORT_NORMALS and face_index_pairs:
+ pass
+# if EXPORT_NORMALS_HQ:
+# BPyMesh.meshCalcNormals(me)
+# else:
+# # transforming normals is incorrect
+# # when the matrix is scaled,
+# # better to recalculate them
+# me.calcNormals()
+
+ materials = me.materials
+
+ materialNames = []
+ materialItems = [m for m in materials]
+ if materials:
+ for mat in materials:
+ if mat: # !=None
+ materialNames.append(mat.name)
+ else:
+ materialNames.append(None)
+ # Can't use a list comprehension because some materials are None.
+ # materialNames = map(lambda mat: mat.name, materials) # Blender bug, doesn't account for null materials, still broken.
+
+ # Possibly there are null materials; this will mess up indices
+ # but at least it will export, wait until Blender gets fixed.
+ materialNames.extend((16-len(materialNames)) * [None])
+ materialItems.extend((16-len(materialItems)) * [None])
+
+ # Sort by Material, then images
+ # so we don't over-context-switch in the obj file.
+ if EXPORT_KEEP_VERT_ORDER:
+ pass
+ elif faceuv:
+ # XXX update
+ tface = me.active_uv_layer.data
+
+ # exception only raised if Python 2.3 or lower...
+ try:
+ face_index_pairs.sort(key = lambda a: (a[0].material_index, tface[a[1]].image, a[0].smooth))
+ except:
+ face_index_pairs.sort(lambda a,b: cmp((a[0].material_index, tface[a[1]].image, a[0].smooth),
+ (b[0].material_index, tface[b[1]].image, b[0].smooth)))
+ elif len(materials) > 1:
+ try:
+ face_index_pairs.sort(key = lambda a: (a[0].material_index, a[0].smooth))
+ except:
+ face_index_pairs.sort(lambda a,b: cmp((a[0].material_index, a[0].smooth),
+ (b[0].material_index, b[0].smooth)))
+ else:
+ # no materials
+ try:
+ face_index_pairs.sort(key = lambda a: a[0].smooth)
+ except:
+ face_index_pairs.sort(lambda a,b: cmp(a[0].smooth, b[0].smooth))
+# if EXPORT_KEEP_VERT_ORDER:
+# pass
+# elif faceuv:
+# try: faces.sort(key = lambda a: (a.mat, a.image, a.smooth))
+# except: faces.sort(lambda a,b: cmp((a.mat, a.image, a.smooth), (b.mat, b.image, b.smooth)))
+# elif len(materials) > 1:
+# try: faces.sort(key = lambda a: (a.mat, a.smooth))
+# except: faces.sort(lambda a,b: cmp((a.mat, a.smooth), (b.mat, b.smooth)))
+# else:
+# # no materials
+# try: faces.sort(key = lambda a: a.smooth)
+# except: faces.sort(lambda a,b: cmp(a.smooth, b.smooth))
+
+ faces = [pair[0] for pair in face_index_pairs]
+
+ # Set the default mat to no material and no image.
+ contextMat = (0, 0) # Can never be this, so we will label a new material the first chance we get.
+ contextSmooth = None # Will either be true or false, set bad to force initialization switch.
+
+ if EXPORT_BLEN_OBS or EXPORT_GROUP_BY_OB:
+ name1 = ob.name
+ name2 = ob.data.name
+ if name1 == name2:
+ obnamestring = fixName(name1)
+ else:
+ obnamestring = '%s_%s' % (fixName(name1), fixName(name2))
+
+ if EXPORT_BLEN_OBS:
+ file.write('o %s\n' % obnamestring) # Write Object name
+ else: # if EXPORT_GROUP_BY_OB:
+ file.write('g %s\n' % obnamestring)
+
+
+ # Vert
+ for v in me.verts:
+ file.write('v %.6f %.6f %.6f\n' % tuple(v.co))
+
+ # UV
+ if faceuv:
+ uv_face_mapping = [[0,0,0,0] for f in faces] # a bit of a waste for tri's :/
+
+ uv_dict = {} # could use a set() here
+ uv_layer = me.active_uv_layer
+ for f, f_index in face_index_pairs:
+
+ tface = uv_layer.data[f_index]
+
+ uvs = [tface.uv1, tface.uv2, tface.uv3]
+
+ # add another UV if it's a quad
+ if f.verts[3] != 0:
+ uvs.append(tface.uv4)
+
+ for uv_index, uv in enumerate(uvs):
+ uvkey = veckey2d(uv)
+ try:
+ uv_face_mapping[f_index][uv_index] = uv_dict[uvkey]
+ except:
+ uv_face_mapping[f_index][uv_index] = uv_dict[uvkey] = len(uv_dict)
+ file.write('vt %.6f %.6f\n' % tuple(uv))
+
+# uv_dict = {} # could use a set() here
+# for f_index, f in enumerate(faces):
+
+# for uv_index, uv in enumerate(f.uv):
+# uvkey = veckey2d(uv)
+# try:
+# uv_face_mapping[f_index][uv_index] = uv_dict[uvkey]
+# except:
+# uv_face_mapping[f_index][uv_index] = uv_dict[uvkey] = len(uv_dict)
+# file.write('vt %.6f %.6f\n' % tuple(uv))
+
+ uv_unique_count = len(uv_dict)
+ del uv, uvkey, uv_dict, f_index, uv_index
+ # Only need uv_unique_count and uv_face_mapping
+
+ # NORMAL, Smooth/Non smoothed.
+ if EXPORT_NORMALS:
+ for f in faces:
+ if f.smooth:
+ for v in f:
+ noKey = veckey3d(v.normal)
+ if noKey not in globalNormals:
+ globalNormals[noKey] = totno
+ totno +=1
+ file.write('vn %.6f %.6f %.6f\n' % noKey)
+ else:
+ # Hard, 1 normal from the face.
+ noKey = veckey3d(f.normal)
+ if noKey not in globalNormals:
+ globalNormals[noKey] = totno
+ totno +=1
+ file.write('vn %.6f %.6f %.6f\n' % noKey)
+
+ if not faceuv:
+ f_image = None
+
+ # XXX
+ if EXPORT_POLYGROUPS:
+ # Retrieve the list of vertex groups
+# vertGroupNames = me.getVertGroupNames()
+
+ currentVGroup = ''
+ # Create a dictionary keyed by face id and listing, for each vertex, the vertex groups it belongs to
+ vgroupsMap = [[] for _i in range(len(me.verts))]
+# vgroupsMap = [[] for _i in xrange(len(me.verts))]
+ for g in ob.vertex_groups:
+# for vertexGroupName in vertGroupNames:
+ for vIdx, vWeight in getVertsFromGroup(me, g.index):
+# for vIdx, vWeight in me.getVertsFromGroup(vertexGroupName, 1):
+ vgroupsMap[vIdx].append((g.name, vWeight))
+
+ for f_index, f in enumerate(faces):
+ f_v = [{"index": index, "vertex": me.verts[index]} for index in f.verts]
+
+ if f.verts[3] == 0:
+ f_v.pop()
+
+# f_v= f.v
+ f_smooth= f.smooth
+ f_mat = min(f.material_index, len(materialNames)-1)
+# f_mat = min(f.mat, len(materialNames)-1)
+ if faceuv:
+
+ tface = me.active_uv_layer.data[face_index_pairs[f_index][1]]
+
+ f_image = tface.image
+ f_uv= [tface.uv1, tface.uv2, tface.uv3]
+ if f.verts[3] != 0:
+ f_uv.append(tface.uv4)
+# f_image = f.image
+# f_uv= f.uv
+
+ # MAKE KEY
+ if faceuv and f_image: # Object is always true.
+ key = materialNames[f_mat], f_image.name
+ else:
+ key = materialNames[f_mat], None # No image, use None instead.
+
+ # Write the vertex group
+ if EXPORT_POLYGROUPS:
+ if len(ob.vertex_groups):
+ # find which vertex group the face belongs to
+ theVGroup = findVertexGroupName(f,vgroupsMap)
+ if theVGroup != currentVGroup:
+ currentVGroup = theVGroup
+ file.write('g %s\n' % theVGroup)
+# # Write the vertex group
+# if EXPORT_POLYGROUPS:
+# if vertGroupNames:
+# # find what vertext group the face belongs to
+# theVGroup = findVertexGroupName(f,vgroupsMap)
+# if theVGroup != currentVGroup:
+# currentVGroup = theVGroup
+# file.write('g %s\n' % theVGroup)
+
+ # CHECK FOR CONTEXT SWITCH
+ if key == contextMat:
+ pass # Context already switched, don't do anything
+ else:
+ if key[0] == None and key[1] == None:
+ # Write a null material, since we know the context has changed.
+ if EXPORT_GROUP_BY_MAT:
+ # can be mat_image or (null)
+ file.write('g %s_%s\n' % (fixName(ob.name), fixName(ob.data.name)) ) # can be mat_image or (null)
+ file.write('usemtl (null)\n') # mat, image
+
+ else:
+ mat_data= MTL_DICT.get(key)
+ if not mat_data:
+ # First add to global dict so we can export to mtl
+ # Then write mtl
+
+ # Make a new name from the mat and image name,
+ # converting any spaces to underscores with fixName.
+
+ # If there is no image, don't bother adding it to the name
+ if key[1] == None:
+ mat_data = MTL_DICT[key] = ('%s'%fixName(key[0])), materialItems[f_mat], f_image
+ else:
+ mat_data = MTL_DICT[key] = ('%s_%s' % (fixName(key[0]), fixName(key[1]))), materialItems[f_mat], f_image
+
+ if EXPORT_GROUP_BY_MAT:
+ file.write('g %s_%s_%s\n' % (fixName(ob.name), fixName(ob.data.name), mat_data[0]) ) # can be mat_image or (null)
+
+ file.write('usemtl %s\n' % mat_data[0]) # can be mat_image or (null)
+
+ contextMat = key
+ if f_smooth != contextSmooth:
+ if f_smooth: # on now off
+ file.write('s 1\n')
+ contextSmooth = f_smooth
+ else: # was off now on
+ file.write('s off\n')
+ contextSmooth = f_smooth
+
+ file.write('f')
+ if faceuv:
+ if EXPORT_NORMALS:
+ if f_smooth: # Smoothed, use vertex normals
+ for vi, v in enumerate(f_v):
+ file.write( ' %d/%d/%d' % \
+ (v["index"] + totverts,
+ totuvco + uv_face_mapping[f_index][vi],
+ globalNormals[ veckey3d(v["vertex"].normal) ]) ) # vert, uv, normal
+
+ else: # No smoothing, face normals
+ no = globalNormals[ veckey3d(f.normal) ]
+ for vi, v in enumerate(f_v):
+ file.write( ' %d/%d/%d' % \
+ (v["index"] + totverts,
+ totuvco + uv_face_mapping[f_index][vi],
+ no) ) # vert, uv, normal
+ else: # No Normals
+ for vi, v in enumerate(f_v):
+ file.write( ' %d/%d' % (\
+ v["index"] + totverts,\
+ totuvco + uv_face_mapping[f_index][vi])) # vert, uv
+
+ face_vert_index += len(f_v)
+
+ else: # No UV's
+ if EXPORT_NORMALS:
+ if f_smooth: # Smoothed, use vertex normals
+ for v in f_v:
+ file.write( ' %d//%d' %
+ (v["index"] + totverts, globalNormals[ veckey3d(v["vertex"].normal) ]) )
+ else: # No smoothing, face normals
+ no = globalNormals[ veckey3d(f.normal) ]
+ for v in f_v:
+ file.write( ' %d//%d' % (v["index"] + totverts, no) )
+ else: # No Normals
+ for v in f_v:
+ file.write( ' %d' % (v["index"] + totverts) )
+
+ file.write('\n')
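The branches above emit the standard OBJ face index forms; for a single triangle the written line looks like one of the following (indices are 1-based and global across the whole file):

    # f 1/1/1 2/2/2 3/3/3    vert/uv/normal   (UVs and normals)
    # f 1/1 2/2 3/3          vert/uv          (UVs, no normals)
    # f 1//1 2//2 3//3       vert//normal     (normals, no UVs)
    # f 1 2 3                vert only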
+
+ # Write edges.
+ if EXPORT_EDGES:
+ for ed in edges:
+ if ed.loose:
+ file.write('f %d %d\n' % (ed.verts[0] + totverts, ed.verts[1] + totverts))
+
+ # Make the indices global rather than per mesh
+ totverts += len(me.verts)
+ if faceuv:
+ totuvco += uv_unique_count
+
+ # clean up
+ bpy.data.remove_mesh(me)
+
+ if ob_main.dupli_type != 'NONE':
+ ob_main.free_dupli_list()
+
+ file.close()
+
+
+ # Now we have all our materials, save them
+ if EXPORT_MTL:
+ write_mtl(scene, mtlfilename)
+ if EXPORT_COPY_IMAGES:
+ dest_dir = filename
+ # Remove chars until we are just the path.
+ while dest_dir and dest_dir[-1] not in '\\/':
+ dest_dir = dest_dir[:-1]
+ if dest_dir:
+ copy_images(dest_dir)
+ else:
+ print('\tError: "%s" could not be used as a base for an image path.' % filename)
+
+ print("OBJ Export time: %.2f" % (bpy.sys.time() - time1))
+# print "OBJ Export time: %.2f" % (sys.time() - time1)
+
+def do_export(filename, context,
+ EXPORT_APPLY_MODIFIERS = True, # not used
+ EXPORT_ROTX90 = True, # wrong
+ EXPORT_TRI = False, # ok
+ EXPORT_EDGES = False,
+ EXPORT_NORMALS = False, # not yet
+ EXPORT_NORMALS_HQ = False, # not yet
+ EXPORT_UV = True, # ok
+ EXPORT_MTL = True,
+ EXPORT_SEL_ONLY = True, # ok
+ EXPORT_ALL_SCENES = False, # XXX not working atm
+ EXPORT_ANIMATION = False,
+ EXPORT_COPY_IMAGES = False,
+ EXPORT_BLEN_OBS = True,
+ EXPORT_GROUP_BY_OB = False,
+ EXPORT_GROUP_BY_MAT = False,
+ EXPORT_KEEP_VERT_ORDER = False,
+ EXPORT_POLYGROUPS = False,
+ EXPORT_CURVE_AS_NURBS = True):
+ # Window.EditMode(0)
+ # Window.WaitCursor(1)
+
+ base_name, ext = splitExt(filename)
+ context_name = [base_name, '', '', ext] # Base name, scene name, frame number, extension
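A worked example of how the four pieces are joined into full_path further down (the values are hypothetical, and the scene and frame parts are only filled in when EXPORT_ALL_SCENES and EXPORT_ANIMATION are enabled):

    parts = ['/tmp/test', '_Scene', '_%.6d' % 12, '.obj']
    assert ''.join(parts) == '/tmp/test_Scene_000012.obj'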
+
+ orig_scene = context.scene
+
+# if EXPORT_ALL_SCENES:
+# export_scenes = bpy.data.scenes
+# else:
+# export_scenes = [orig_scene]
+
+ # XXX only exporting one scene atm since changing
+ # current scene is not possible.
+ # Brecht says that ideally in 2.5 we won't need such a function,
+ # allowing multiple scenes open at once.
+ export_scenes = [orig_scene]
+
+ # Export all scenes.
+ for scn in export_scenes:
+ # scn.makeCurrent() # If already current, this is not slow.
+ # context = scn.getRenderingContext()
+ orig_frame = scn.current_frame
+
+ if EXPORT_ALL_SCENES: # Add scene name into the context_name
+ context_name[1] = '_%s' % BPySys_cleanName(scn.name) # WARNING, it's possible that this could cause a collision; we could fix it if we were feeling paranoid.
+
+ # Export an animation?
+ if EXPORT_ANIMATION:
+ scene_frames = range(scn.start_frame, scn.end_frame+1) # Up to and including the end frame.
+ else:
+ scene_frames = [orig_frame] # Don't export an animation.
- act = context.active_object
+ # Loop through all frames in the scene and export.
+ for frame in scene_frames:
+ if EXPORT_ANIMATION: # Add frame to the filename.
+ context_name[2] = '_%.6d' % frame
+
+ scn.current_frame = frame
+ if EXPORT_SEL_ONLY:
+ export_objects = context.selected_objects
+ else:
+ export_objects = scn.objects
+
+ full_path= ''.join(context_name)
+
+ # erm... bit of a problem here, this can overwrite files when exporting frames. not too bad.
+ # EXPORT THE FILE.
+ write(full_path, export_objects, scn,
+ EXPORT_TRI, EXPORT_EDGES, EXPORT_NORMALS,
+ EXPORT_NORMALS_HQ, EXPORT_UV, EXPORT_MTL,
+ EXPORT_COPY_IMAGES, EXPORT_APPLY_MODIFIERS,
+ EXPORT_ROTX90, EXPORT_BLEN_OBS,
+ EXPORT_GROUP_BY_OB, EXPORT_GROUP_BY_MAT, EXPORT_KEEP_VERT_ORDER,
+ EXPORT_POLYGROUPS, EXPORT_CURVE_AS_NURBS)
+
+
+ scn.current_frame = orig_frame
+
+ # Restore old active scene.
+# orig_scene.makeCurrent()
+# Window.WaitCursor(0)
- act.create_dupli_list()
- print("{0} has {1} dupli objects".format(act.name, len(act.dupli_list)))
- act.free_dupli_list()
+class EXPORT_OT_obj(bpy.types.Operator):
+ '''
+ Currently the exporter lacks these features:
+ * nurbs
+ * multiple scene export (only active scene is written)
+ * particles
+ '''
+ __label__ = 'Export OBJ'
+
+ # List of operator properties, the attributes will be assigned
+ # to the class instance from the operator settings before calling.
+
+ __props__ = [
+ bpy.props.StringProperty(attr="filename", name="File Name", description="File name used for exporting the PLY file", maxlen= 1024, default= ""),
+
+ # context group
+ bpy.props.BoolProperty(attr="use_selection", name="Selection Only", description="", default= True),
+ bpy.props.BoolProperty(attr="use_all_scenes", name="All Scenes", description="", default= False),
+ bpy.props.BoolProperty(attr="use_animation", name="All Animation", description="", default= False),
+
+ # object group
+ bpy.props.BoolProperty(attr="use_modifiers", name="Apply Modifiers", description="", default= True),
+ bpy.props.BoolProperty(attr="use_rotate90", name="Rotate X90", description="", default= True),
+
+ # extra data group
+ bpy.props.BoolProperty(attr="use_edges", name="Edges", description="", default= True),
+ bpy.props.BoolProperty(attr="use_normals", name="Normals", description="", default= False),
+ bpy.props.BoolProperty(attr="use_hq_normals", name="High Quality Normals", description="", default= True),
+ bpy.props.BoolProperty(attr="use_uvs", name="UVs", description="", default= True),
+ bpy.props.BoolProperty(attr="use_materials", name="Materials", description="", default= True),
+ bpy.props.BoolProperty(attr="copy_images", name="Copy Images", description="", default= False),
+ bpy.props.BoolProperty(attr="use_triangles", name="Triangulate", description="", default= False),
+ bpy.props.BoolProperty(attr="use_vertex_groups", name="Polygroups", description="", default= False),
+ bpy.props.BoolProperty(attr="use_nurbs", name="Nurbs", description="", default= False),
+
+ # grouping group
+ bpy.props.BoolProperty(attr="use_blen_objects", name="Objects as OBJ Objects", description="", default= True),
+ bpy.props.BoolProperty(attr="group_by_object", name="Objects as OBJ Groups ", description="", default= False),
+ bpy.props.BoolProperty(attr="group_by_material", name="Material Groups", description="", default= False),
+ bpy.props.BoolProperty(attr="keep_vertex_order", name="Keep Vertex Order", description="", default= False)
+ ]
+
+ def execute(self, context):
+
+ do_export(self.filename, context,
+ EXPORT_TRI=self.use_triangles,
+ EXPORT_EDGES=self.use_edges,
+ EXPORT_NORMALS=self.use_normals,
+ EXPORT_NORMALS_HQ=self.use_hq_normals,
+ EXPORT_UV=self.use_uvs,
+ EXPORT_MTL=self.use_materials,
+ EXPORT_COPY_IMAGES=self.copy_images,
+ EXPORT_APPLY_MODIFIERS=self.use_modifiers,
+ EXPORT_ROTX90=self.use_rotate90,
+ EXPORT_BLEN_OBS=self.use_blen_objects,
+ EXPORT_GROUP_BY_OB=self.group_by_object,
+ EXPORT_GROUP_BY_MAT=self.group_by_material,
+ EXPORT_KEEP_VERT_ORDER=self.keep_vertex_order,
+ EXPORT_POLYGROUPS=self.use_vertex_groups,
+ EXPORT_CURVE_AS_NURBS=self.use_nurbs,
+ EXPORT_SEL_ONLY=self.use_selection,
+ EXPORT_ALL_SCENES=self.use_all_scenes)
return ('FINISHED',)
def invoke(self, context, event):
- self.debug("invoke")
wm = context.manager
wm.add_fileselect(self.__operator__)
return ('RUNNING_MODAL',)
- def poll(self, context): # poll isnt working yet
- self.debug("poll")
- return True
+ def poll(self, context): # Poll isn't working yet
+ print("Poll")
+ return context.active_object != None
+
+bpy.ops.add(EXPORT_OT_obj)
-bpy.ops.add(SCRIPT_OT_export_obj)
+if __name__ == "__main__":
+ bpy.ops.EXPORT_OT_obj(filename="/tmp/test.obj")
+# CONVERSION ISSUES
+# - matrix problem
+# - duplis - only tested dupliverts
+# - NURBS - needs API additions
+# - all scenes export
+# - normals calculation
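Since the operator properties above are assigned from the operator settings before execute() runs, a script call can in principle override the defaults by keyword; a hypothetical, untested sketch using the property names from __props__:

    import bpy
    bpy.ops.EXPORT_OT_obj(filename="/tmp/test.obj",
                          use_triangles=True,
                          use_normals=True,
                          use_selection=False)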
diff --git a/release/io/export_ply.py b/release/io/export_ply.py
index ed983c2b169..ce1cdc55d09 100644
--- a/release/io/export_ply.py
+++ b/release/io/export_ply.py
@@ -64,7 +64,7 @@ def write(filename, scene, ob, \
raise Exception("Error, Select 1 active object")
return
- file = open(filename, 'wb')
+ file = open(filename, 'w')
#EXPORT_EDGES = Draw.Create(0)
@@ -123,8 +123,8 @@ def write(filename, scene, ob, \
mesh_verts = mesh.verts # save a lookup
ply_verts = [] # list of dictionaries
# vdict = {} # (index, normal, uv) -> new index
- vdict = [{} for i in xrange(len(mesh_verts))]
- ply_faces = [[] for f in xrange(len(mesh.faces))]
+ vdict = [{} for i in range(len(mesh_verts))]
+ ply_faces = [[] for f in range(len(mesh.faces))]
vert_count = 0
for i, f in enumerate(mesh.faces):
diff --git a/release/scripts/3ds_export.py b/release/scripts/3ds_export.py
index 87680bce1b0..69b4d00b4d8 100644
--- a/release/scripts/3ds_export.py
+++ b/release/scripts/3ds_export.py
@@ -863,19 +863,24 @@ def make_kf_obj_node(obj, name_to_id):
"""
import BPyMessages
-def save_3ds(filename):
+def save_3ds(filename, context):
'''Save the Blender scene to a 3ds file.'''
# Time the export
if not filename.lower().endswith('.3ds'):
filename += '.3ds'
-
- if not BPyMessages.Warning_SaveOver(filename):
- return
-
- time1= Blender.sys.time()
- Blender.Window.WaitCursor(1)
- sce= bpy.data.scenes.active
+
+ # XXX
+# if not BPyMessages.Warning_SaveOver(filename):
+# return
+
+ # XXX
+ time1 = bpy.sys.time()
+# time1= Blender.sys.time()
+# Blender.Window.WaitCursor(1)
+
+ sce = context.scene
+# sce= bpy.data.scenes.active
# Initialize the main chunk (primary):
primary = _3ds_chunk(PRIMARY)
@@ -901,7 +906,8 @@ def save_3ds(filename):
# each material is added once):
materialDict = {}
mesh_objects = []
- for ob in sce.objects.context:
+ for ob in context.selected_objects:
+# for ob in sce.objects.context:
for ob_derived, mat in getDerivedObjects(ob, False):
data = getMeshFromObject(ob_derived, None, True, False, sce)
if data:
diff --git a/release/scripts/export_obj-2.5.py b/release/scripts/export_obj-2.5.py
deleted file mode 100644
index cd8e423ed07..00000000000
--- a/release/scripts/export_obj-2.5.py
+++ /dev/null
@@ -1,1217 +0,0 @@
-#!BPY
-
-"""
-Name: 'Wavefront (.obj)...'
-Blender: 248
-Group: 'Export'
-Tooltip: 'Save a Wavefront OBJ File'
-"""
-
-__author__ = "Campbell Barton, Jiri Hnidek, Paolo Ciccone"
-__url__ = ['http://wiki.blender.org/index.php/Scripts/Manual/Export/wavefront_obj', 'www.blender.org', 'blenderartists.org']
-__version__ = "1.21"
-
-__bpydoc__ = """\
-This script is an exporter to OBJ file format.
-
-Usage:
-
-Select the objects you wish to export and run this script from "File->Export" menu.
-Selecting the default options from the popup box will be good in most cases.
-All objects that can be represented as a mesh (mesh, curve, metaball, surface, text3d)
-will be exported as mesh data.
-"""
-
-
-# --------------------------------------------------------------------------
-# OBJ Export v1.1 by Campbell Barton (AKA Ideasman)
-# --------------------------------------------------------------------------
-# ***** BEGIN GPL LICENSE BLOCK *****
-#
-# This program is free software; you can redistribute it and/or
-# modify it under the terms of the GNU General Public License
-# as published by the Free Software Foundation; either version 2
-# of the License, or (at your option) any later version.
-#
-# This program is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License
-# along with this program; if not, write to the Free Software Foundation,
-# Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
-#
-# ***** END GPL LICENCE BLOCK *****
-# --------------------------------------------------------------------------
-
-
-import bpy
-# import BPySys
-
-# import Blender
-# from Blender import Mesh, Scene, Window, sys, Image, Draw
-# import BPyMesh
-# import BPyObject
-# import BPySys
-# import BPyMessages
-
-# Returns a tuple - path,extension.
-# 'hello.obj' > ('hello', '.obj')
-def splitExt(path):
- dotidx = path.rfind('.')
- if dotidx == -1:
- return path, ''
- else:
- return path[:dotidx], path[dotidx:]
-
-def fixName(name):
- if name == None:
- return 'None'
- else:
- return name.replace(' ', '_')
-
-# A Dict of Materials
-# (material.name, image.name):matname_imagename # matname_imagename has gaps removed.
-MTL_DICT = {}
-
-def write_mtl(scene, filename):
-
- world = bpy.data.worlds[0]
- worldAmb = world.ambient_color
-
-# world = Blender.World.GetCurrent()
-# if world:
-# worldAmb = world.getAmb()
-# else:
-# worldAmb = (0,0,0) # Default value
-
- file = open(filename, "w")
- # XXX
-# file.write('# Blender3D MTL File: %s\n' % Blender.Get('filename').split('\\')[-1].split('/')[-1])
- file.write('# Material Count: %i\n' % len(MTL_DICT))
- # Write material/image combinations we have used.
- for key, (mtl_mat_name, mat, img) in MTL_DICT.iteritems():
-
- # Get the Blender data for the material and the image.
- # Having an image named None will make a bug, dont do it :)
-
- file.write('newmtl %s\n' % mtl_mat_name) # Define a new material: matname_imgname
-
- if mat:
- file.write('Ns %.6f\n' % ((mat.getHardness()-1) * 1.9607843137254901) ) # Hardness, convert blenders 1-511 to MTL's
- file.write('Ka %.6f %.6f %.6f\n' % tuple([c*mat.amb for c in worldAmb]) ) # Ambient, uses mirror colour,
- file.write('Kd %.6f %.6f %.6f\n' % tuple([c*mat.ref for c in mat.rgbCol]) ) # Diffuse
- file.write('Ks %.6f %.6f %.6f\n' % tuple([c*mat.spec for c in mat.specCol]) ) # Specular
- file.write('Ni %.6f\n' % mat.IOR) # Refraction index
- file.write('d %.6f\n' % mat.alpha) # Alpha (obj uses 'd' for dissolve)
-
- # 0 to disable lighting, 1 for ambient & diffuse only (specular color set to black), 2 for full lighting.
- if mat.getMode() & Blender.Material.Modes['SHADELESS']:
- file.write('illum 0\n') # ignore lighting
- elif mat.getSpec() == 0:
- file.write('illum 1\n') # no specular.
- else:
- file.write('illum 2\n') # light normaly
-
- else:
- #write a dummy material here?
- file.write('Ns 0\n')
- file.write('Ka %.6f %.6f %.6f\n' % tuple([c for c in worldAmb]) ) # Ambient, uses mirror colour,
- file.write('Kd 0.8 0.8 0.8\n')
- file.write('Ks 0.8 0.8 0.8\n')
- file.write('d 1\n') # No alpha
- file.write('illum 2\n') # light normaly
-
- # Write images!
- if img: # We have an image on the face!
- file.write('map_Kd %s\n' % img.filename.split('\\')[-1].split('/')[-1]) # Diffuse mapping image
-
- elif mat: # No face image. if we havea material search for MTex image.
- for mtex in mat.getTextures():
- if mtex and mtex.tex.type == Blender.Texture.Types.IMAGE:
- try:
- filename = mtex.tex.image.filename.split('\\')[-1].split('/')[-1]
- file.write('map_Kd %s\n' % filename) # Diffuse mapping image
- break
- except:
- # Texture has no image though its an image type, best ignore.
- pass
-
- file.write('\n\n')
-
- file.close()
-
-def copy_file(source, dest):
- file = open(source, 'rb')
- data = file.read()
- file.close()
-
- file = open(dest, 'wb')
- file.write(data)
- file.close()
-
-
-def copy_images(dest_dir):
- if dest_dir[-1] != sys.sep:
- dest_dir += sys.sep
-
- # Get unique image names
- uniqueImages = {}
- for matname, mat, image in MTL_DICT.itervalues(): # Only use image name
- # Get Texface images
- if image:
- uniqueImages[image] = image # Should use sets here. wait until Python 2.4 is default.
-
- # Get MTex images
- if mat:
- for mtex in mat.getTextures():
- if mtex and mtex.tex.type == Blender.Texture.Types.IMAGE:
- image_tex = mtex.tex.image
- if image_tex:
- try:
- uniqueImages[image_tex] = image_tex
- except:
- pass
-
- # Now copy images
- copyCount = 0
-
- for bImage in uniqueImages.itervalues():
- image_path = sys.expandpath(bImage.filename)
- if sys.exists(image_path):
- # Make a name for the target path.
- dest_image_path = dest_dir + image_path.split('\\')[-1].split('/')[-1]
- if not sys.exists(dest_image_path): # Image isnt alredy there
- print '\tCopying "%s" > "%s"' % (image_path, dest_image_path)
- copy_file(image_path, dest_image_path)
- copyCount+=1
- print '\tCopied %d images' % copyCount
-
-
-def test_nurbs_compat(ob):
- if ob.type != 'CURVE':
- return False
-
- for nu in ob.data:
- if (not nu.knotsV) and nu.type != 1: # not a surface and not bezier
- return True
-
- return False
-
-def write_nurb(file, ob, ob_mat):
- tot_verts = 0
- cu = ob.data
-
- # use negative indices
- Vector = Blender.Mathutils.Vector
- for nu in cu:
-
- if nu.type==0: DEG_ORDER_U = 1
- else: DEG_ORDER_U = nu.orderU-1 # Tested to be correct
-
- if nu.type==1:
- print "\tWarning, bezier curve:", ob.name, "only poly and nurbs curves supported"
- continue
-
- if nu.knotsV:
- print "\tWarning, surface:", ob.name, "only poly and nurbs curves supported"
- continue
-
- if len(nu) <= DEG_ORDER_U:
- print "\tWarning, orderU is lower then vert count, skipping:", ob.name
- continue
-
- pt_num = 0
- do_closed = (nu.flagU & 1)
- do_endpoints = (do_closed==0) and (nu.flagU & 2)
-
- for pt in nu:
- pt = Vector(pt[0], pt[1], pt[2]) * ob_mat
- file.write('v %.6f %.6f %.6f\n' % (pt[0], pt[1], pt[2]))
- pt_num += 1
- tot_verts += pt_num
-
- file.write('g %s\n' % (fixName(ob.name))) # fixName(ob.getData(1)) could use the data name too
- file.write('cstype bspline\n') # not ideal, hard coded
- file.write('deg %d\n' % DEG_ORDER_U) # not used for curves but most files have it still
-
- curve_ls = [-(i+1) for i in xrange(pt_num)]
-
- # 'curv' keyword
- if do_closed:
- if DEG_ORDER_U == 1:
- pt_num += 1
- curve_ls.append(-1)
- else:
- pt_num += DEG_ORDER_U
- curve_ls = curve_ls + curve_ls[0:DEG_ORDER_U]
-
- file.write('curv 0.0 1.0 %s\n' % (' '.join( [str(i) for i in curve_ls] ))) # Blender has no U and V values for the curve
-
- # 'parm' keyword
- tot_parm = (DEG_ORDER_U + 1) + pt_num
- tot_parm_div = float(tot_parm-1)
- parm_ls = [(i/tot_parm_div) for i in xrange(tot_parm)]
-
- if do_endpoints: # end points, force param
- for i in xrange(DEG_ORDER_U+1):
- parm_ls[i] = 0.0
- parm_ls[-(1+i)] = 1.0
-
- file.write('parm u %s\n' % ' '.join( [str(i) for i in parm_ls] ))
-
- file.write('end\n')
-
- return tot_verts
-
-def write(filename, objects, scene, \
-EXPORT_TRI=False, EXPORT_EDGES=False, EXPORT_NORMALS=False, EXPORT_NORMALS_HQ=False,\
-EXPORT_UV=True, EXPORT_MTL=True, EXPORT_COPY_IMAGES=False,\
-EXPORT_APPLY_MODIFIERS=True, EXPORT_ROTX90=True, EXPORT_BLEN_OBS=True,\
-EXPORT_GROUP_BY_OB=False, EXPORT_GROUP_BY_MAT=False, EXPORT_KEEP_VERT_ORDER=False,\
-EXPORT_POLYGROUPS=False, EXPORT_CURVE_AS_NURBS=True):
- '''
- Basic write function. The context and options must be alredy set
- This can be accessed externaly
- eg.
- write( 'c:\\test\\foobar.obj', Blender.Object.GetSelected() ) # Using default options.
- '''
-
- def veckey3d(v):
- return round(v.x, 6), round(v.y, 6), round(v.z, 6)
-
- def veckey2d(v):
- return round(v.x, 6), round(v.y, 6)
-
- def findVertexGroupName(face, vWeightMap):
- """
- Searches the vertexDict to see what groups is assigned to a given face.
- We use a frequency system in order to sort out the name because a given vetex can
- belong to two or more groups at the same time. To find the right name for the face
- we list all the possible vertex group names with their frequency and then sort by
- frequency in descend order. The top element is the one shared by the highest number
- of vertices is the face's group
- """
- weightDict = {}
- for vert_index in face.verts:
-# for vert in face:
- vWeights = vWeightMap[vert_index]
-# vWeights = vWeightMap[vert]
- for vGroupName, weight in vWeights:
- weightDict[vGroupName] = weightDict.get(vGroupName, 0) + weight
-
- if weightDict:
- alist = [(weight,vGroupName) for vGroupName, weight in weightDict.iteritems()] # sort least to greatest amount of weight
- alist.sort()
- return(alist[-1][1]) # highest value last
- else:
- return '(null)'
-
- # TODO: implement this in C? dunno how it should be called...
- def getVertsFromGroup(me, group_index):
- ret = []
-
- for i, v in enumerate(me.verts):
- for g in v.groups:
- if g.group == group.index:
- ret.append((i, g.weight))
-
- return ret
-
-
- print 'OBJ Export path: "%s"' % filename
- temp_mesh_name = '~tmp-mesh'
-
- time1 = sys.time()
-# scn = Scene.GetCurrent()
- scene = context.scene
-
- file = open(filename, "w")
-
- # Write Header
- file.write('# Blender3D v%s OBJ File: %s\n' % (Blender.Get('version'), Blender.Get('filename').split('/')[-1].split('\\')[-1] ))
- file.write('# www.blender3d.org\n')
-
- # Tell the obj file what material file to use.
- if EXPORT_MTL:
- mtlfilename = '%s.mtl' % '.'.join(filename.split('.')[:-1])
- file.write('mtllib %s\n' % ( mtlfilename.split('\\')[-1].split('/')[-1] ))
-
- # Get the container mesh. - used for applying modifiers and non mesh objects.
-
-# containerMesh = meshName = tempMesh = None
-# for meshName in Blender.NMesh.GetNames():
-# if meshName.startswith(temp_mesh_name):
-# tempMesh = Mesh.Get(meshName)
-# if not tempMesh.users:
-# containerMesh = tempMesh
-# if not containerMesh:
-# containerMesh = Mesh.New(temp_mesh_name)
-
- # XXX this mesh is not removed
- # XXX this mesh should not be in database
- containerMesh = bpy.data.add_mesh(temp_mesh_name)
-
- if EXPORT_ROTX90:
- mat_xrot90= Blender.Mathutils.RotationMatrix(-90, 4, 'x')
-
-# del meshName
-# del tempMesh
-
- # Initialize totals, these are updated each object
- totverts = totuvco = totno = 1
-
- face_vert_index = 1
-
- globalNormals = {}
-
- # Get all meshes
- for ob_main in objects:
-
- if ob_main.dupli_type != 'NONE':
- ob_main.create_dupli_list()
-
- # ignore dupli children
- if ob_main.parent.dupli_type != 'NONE':
- continue
-
- obs = []
- if ob_main.dupli_type != 'NONE':
- obs = [(dob.matrix, dob.object) for dob in ob_main.dupli_list]
- else:
- obs = [ob.matrix, ob]
-
- for ob, ob_mat in obs:
- # XXX postponed
-# # Nurbs curve support
-# if EXPORT_CURVE_AS_NURBS and test_nurbs_compat(ob):
-# if EXPORT_ROTX90:
-# ob_mat = ob_mat * mat_xrot90
-
-# totverts += write_nurb(file, ob, ob_mat)
-
-# continue
-# end nurbs
-
- if ob.type != 'MESH':
- continue
-
- # XXX EXPORT_APPLY_MODIFIERS is not used (always true)
- # we also need influences to be copied... for EXPORT_POLYGROUPS to work
- # which create_preview_mesh presumably does (CD_MASK_MDEFORMVERT flag)
- me = ob.create_preview_mesh()
-
-# # Will work for non meshes now! :)
-# me= BPyMesh.getMeshFromObject(ob, containerMesh, EXPORT_APPLY_MODIFIERS, EXPORT_POLYGROUPS, scn)
-# if not me:
-# continue
-
- if EXPORT_UV:
- faceuv = len(me.uv_layers) > 0
- else:
- faceuv = False
-
- # We have a valid mesh
- if EXPORT_TRI and me.faces:
- # Add a dummy object to it.
- has_quads = False
- for f in me.faces:
-# if len(f) == 4:
- if len(f.verts) == 4:
- has_quads = True
- break
-
- if has_quads:
- newob = bpy.data.add_object('MESH', 'temp_object')
- scene.add_object(newob)
- newob.convert_to_triface(scene)
- # me will still be there
- scene.remove_object(newob)
-# oldmode = Mesh.Mode()
-# Mesh.Mode(Mesh.SelectModes['FACE'])
-
-# me.sel = True
-# tempob = scn.objects.new(me)
-# me.quadToTriangle(0) # more=0 shortest length
-# oldmode = Mesh.Mode(oldmode)
-# scn.objects.unlink(tempob)
-
-# Mesh.Mode(oldmode)
-
- if EXPORT_ROTX90:
- ob_mat *= mat_xrot90
-
- # Make our own list so it can be sorted to reduce context switching
- face_index_pairs = [ (face, index) for index, face in enumerate(me.faces)]
- # faces = [ f for f in me.faces ]
-
- if EXPORT_EDGES:
- edges = me.edges
- else:
- edges = []
-
- if not (len(face_index_pairs)+len(edges)+len(me.verts)): # Make sure there is somthing to write
-# if not (len(faces)+len(edges)+len(me.verts)): # Make sure there is somthing to write
-
- bpy.data.remove_mesh(me)
-
- continue # dont bother with this mesh.
-
- if EXPORT_ROTX90:
- me.transform(ob_mat*mat_xrot90)
- else:
- me.transform(ob_mat)
-
- # High Quality Normals
- if EXPORT_NORMALS and face_index_pairs:
-# if EXPORT_NORMALS and faces:
- # XXX
- pass
-# if EXPORT_NORMALS_HQ:
-# BPyMesh.meshCalcNormals(me)
-# else:
-# # transforming normals is incorrect
-# # when the matrix is scaled,
-# # better to recalculate them
-# me.calcNormals()
-
- # # Crash Blender
- #materials = me.getMaterials(1) # 1 == will return None in the list.
- materials = me.materials
-
- materialNames = []
- materialItems = materials[:]
- if materials:
- for mat in materials:
- if mat: # !=None
- materialNames.append(mat.name)
- else:
- materialNames.append(None)
- # Cant use LC because some materials are None.
- # materialNames = map(lambda mat: mat.name, materials) # Bug Blender, dosent account for null materials, still broken.
-
- # Possible there null materials, will mess up indicies
- # but at least it will export, wait until Blender gets fixed.
- materialNames.extend((16-len(materialNames)) * [None])
- materialItems.extend((16-len(materialItems)) * [None])
-
- # Sort by Material, then images
- # so we dont over context switch in the obj file.
- if EXPORT_KEEP_VERT_ORDER:
- pass
- elif faceuv:
- # XXX update
- tface = me.active_uv_layer.data
-
- # exception only raised if Python 2.3 or lower...
- try: face_index_pairs.sort(key = lambda a: (a[0].material_index, tface[a[1]].image, a[0].smooth))
- except: face_index_pairs.sort(lambda a,b: cmp((a[0].material_index, tface[a[1]].image, a[0].smooth),
- (b[0].material_index, tface[b[1]].image, b[0].smooth)))
- elif len(materials) > 1:
- try: face_index_pairs.sort(key = lambda a: (a[0].material_index, a[0].smooth))
- except: face_index_pairs.sort(lambda a,b: cmp((a[0].material_index, a[0].smooth),
- (b[0].material_index, b[0].smooth)))
- else:
- # no materials
- try: face_index_pairs.sort(key = lambda a: a[0].smooth)
- except: face_index_pairs.sort(lambda a,b: cmp(a[0].smooth, b[0].smooth))
-# if EXPORT_KEEP_VERT_ORDER:
-# pass
-# elif faceuv:
-# try: faces.sort(key = lambda a: (a.mat, a.image, a.smooth))
-# except: faces.sort(lambda a,b: cmp((a.mat, a.image, a.smooth), (b.mat, b.image, b.smooth)))
-# elif len(materials) > 1:
-# try: faces.sort(key = lambda a: (a.mat, a.smooth))
-# except: faces.sort(lambda a,b: cmp((a.mat, a.smooth), (b.mat, b.smooth)))
-# else:
-# # no materials
-# try: faces.sort(key = lambda a: a.smooth)
-# except: faces.sort(lambda a,b: cmp(a.smooth, b.smooth))
-
- faces = [pair[0] for pair in face_index_pairs]
-
- # Set the default mat to no material and no image.
- contextMat = (0, 0) # Can never be this, so we will label a new material teh first chance we get.
- contextSmooth = None # Will either be true or false, set bad to force initialization switch.
-
- if EXPORT_BLEN_OBS or EXPORT_GROUP_BY_OB:
- name1 = ob.name
- name2 = ob.data.name
- # name2 = ob.getData(1)
- if name1 == name2:
- obnamestring = fixName(name1)
- else:
- obnamestring = '%s_%s' % (fixName(name1), fixName(name2))
-
- if EXPORT_BLEN_OBS:
- file.write('o %s\n' % obnamestring) # Write Object name
- else: # if EXPORT_GROUP_BY_OB:
- file.write('g %s\n' % obnamestring)
-
-
- # Vert
- for v in me.verts:
- file.write('v %.6f %.6f %.6f\n' % tuple(v.co))
-
- # UV
- if faceuv:
- uv_face_mapping = [[0,0,0,0] for f in faces] # a bit of a waste for tri's :/
-
- uv_dict = {} # could use a set() here
- uv_layer = me.active_uv_layer
- for f, f_index in face_index_pairs:
-
- tface = uv_layer.data[f_index]
-
- uvs = [tface.uv1, tface.uv2, tface.uv3]
-
- # add another UV if it's a quad
- if tface.verts[3] != 0:
- uvs.append(tface.uv4)
-
- for uv_index, uv in enumerate(uvs):
- uvkey = veckey2d(uv)
- try:
- uv_face_mapping[f_index][uv_index] = uv_dict[uvkey]
- except:
- uv_face_mapping[f_index][uv_index] = uv_dict[uvkey] = len(uv_dict)
- file.write('vt %.6f %.6f\n' % tuple(uv))
-
-# uv_dict = {} # could use a set() here
-# for f_index, f in enumerate(faces):
-
-# for uv_index, uv in enumerate(f.uv):
-# uvkey = veckey2d(uv)
-# try:
-# uv_face_mapping[f_index][uv_index] = uv_dict[uvkey]
-# except:
-# uv_face_mapping[f_index][uv_index] = uv_dict[uvkey] = len(uv_dict)
-# file.write('vt %.6f %.6f\n' % tuple(uv))
-
- uv_unique_count = len(uv_dict)
- del uv, uvkey, uv_dict, f_index, uv_index
- # Only need uv_unique_count and uv_face_mapping
-
- # NORMAL, Smooth/Non smoothed.
- if EXPORT_NORMALS:
- for f in faces:
- if f.smooth:
- for v in f:
- noKey = veckey3d(v.normal)
-# noKey = veckey3d(v.no)
- if not globalNormals.has_key( noKey ):
- globalNormals[noKey] = totno
- totno +=1
- file.write('vn %.6f %.6f %.6f\n' % noKey)
- else:
- # Hard, 1 normal from the face.
- noKey = veckey3d(f.normal)
-# noKey = veckey3d(f.no)
- if not globalNormals.has_key( noKey ):
- globalNormals[noKey] = totno
- totno +=1
- file.write('vn %.6f %.6f %.6f\n' % noKey)
-
- if not faceuv:
- f_image = None
-
- # XXX
- if EXPORT_POLYGROUPS:
- # Retrieve the list of vertex groups
-# vertGroupNames = me.getVertGroupNames()
-
- currentVGroup = ''
- # Create a dictionary keyed by face id and listing, for each vertex, the vertex groups it belongs to
- vgroupsMap = [[] for _i in range(len(me.verts))]
-# vgroupsMap = [[] for _i in xrange(len(me.verts))]
- for g in ob.vertex_groups:
-# for vertexGroupName in vertGroupNames:
- for vIdx, vWeight in getVertsFromGroup(me, g.index)
-# for vIdx, vWeight in me.getVertsFromGroup(vertexGroupName, 1):
- vgroupsMap[vIdx].append((g.name, vWeight))
-
- for f_index, f in enumerate(faces):
- f_v = [{"index": index, "vertex": me.verts[index]} for index in f.verts]
-# f_v= f.v
- f_smooth= f.smooth
- f_mat = min(f.material_index, len(materialNames)-1)
-# f_mat = min(f.mat, len(materialNames)-1)
- if faceuv:
-
- tface = me.active_uv_layer.data[face_index_pairs[f_index][1]]
-
- f_image = tface.image
- f_uv= [tface.uv1, tface.uv2, tface.uv3]
- if f.verts[4] != 0:
- f_uv.append(tface.uv4)
-# f_image = f.image
-# f_uv= f.uv
-
- # MAKE KEY
- if faceuv and f_image: # Object is always true.
- key = materialNames[f_mat], f_image.name
- else:
- key = materialNames[f_mat], None # No image, use None instead.
-
- # XXX
- # Write the vertex group
- if EXPORT_POLYGROUPS:
- if len(ob.vertex_groups):
- # find what vertext group the face belongs to
- theVGroup = findVertexGroupName(f,vgroupsMap)
- if theVGroup != currentVGroup:
- currentVGroup = theVGroup
- file.write('g %s\n' % theVGroup)
-# # Write the vertex group
-# if EXPORT_POLYGROUPS:
-# if vertGroupNames:
-# # find what vertext group the face belongs to
-# theVGroup = findVertexGroupName(f,vgroupsMap)
-# if theVGroup != currentVGroup:
-# currentVGroup = theVGroup
-# file.write('g %s\n' % theVGroup)
-
- # CHECK FOR CONTEXT SWITCH
- if key == contextMat:
- pass # Context alredy switched, dont do anything
- else:
- if key[0] == None and key[1] == None:
- # Write a null material, since we know the context has changed.
- if EXPORT_GROUP_BY_MAT:
- # can be mat_image or (null)
- file.write('g %s_%s\n' % (fixName(ob.name), fixName(ob.data.name)) )
-# file.write('g %s_%s\n' % (fixName(ob.name), fixName(ob.getData(1))) ) # can be mat_image or (null)
- file.write('usemtl (null)\n') # mat, image
-
- else:
- mat_data= MTL_DICT.get(key)
- if not mat_data:
- # First add to global dict so we can export to mtl
- # Then write mtl
-
- # Make a new names from the mat and image name,
- # converting any spaces to underscores with fixName.
-
- # If none image dont bother adding it to the name
- if key[1] == None:
- mat_data = MTL_DICT[key] = ('%s'%fixName(key[0])), materialItems[f_mat], f_image
- else:
- mat_data = MTL_DICT[key] = ('%s_%s' % (fixName(key[0]), fixName(key[1]))), materialItems[f_mat], f_image
-
- if EXPORT_GROUP_BY_MAT:
- file.write('g %s_%s_%s\n' % (fixName(ob.name), fixName(ob.data.name), mat_data[0]) ) # can be mat_image or (null)
-# file.write('g %s_%s_%s\n' % (fixName(ob.name), fixName(ob.getData(1)), mat_data[0]) ) # can be mat_image or (null)
-
- file.write('usemtl %s\n' % mat_data[0]) # can be mat_image or (null)
-
- contextMat = key
- if f_smooth != contextSmooth:
- if f_smooth: # on now off
- file.write('s 1\n')
- contextSmooth = f_smooth
- else: # was off now on
- file.write('s off\n')
- contextSmooth = f_smooth
-
- file.write('f')
- if faceuv:
- if EXPORT_NORMALS:
- if f_smooth: # Smoothed, use vertex normals
- for vi, v in enumerate(f_v):
- file.write( ' %d/%d/%d' % \
- (v["index"] + totverts,
- totuvco + uv_face_mapping[f_index][vi],
- globalNormals[ veckey3d(v["vertex"].normal) ]) ) # vert, uv, normal
-# file.write( ' %d/%d/%d' % (\
-# v.index+totverts,\
-# totuvco + uv_face_mapping[f_index][vi],\
-# globalNormals[ veckey3d(v.no) ])) # vert, uv, normal
-
- else: # No smoothing, face normals
- no = globalNormals[ veckey3d(f.normal) ]
-# no = globalNormals[ veckey3d(f.no) ]
- for vi, v in enumerate(f_v):
- file.write( ' %d/%d/%d' % \
- (v["index"] + totverts,
- totuvco + uv_face_mapping[f_index][vi],
- no) ) # vert, uv, normal
-# file.write( ' %d/%d/%d' % (\
-# v.index+totverts,\
-# totuvco + uv_face_mapping[f_index][vi],\
-# no)) # vert, uv, normal
-
- else: # No Normals
- for vi, v in enumerate(f_v):
- file.write( ' %d/%d' % (\
- v["index"] + totverts,\
- totuvco + uv_face_mapping[f_index][vi])) # vert, uv
-# file.write( ' %d/%d' % (\
-# v.index+totverts,\
-# totuvco + uv_face_mapping[f_index][vi])) # vert, uv
-
- face_vert_index += len(f_v)
-
- else: # No UV's
- if EXPORT_NORMALS:
- if f_smooth: # Smoothed, use vertex normals
- for v in f_v:
- file.write( ' %d//%d' %
- (v["index"] + totverts, globalNormals[ veckey3d(v["vertex"].normal) ]) )
-
-# file.write( ' %d//%d' % (\
-# v.index+totverts,\
-# globalNormals[ veckey3d(v.no) ]))
- else: # No smoothing, face normals
- no = globalNormals[ veckey3d(f.normal) ]
-# no = globalNormals[ veckey3d(f.no) ]
- for v in f_v:
- file.write( ' %d//%d' % (v["index"] + totverts, no) )
-# file.write( ' %d//%d' % (\
-# v.index+totverts,\
-# no))
- else: # No Normals
- for v in f_v:
- file.write( ' %d' % (v["index"] + totverts) )
-# file.write( ' %d' % (\
-# v.index+totverts))
-
- file.write('\n')
-
- # Write edges.
- if EXPORT_EDGES:
- for ed in edges:
- if ed.loose:
- file.write('f %d %d\n' % (ed.verts[0] + totverts, ed.verts[1] + totverts))
-# LOOSE= Mesh.EdgeFlags.LOOSE
-# for ed in edges:
-# if ed.flag & LOOSE:
-# file.write('f %d %d\n' % (ed.v1.index+totverts, ed.v2.index+totverts))
-
-		# Make the indices global rather than per mesh
- totverts += len(me.verts)
- if faceuv:
- totuvco += uv_unique_count
-
- # clean up
- bpy.data.remove_mesh(me)
-# me.verts= None
-
- if ob_main.dupli_type != 'NONE':
- ob_main.free_dupli_list()
-
- file.close()
-
-
- # Now we have all our materials, save them
- if EXPORT_MTL:
- write_mtl(scene, mtlfilename)
- if EXPORT_COPY_IMAGES:
- dest_dir = filename
- # Remove chars until we are just the path.
- while dest_dir and dest_dir[-1] not in '\\/':
- dest_dir = dest_dir[:-1]
- if dest_dir:
- copy_images(dest_dir)
- else:
-			print('\tError: "%s" could not be used as a base for an image path.' % filename)
-
-	print("OBJ Export time: %.2f" % (sys.time() - time1))
-
-
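For reference, the face-writing branches above reduce to the four standard Wavefront face-record layouts (vert/uv/normal, vert/uv, vert//normal, vert only), with every index offset by the running totverts/totuvco counters so indices stay global across exported objects. A minimal illustrative sketch, not part of the exporter; the helper name and literal indices here are hypothetical:

# Illustrative sketch only: the four 'f' record layouts emitted above.
# Real indices are offset by totverts/totuvco and are 1-based in OBJ.
def obj_face_record(vert_ids, uv_ids=None, normal_ids=None):
    chunks = []
    for i, v in enumerate(vert_ids):
        if uv_ids and normal_ids:
            chunks.append('%d/%d/%d' % (v, uv_ids[i], normal_ids[i]))  # vert/uv/normal
        elif uv_ids:
            chunks.append('%d/%d' % (v, uv_ids[i]))                    # vert/uv
        elif normal_ids:
            chunks.append('%d//%d' % (v, normal_ids[i]))               # vert//normal
        else:
            chunks.append('%d' % v)                                    # vert only
    return 'f %s\n' % ' '.join(chunks)

# obj_face_record([1, 2, 3], uv_ids=[1, 2, 3], normal_ids=[1, 1, 1])
# -> 'f 1/1/1 2/2/1 3/3/1\n'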
-# replaced by do_export
-def write_ui(filename):
-
- if not filename.lower().endswith('.obj'):
- filename += '.obj'
-
- if not BPyMessages.Warning_SaveOver(filename):
- return
-
- global EXPORT_APPLY_MODIFIERS, EXPORT_ROTX90, EXPORT_TRI, EXPORT_EDGES,\
- EXPORT_NORMALS, EXPORT_NORMALS_HQ, EXPORT_UV,\
- EXPORT_MTL, EXPORT_SEL_ONLY, EXPORT_ALL_SCENES,\
- EXPORT_ANIMATION, EXPORT_COPY_IMAGES, EXPORT_BLEN_OBS,\
- EXPORT_GROUP_BY_OB, EXPORT_GROUP_BY_MAT, EXPORT_KEEP_VERT_ORDER,\
- EXPORT_POLYGROUPS, EXPORT_CURVE_AS_NURBS
-
- EXPORT_APPLY_MODIFIERS = Draw.Create(0)
- EXPORT_ROTX90 = Draw.Create(1)
- EXPORT_TRI = Draw.Create(0)
- EXPORT_EDGES = Draw.Create(1)
- EXPORT_NORMALS = Draw.Create(0)
- EXPORT_NORMALS_HQ = Draw.Create(0)
- EXPORT_UV = Draw.Create(1)
- EXPORT_MTL = Draw.Create(1)
- EXPORT_SEL_ONLY = Draw.Create(1)
- EXPORT_ALL_SCENES = Draw.Create(0)
- EXPORT_ANIMATION = Draw.Create(0)
- EXPORT_COPY_IMAGES = Draw.Create(0)
- EXPORT_BLEN_OBS = Draw.Create(0)
- EXPORT_GROUP_BY_OB = Draw.Create(0)
- EXPORT_GROUP_BY_MAT = Draw.Create(0)
- EXPORT_KEEP_VERT_ORDER = Draw.Create(1)
- EXPORT_POLYGROUPS = Draw.Create(0)
- EXPORT_CURVE_AS_NURBS = Draw.Create(1)
-
-
- # Old UI
- '''
- # removed too many options are bad!
-
- # Get USER Options
- pup_block = [\
- ('Context...'),\
- ('Selection Only', EXPORT_SEL_ONLY, 'Only export objects in visible selection. Else export whole scene.'),\
- ('All Scenes', EXPORT_ALL_SCENES, 'Each scene as a separate OBJ file.'),\
- ('Animation', EXPORT_ANIMATION, 'Each frame as a numbered OBJ file.'),\
- ('Object Prefs...'),\
- ('Apply Modifiers', EXPORT_APPLY_MODIFIERS, 'Use transformed mesh data from each object. May break vert order for morph targets.'),\
- ('Rotate X90', EXPORT_ROTX90 , 'Rotate on export so Blenders UP is translated into OBJs UP'),\
- ('Keep Vert Order', EXPORT_KEEP_VERT_ORDER, 'Keep vert and face order, disables some other options.'),\
- ('Extra Data...'),\
- ('Edges', EXPORT_EDGES, 'Edges not connected to faces.'),\
- ('Normals', EXPORT_NORMALS, 'Export vertex normal data (Ignored on import).'),\
- ('High Quality Normals', EXPORT_NORMALS_HQ, 'Calculate high quality normals for rendering.'),\
- ('UVs', EXPORT_UV, 'Export texface UV coords.'),\
- ('Materials', EXPORT_MTL, 'Write a separate MTL file with the OBJ.'),\
- ('Copy Images', EXPORT_COPY_IMAGES, 'Copy image files to the export directory, never overwrite.'),\
- ('Triangulate', EXPORT_TRI, 'Triangulate quads.'),\
- ('Grouping...'),\
- ('Objects', EXPORT_BLEN_OBS, 'Export blender objects as "OBJ objects".'),\
- ('Object Groups', EXPORT_GROUP_BY_OB, 'Export blender objects as "OBJ Groups".'),\
- ('Material Groups', EXPORT_GROUP_BY_MAT, 'Group by materials.'),\
- ]
-
- if not Draw.PupBlock('Export...', pup_block):
- return
- '''
-
- # BEGIN ALTERNATIVE UI *******************
- if True:
-
- EVENT_NONE = 0
- EVENT_EXIT = 1
- EVENT_REDRAW = 2
- EVENT_EXPORT = 3
-
- GLOBALS = {}
- GLOBALS['EVENT'] = EVENT_REDRAW
- #GLOBALS['MOUSE'] = Window.GetMouseCoords()
- GLOBALS['MOUSE'] = [i/2 for i in Window.GetScreenSize()]
-
- def obj_ui_set_event(e,v):
- GLOBALS['EVENT'] = e
-
- def do_split(e,v):
-			global EXPORT_BLEN_OBS, EXPORT_GROUP_BY_OB, EXPORT_GROUP_BY_MAT, EXPORT_APPLY_MODIFIERS, EXPORT_KEEP_VERT_ORDER, EXPORT_POLYGROUPS
- if EXPORT_BLEN_OBS.val or EXPORT_GROUP_BY_OB.val or EXPORT_GROUP_BY_MAT.val or EXPORT_APPLY_MODIFIERS.val:
- EXPORT_KEEP_VERT_ORDER.val = 0
- else:
- EXPORT_KEEP_VERT_ORDER.val = 1
-
- def do_vertorder(e,v):
-			global EXPORT_BLEN_OBS, EXPORT_GROUP_BY_OB, EXPORT_GROUP_BY_MAT, EXPORT_APPLY_MODIFIERS, EXPORT_KEEP_VERT_ORDER
- if EXPORT_KEEP_VERT_ORDER.val:
- EXPORT_BLEN_OBS.val = EXPORT_GROUP_BY_OB.val = EXPORT_GROUP_BY_MAT.val = EXPORT_APPLY_MODIFIERS.val = 0
- else:
- if not (EXPORT_BLEN_OBS.val or EXPORT_GROUP_BY_OB.val or EXPORT_GROUP_BY_MAT.val or EXPORT_APPLY_MODIFIERS.val):
- EXPORT_KEEP_VERT_ORDER.val = 1
-
-
- def do_help(e,v):
- url = __url__[0]
-			print('Trying to open web browser with documentation at this address...')
-			print('\t' + url)
-
- try:
- import webbrowser
- webbrowser.open(url)
- except:
-				print('...could not open a browser window.')
-
- def obj_ui():
- ui_x, ui_y = GLOBALS['MOUSE']
-
- # Center based on overall pup size
- ui_x -= 165
- ui_y -= 140
-
- global EXPORT_APPLY_MODIFIERS, EXPORT_ROTX90, EXPORT_TRI, EXPORT_EDGES,\
- EXPORT_NORMALS, EXPORT_NORMALS_HQ, EXPORT_UV,\
- EXPORT_MTL, EXPORT_SEL_ONLY, EXPORT_ALL_SCENES,\
- EXPORT_ANIMATION, EXPORT_COPY_IMAGES, EXPORT_BLEN_OBS,\
- EXPORT_GROUP_BY_OB, EXPORT_GROUP_BY_MAT, EXPORT_KEEP_VERT_ORDER,\
- EXPORT_POLYGROUPS, EXPORT_CURVE_AS_NURBS
-
- Draw.Label('Context...', ui_x+9, ui_y+239, 220, 20)
- Draw.BeginAlign()
- EXPORT_SEL_ONLY = Draw.Toggle('Selection Only', EVENT_NONE, ui_x+9, ui_y+219, 110, 20, EXPORT_SEL_ONLY.val, 'Only export objects in visible selection. Else export whole scene.')
- EXPORT_ALL_SCENES = Draw.Toggle('All Scenes', EVENT_NONE, ui_x+119, ui_y+219, 110, 20, EXPORT_ALL_SCENES.val, 'Each scene as a separate OBJ file.')
- EXPORT_ANIMATION = Draw.Toggle('Animation', EVENT_NONE, ui_x+229, ui_y+219, 110, 20, EXPORT_ANIMATION.val, 'Each frame as a numbered OBJ file.')
- Draw.EndAlign()
-
-
- Draw.Label('Output Options...', ui_x+9, ui_y+189, 220, 20)
- Draw.BeginAlign()
- EXPORT_APPLY_MODIFIERS = Draw.Toggle('Apply Modifiers', EVENT_REDRAW, ui_x+9, ui_y+170, 110, 20, EXPORT_APPLY_MODIFIERS.val, 'Use transformed mesh data from each object. May break vert order for morph targets.', do_split)
- EXPORT_ROTX90 = Draw.Toggle('Rotate X90', EVENT_NONE, ui_x+119, ui_y+170, 110, 20, EXPORT_ROTX90.val, 'Rotate on export so Blenders UP is translated into OBJs UP')
- EXPORT_COPY_IMAGES = Draw.Toggle('Copy Images', EVENT_NONE, ui_x+229, ui_y+170, 110, 20, EXPORT_COPY_IMAGES.val, 'Copy image files to the export directory, never overwrite.')
- Draw.EndAlign()
-
-
- Draw.Label('Export...', ui_x+9, ui_y+139, 220, 20)
- Draw.BeginAlign()
- EXPORT_EDGES = Draw.Toggle('Edges', EVENT_NONE, ui_x+9, ui_y+120, 50, 20, EXPORT_EDGES.val, 'Edges not connected to faces.')
- EXPORT_TRI = Draw.Toggle('Triangulate', EVENT_NONE, ui_x+59, ui_y+120, 70, 20, EXPORT_TRI.val, 'Triangulate quads.')
- Draw.EndAlign()
- Draw.BeginAlign()
- EXPORT_MTL = Draw.Toggle('Materials', EVENT_NONE, ui_x+139, ui_y+120, 70, 20, EXPORT_MTL.val, 'Write a separate MTL file with the OBJ.')
- EXPORT_UV = Draw.Toggle('UVs', EVENT_NONE, ui_x+209, ui_y+120, 31, 20, EXPORT_UV.val, 'Export texface UV coords.')
- Draw.EndAlign()
- Draw.BeginAlign()
- EXPORT_NORMALS = Draw.Toggle('Normals', EVENT_NONE, ui_x+250, ui_y+120, 59, 20, EXPORT_NORMALS.val, 'Export vertex normal data (Ignored on import).')
- EXPORT_NORMALS_HQ = Draw.Toggle('HQ', EVENT_NONE, ui_x+309, ui_y+120, 31, 20, EXPORT_NORMALS_HQ.val, 'Calculate high quality normals for rendering.')
- Draw.EndAlign()
- EXPORT_POLYGROUPS = Draw.Toggle('Polygroups', EVENT_REDRAW, ui_x+9, ui_y+95, 120, 20, EXPORT_POLYGROUPS.val, 'Export vertex groups as OBJ groups (one group per face approximation).')
-
- EXPORT_CURVE_AS_NURBS = Draw.Toggle('Nurbs', EVENT_NONE, ui_x+139, ui_y+95, 100, 20, EXPORT_CURVE_AS_NURBS.val, 'Export 3D nurbs curves and polylines as OBJ curves, (bezier not supported).')
-
-
- Draw.Label('Blender Objects as OBJ:', ui_x+9, ui_y+59, 220, 20)
- Draw.BeginAlign()
- EXPORT_BLEN_OBS = Draw.Toggle('Objects', EVENT_REDRAW, ui_x+9, ui_y+39, 60, 20, EXPORT_BLEN_OBS.val, 'Export blender objects as "OBJ objects".', do_split)
- EXPORT_GROUP_BY_OB = Draw.Toggle('Groups', EVENT_REDRAW, ui_x+69, ui_y+39, 60, 20, EXPORT_GROUP_BY_OB.val, 'Export blender objects as "OBJ Groups".', do_split)
- EXPORT_GROUP_BY_MAT = Draw.Toggle('Material Groups', EVENT_REDRAW, ui_x+129, ui_y+39, 100, 20, EXPORT_GROUP_BY_MAT.val, 'Group by materials.', do_split)
- Draw.EndAlign()
-
- EXPORT_KEEP_VERT_ORDER = Draw.Toggle('Keep Vert Order', EVENT_REDRAW, ui_x+239, ui_y+39, 100, 20, EXPORT_KEEP_VERT_ORDER.val, 'Keep vert and face order, disables some other options. Use for morph targets.', do_vertorder)
-
- Draw.BeginAlign()
- Draw.PushButton('Online Help', EVENT_REDRAW, ui_x+9, ui_y+9, 110, 20, 'Load the wiki page for this script', do_help)
- Draw.PushButton('Cancel', EVENT_EXIT, ui_x+119, ui_y+9, 110, 20, '', obj_ui_set_event)
- Draw.PushButton('Export', EVENT_EXPORT, ui_x+229, ui_y+9, 110, 20, 'Export with these settings', obj_ui_set_event)
- Draw.EndAlign()
-
-
- # hack so the toggle buttons redraw. this is not nice at all
- while GLOBALS['EVENT'] not in (EVENT_EXIT, EVENT_EXPORT):
- Draw.UIBlock(obj_ui, 0)
-
- if GLOBALS['EVENT'] != EVENT_EXPORT:
- return
-
- # END ALTERNATIVE UI *********************
-
-
- if EXPORT_KEEP_VERT_ORDER.val:
- EXPORT_BLEN_OBS.val = False
- EXPORT_GROUP_BY_OB.val = False
- EXPORT_GROUP_BY_MAT.val = False
- EXPORT_APPLY_MODIFIERS.val = False
-
- Window.EditMode(0)
- Window.WaitCursor(1)
-
- EXPORT_APPLY_MODIFIERS = EXPORT_APPLY_MODIFIERS.val
- EXPORT_ROTX90 = EXPORT_ROTX90.val
- EXPORT_TRI = EXPORT_TRI.val
- EXPORT_EDGES = EXPORT_EDGES.val
- EXPORT_NORMALS = EXPORT_NORMALS.val
- EXPORT_NORMALS_HQ = EXPORT_NORMALS_HQ.val
- EXPORT_UV = EXPORT_UV.val
- EXPORT_MTL = EXPORT_MTL.val
- EXPORT_SEL_ONLY = EXPORT_SEL_ONLY.val
- EXPORT_ALL_SCENES = EXPORT_ALL_SCENES.val
- EXPORT_ANIMATION = EXPORT_ANIMATION.val
- EXPORT_COPY_IMAGES = EXPORT_COPY_IMAGES.val
- EXPORT_BLEN_OBS = EXPORT_BLEN_OBS.val
- EXPORT_GROUP_BY_OB = EXPORT_GROUP_BY_OB.val
- EXPORT_GROUP_BY_MAT = EXPORT_GROUP_BY_MAT.val
- EXPORT_KEEP_VERT_ORDER = EXPORT_KEEP_VERT_ORDER.val
- EXPORT_POLYGROUPS = EXPORT_POLYGROUPS.val
- EXPORT_CURVE_AS_NURBS = EXPORT_CURVE_AS_NURBS.val
-
-
- base_name, ext = splitExt(filename)
- context_name = [base_name, '', '', ext] # basename, scene_name, framenumber, extension
-
- # Use the options to export the data using write()
- # def write(filename, objects, EXPORT_EDGES=False, EXPORT_NORMALS=False, EXPORT_MTL=True, EXPORT_COPY_IMAGES=False, EXPORT_APPLY_MODIFIERS=True):
- orig_scene = Scene.GetCurrent()
- if EXPORT_ALL_SCENES:
- export_scenes = Scene.Get()
- else:
- export_scenes = [orig_scene]
-
- # Export all scenes.
- for scn in export_scenes:
-		scn.makeCurrent() # If already current, this is not slow.
- context = scn.getRenderingContext()
- orig_frame = Blender.Get('curframe')
-
- if EXPORT_ALL_SCENES: # Add scene name into the context_name
-			context_name[1] = '_%s' % BPySys.cleanName(scn.name) # WARNING: it's possible that this could cause a collision; we could fix it if we were feeling paranoid.
-
- # Export an animation?
- if EXPORT_ANIMATION:
- scene_frames = xrange(context.startFrame(), context.endFrame()+1) # up to and including the end frame.
- else:
-			scene_frames = [orig_frame] # Don't export an animation.
-
- # Loop through all frames in the scene and export.
- for frame in scene_frames:
- if EXPORT_ANIMATION: # Add frame to the filename.
- context_name[2] = '_%.6d' % frame
-
- Blender.Set('curframe', frame)
- if EXPORT_SEL_ONLY:
- export_objects = scn.objects.context
- else:
- export_objects = scn.objects
-
- full_path= ''.join(context_name)
-
- # erm... bit of a problem here, this can overwrite files when exporting frames. not too bad.
- # EXPORT THE FILE.
- write(full_path, export_objects,\
- EXPORT_TRI, EXPORT_EDGES, EXPORT_NORMALS,\
- EXPORT_NORMALS_HQ, EXPORT_UV, EXPORT_MTL,\
- EXPORT_COPY_IMAGES, EXPORT_APPLY_MODIFIERS,\
- EXPORT_ROTX90, EXPORT_BLEN_OBS,\
- EXPORT_GROUP_BY_OB, EXPORT_GROUP_BY_MAT, EXPORT_KEEP_VERT_ORDER,\
- EXPORT_POLYGROUPS, EXPORT_CURVE_AS_NURBS)
-
- Blender.Set('curframe', orig_frame)
-
- # Restore old active scene.
- orig_scene.makeCurrent()
- Window.WaitCursor(0)
-
-
-def do_export(filename, context):
- # Window.EditMode(0)
- # Window.WaitCursor(1)
-
- EXPORT_APPLY_MODIFIERS = True
- EXPORT_ROTX90 = True
- EXPORT_TRI = False
- EXPORT_EDGES = False
- EXPORT_NORMALS = False
- EXPORT_NORMALS_HQ = False
- EXPORT_UV = True
- EXPORT_MTL = True
- EXPORT_SEL_ONLY = True
- EXPORT_ALL_SCENES = False # XXX not working atm
- EXPORT_ANIMATION = False
- EXPORT_COPY_IMAGES = False
- EXPORT_BLEN_OBS = True
- EXPORT_GROUP_BY_OB = False
- EXPORT_GROUP_BY_MAT = False
- EXPORT_KEEP_VERT_ORDER = False
- EXPORT_POLYGROUPS = False
- EXPORT_CURVE_AS_NURBS = True
-
- base_name, ext = splitExt(filename)
- context_name = [base_name, '', '', ext] # Base name, scene name, frame number, extension
-
- orig_scene = context.scene
-
-# if EXPORT_ALL_SCENES:
-# export_scenes = bpy.data.scenes
-# else:
-# export_scenes = [orig_scene]
-
- # XXX only exporting one scene atm since changing
- # current scene is not possible.
- # Brecht says that ideally in 2.5 we won't need such a function,
- # allowing multiple scenes open at once.
- export_scenes = [orig_scene]
-
- # Export all scenes.
- for scn in export_scenes:
- # scn.makeCurrent() # If already current, this is not slow.
- # context = scn.getRenderingContext()
- orig_frame = scn.current_frame
-
- if EXPORT_ALL_SCENES: # Add scene name into the context_name
-			context_name[1] = '_%s' % BPySys.cleanName(scn.name) # WARNING: it's possible that this could cause a collision; we could fix it if we were feeling paranoid.
-
- # Export an animation?
- if EXPORT_ANIMATION:
-			scene_frames = range(scn.start_frame, scn.end_frame+1) # Up to and including the end frame.
-		else:
-			scene_frames = [orig_frame] # Don't export an animation.
-
- # Loop through all frames in the scene and export.
- for frame in scene_frames:
- if EXPORT_ANIMATION: # Add frame to the filename.
- context_name[2] = '_%.6d' % frame
-
- scn.current_frame = frame
- if EXPORT_SEL_ONLY:
- export_objects = context.selected_objects
- else:
- export_objects = scn.objects
-
- full_path= ''.join(context_name)
-
- # erm... bit of a problem here, this can overwrite files when exporting frames. not too bad.
- # EXPORT THE FILE.
- write(full_path, export_objects, scn,
- EXPORT_TRI, EXPORT_EDGES, EXPORT_NORMALS,
- EXPORT_NORMALS_HQ, EXPORT_UV, EXPORT_MTL,
- EXPORT_COPY_IMAGES, EXPORT_APPLY_MODIFIERS,
- EXPORT_ROTX90, EXPORT_BLEN_OBS,
- EXPORT_GROUP_BY_OB, EXPORT_GROUP_BY_MAT, EXPORT_KEEP_VERT_ORDER,
- EXPORT_POLYGROUPS, EXPORT_CURVE_AS_NURBS)
-
-
- scn.current_frame = orig_frame
-
- # Restore old active scene.
-# orig_scene.makeCurrent()
-# Window.WaitCursor(0)
-
-
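As a quick illustration of the path handling in do_export() above: the output name is assembled from four slots (base, scene name, frame number, extension) so per-scene and per-frame exports land in distinct files. A hedged sketch with hypothetical values, assuming the splitExt helper used by this script:

# Hypothetical values, for illustration only.
base_name, ext = splitExt('/tmp/monkey.obj')     # -> ('/tmp/monkey', '.obj')
context_name = [base_name, '', '', ext]          # base, scene name, frame number, extension

context_name[1] = '_Scene'        # filled in only when exporting all scenes
context_name[2] = '_%.6d' % 25    # filled in only when exporting an animation

full_path = ''.join(context_name) # '/tmp/monkey_Scene_000025.obj'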
-class EXPORT_OT_obj(bpy.types.Operator):
- '''
- Currently the exporter lacks these features:
- * nurbs
- * multiple scene export (only active scene is written)
- * particles
- '''
- __label__ = 'Export OBJ'
-
- # List of operator properties, the attributes will be assigned
- # to the class instance from the operator settings before calling.
- __props__ = [
- # bpy.types.FloatProperty(attr="setting_1", name="Example 1",
- # default=10.0, min=0, max=10, description="Add info here"),
- # bpy.types.IntProperty(attr="setting_2", default=2),
- # bpy.types.BoolProperty(attr="toggle", default=True)
- ]
-
-	def execute(self, context):
- print("Selected: " + context.active_object.name)
-
- do_export("/tmp/test.obj", context)
-
- return 'FINISHED'
-
-	def invoke(self, context, event):
- print("Invoke")
- return 'FINISHED'
-
-	def poll(self, context): # Poll isn't working yet
- print("Poll")
- return True
-
-if (hasattr(bpy.ops, "EXPORT_OT_obj")):
-	bpy.ops.remove(bpy.ops.EXPORT_OT_obj)
-
-bpy.ops.add(EXPORT_OT_obj)
-
-bpy.ops.EXPORT_OT_obj()
-
-bpy.ops.remove(bpy.ops.EXPORT_OT_obj)
diff --git a/release/ui/space_script.py b/release/ui/space_script.py
index d35f2d389c8..9b0809ffc75 100644
--- a/release/ui/space_script.py
+++ b/release/ui/space_script.py
@@ -32,7 +32,7 @@ class SCRIPT_HT_header(bpy.types.Header):
if context.area.show_menus:
row = layout.row(align=True)
- row.itemM(context, "SCRIPT_MT_scripts")
+ row.itemM("SCRIPT_MT_scripts")
class SCRIPT_MT_scripts(bpy.types.Menu):
__space_type__ = "SCRIPTS_WINDOW"
@@ -41,7 +41,7 @@ class SCRIPT_MT_scripts(bpy.types.Menu):
def draw(self, context):
layout = self.layout
layout.column()
- layout.itemM(context, "SCRIPT_MT_export")
+ layout.itemM("SCRIPT_MT_export")
layout.itemO("SCRIPT_OT_reload_scripts")
class SCRIPT_MT_export(bpy.types.Menu):
diff --git a/source/blender/makesrna/intern/rna_mesh_api.c b/source/blender/makesrna/intern/rna_mesh_api.c
index 984e6a5d30f..f4bc52bc517 100644
--- a/source/blender/makesrna/intern/rna_mesh_api.c
+++ b/source/blender/makesrna/intern/rna_mesh_api.c
@@ -36,6 +36,8 @@
#include "BKE_customdata.h"
#include "BKE_DerivedMesh.h"
+#include "BKE_mesh.h"
+
#include "BLI_arithb.h"
#include "DNA_mesh_types.h"
@@ -66,6 +68,14 @@ void rna_Mesh_transform(Mesh *me, float *mat)
}
}
+Mesh *rna_Mesh_create_copy(Mesh *me)
+{
+ Mesh *ret= copy_mesh(me);
+ ret->id.us--;
+
+ return ret;
+}
+
#if 0
/* extern struct EditVert *addvertlist(EditMesh *em, float *vec, struct EditVert *example); */
@@ -101,6 +111,11 @@ void RNA_api_mesh(StructRNA *srna)
parm= RNA_def_float_matrix(func, "matrix", 16, NULL, 0.0f, 0.0f, "", "Matrix.", 0.0f, 0.0f);
RNA_def_property_flag(parm, PROP_REQUIRED);
+ func= RNA_def_function(srna, "create_copy", "rna_Mesh_create_copy");
+ RNA_def_function_ui_description(func, "Create a copy of this Mesh datablock.");
+ parm= RNA_def_pointer(func, "mesh", "Mesh", "", "Mesh, remove it if it is only used for export.");
+ RNA_def_function_return(func, parm);
+
/*
func= RNA_def_function(srna, "add_geom", "rna_Mesh_add_geom");
RNA_def_function_ui_description(func, "Add geometry data to mesh.");
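A hedged usage sketch of the new Mesh.create_copy() from Python, following the create-then-remove pattern the exporter already uses for temporary meshes. Here me is assumed to be an existing Mesh and process_mesh is a placeholder, not a real API:

import bpy

me_copy = me.create_copy()          # new Mesh with its user count already decremented
try:
    process_mesh(me_copy)           # placeholder for whatever the exporter needs to do
finally:
    bpy.data.remove_mesh(me_copy)   # remove it if it was only created for export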
diff --git a/source/blender/makesrna/intern/rna_object_api.c b/source/blender/makesrna/intern/rna_object_api.c
index 1b6c298eba4..b09acb51084 100644
--- a/source/blender/makesrna/intern/rna_object_api.c
+++ b/source/blender/makesrna/intern/rna_object_api.c
@@ -36,6 +36,12 @@
#include "DNA_object_types.h"
+/* parameter to rna_Object_create_mesh */
+typedef enum CreateMeshType {
+ CREATE_MESH_PREVIEW = 0,
+ CREATE_MESH_RENDER = 1
+} CreateMeshType;
+
#ifdef RNA_RUNTIME
#include "BKE_customdata.h"
@@ -55,7 +61,7 @@
#include "ED_mesh.h"
/* copied from init_render_mesh (render code) */
-static Mesh *create_mesh(Object *ob, bContext *C, ReportList *reports, int render_mesh)
+static Mesh *rna_Object_create_mesh(Object *ob, bContext *C, ReportList *reports, int type)
{
/* CustomDataMask mask = CD_MASK_BAREMESH|CD_MASK_MTFACE|CD_MASK_MCOL; */
CustomDataMask mask = CD_MASK_MESH; /* this seems more suitable, exporter,
@@ -71,8 +77,13 @@ static Mesh *create_mesh(Object *ob, bContext *C, ReportList *reports, int rende
BKE_report(reports, RPT_ERROR, "Object should be of type MESH.");
return NULL;
}
-
- dm= render_mesh ? mesh_create_derived_render(sce, ob, mask) : mesh_create_derived_view(sce, ob, mask);
+
+ if (type == CREATE_MESH_PREVIEW) {
+ dm= mesh_create_derived_view(sce, ob, mask);
+ }
+ else {
+ dm= mesh_create_derived_render(sce, ob, mask);
+ }
if(!dm) {
/* TODO: report */
@@ -87,16 +98,6 @@ static Mesh *create_mesh(Object *ob, bContext *C, ReportList *reports, int rende
return me;
}
-static Mesh *rna_Object_create_render_mesh(Object *ob, bContext *C, ReportList *reports)
-{
- return create_mesh(ob, C, reports, 1);
-}
-
-static Mesh *rna_Object_create_preview_mesh(Object *ob, bContext *C, ReportList *reports)
-{
- return create_mesh(ob, C, reports, 0);
-}
-
/* When no longer needed, duplilist should be freed with Object.free_duplilist */
static void rna_Object_create_duplilist(Object *ob, bContext *C, ReportList *reports)
{
@@ -162,33 +163,20 @@ void RNA_api_object(StructRNA *srna)
FunctionRNA *func;
PropertyRNA *parm;
- /* copied from rna_def_object */
- static EnumPropertyItem object_type_items[] = {
- {OB_EMPTY, "EMPTY", 0, "Empty", ""},
- {OB_MESH, "MESH", 0, "Mesh", ""},
- {OB_CURVE, "CURVE", 0, "Curve", ""},
- {OB_SURF, "SURFACE", 0, "Surface", ""},
- {OB_FONT, "TEXT", 0, "Text", ""},
- {OB_MBALL, "META", 0, "Meta", ""},
- {OB_LAMP, "LAMP", 0, "Lamp", ""},
- {OB_CAMERA, "CAMERA", 0, "Camera", ""},
- {OB_WAVE, "WAVE", 0, "Wave", ""},
- {OB_LATTICE, "LATTICE", 0, "Lattice", ""},
- {OB_ARMATURE, "ARMATURE", 0, "Armature", ""},
- {0, NULL, 0, NULL, NULL}};
-
- func= RNA_def_function(srna, "create_render_mesh", "rna_Object_create_render_mesh");
- RNA_def_function_ui_description(func, "Create a Mesh datablock with all modifiers applied for rendering.");
- RNA_def_function_flag(func, FUNC_USE_CONTEXT|FUNC_USE_REPORTS);
- parm= RNA_def_pointer(func, "mesh", "Mesh", "", "Mesh created from object, remove it if it is only used for export.");
- RNA_def_function_return(func, parm);
+ static EnumPropertyItem mesh_type_items[] = {
+ {CREATE_MESH_PREVIEW, "PREVIEW", 0, "Preview", "Apply preview settings."},
+ {CREATE_MESH_RENDER, "RENDER", 0, "Render", "Apply render settings."},
+ {0, NULL, 0, NULL, NULL}
+ };
- func= RNA_def_function(srna, "create_preview_mesh", "rna_Object_create_preview_mesh");
- RNA_def_function_ui_description(func, "Create a Mesh datablock with all modifiers applied for preview.");
+ func= RNA_def_function(srna, "create_mesh", "rna_Object_create_mesh");
+ RNA_def_function_ui_description(func, "Create a Mesh datablock with all modifiers applied.");
RNA_def_function_flag(func, FUNC_USE_CONTEXT|FUNC_USE_REPORTS);
+ parm= RNA_def_enum(func, "type", mesh_type_items, 0, "", "Type of mesh settings to apply.");
+ RNA_def_property_flag(parm, PROP_REQUIRED);
parm= RNA_def_pointer(func, "mesh", "Mesh", "", "Mesh created from object, remove it if it is only used for export.");
RNA_def_function_return(func, parm);
-
+
func= RNA_def_function(srna, "create_dupli_list", "rna_Object_create_duplilist");
RNA_def_function_ui_description(func, "Create a list of dupli objects for this object, needs to be freed manually with free_dupli_list.");
RNA_def_function_flag(func, FUNC_USE_CONTEXT|FUNC_USE_REPORTS);
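With the two RNA functions collapsed into one, an exporter now picks the modifier settings via the enum instead of choosing between two calls. A hedged sketch, assuming ob is a mesh object in the current scene:

import bpy

me = ob.create_mesh('PREVIEW')      # or 'RENDER' to apply render-level settings
try:
    print('%d verts after modifiers' % len(me.verts))
finally:
    bpy.data.remove_mesh(me)        # the returned datablock is only needed for export

create_dupli_list / free_dupli_list below follow the same create-then-free convention.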
diff --git a/source/blender/python/intern/bpy_interface.c b/source/blender/python/intern/bpy_interface.c
index 8b5ad36f349..96ef796839b 100644
--- a/source/blender/python/intern/bpy_interface.c
+++ b/source/blender/python/intern/bpy_interface.c
@@ -19,6 +19,7 @@
#include "bpy_rna.h"
#include "bpy_operator.h"
#include "bpy_ui.h"
+#include "bpy_sys.h"
#include "bpy_util.h"
#include "DNA_anim_types.h"
@@ -91,6 +92,7 @@ void BPY_update_modules( void )
PyObject *mod= PyImport_ImportModuleLevel("bpy", NULL, NULL, NULL, 0);
PyModule_AddObject( mod, "data", BPY_rna_module() );
PyModule_AddObject( mod, "types", BPY_rna_types() );
+ PyModule_AddObject( mod, "sys", BPY_sys_module() );
}
/*****************************************************************************
diff --git a/source/blender/python/intern/bpy_operator_wrap.c b/source/blender/python/intern/bpy_operator_wrap.c
index b7e3c86dd91..4e5536c8552 100644
--- a/source/blender/python/intern/bpy_operator_wrap.c
+++ b/source/blender/python/intern/bpy_operator_wrap.c
@@ -331,7 +331,7 @@ PyObject *PYOP_wrap_add(PyObject *self, PyObject *py_class)
{PYOP_ATTR_UINAME, 's', 0, BPY_CLASS_ATTR_OPTIONAL},
{PYOP_ATTR_PROP, 'l', 0, BPY_CLASS_ATTR_OPTIONAL},
{PYOP_ATTR_DESCRIPTION, 's', 0, BPY_CLASS_ATTR_NONE_OK},
- {"execute", 'f', 2, BPY_CLASS_ATTR_OPTIONAL},
+ {"execute", 'f', 2, 0},
{"invoke", 'f', 3, BPY_CLASS_ATTR_OPTIONAL},
{"poll", 'f', 2, BPY_CLASS_ATTR_OPTIONAL},
{NULL, 0, 0, 0}
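After this change a Python operator must define execute; invoke and poll stay optional. A minimal hedged sketch using the same conventions as the export operator earlier in this commit; the class name and body are hypothetical:

import bpy

class SCRIPT_OT_hello(bpy.types.Operator):      # hypothetical example operator
    __label__ = 'Hello'

    def execute(self, context):                 # now required: (self, context)
        print("Hello from " + context.active_object.name)
        return 'FINISHED'

    def poll(self, context):                    # still optional: (self, context)
        return context.active_object is not None

bpy.ops.add(SCRIPT_OT_hello)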
diff --git a/source/blender/python/intern/bpy_rna.c b/source/blender/python/intern/bpy_rna.c
index 0b8a7df1ae1..90876b36ee0 100644
--- a/source/blender/python/intern/bpy_rna.c
+++ b/source/blender/python/intern/bpy_rna.c
@@ -1367,8 +1367,12 @@ PyObject *pyrna_param_to_py(PointerRNA *ptr, PropertyRNA *prop, void *data)
newptr= *(PointerRNA*)data;
}
else {
- /* XXX this is missing the ID part! */
- RNA_pointer_create(NULL, type, *(void**)data, &newptr);
+ if (RNA_struct_is_ID(type)) {
+ RNA_id_pointer_create(*(void**)data, &newptr);
+ }
+ else {
+ RNA_pointer_create(NULL, type, *(void**)data, &newptr);
+ }
}
if (newptr.data) {
diff --git a/source/blender/python/intern/bpy_sys.c b/source/blender/python/intern/bpy_sys.c
new file mode 100644
index 00000000000..4d80d3bff93
--- /dev/null
+++ b/source/blender/python/intern/bpy_sys.c
@@ -0,0 +1,460 @@
+/*
+ * $Id: Sys.c 17889 2008-12-16 11:26:55Z campbellbarton $
+ *
+ * ***** BEGIN GPL LICENSE BLOCK *****
+ *
+ * This program is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU General Public License
+ * as published by the Free Software Foundation; either version 2
+ * of the License, or (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program; if not, write to the Free Software Foundation,
+ * Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
+ *
+ * The Original Code is Copyright (C) 2001-2002 by NaN Holding BV.
+ * All rights reserved.
+ *
+ * This is a new part of Blender.
+ *
+ * Contributor(s): Willian P. Germano, Campbell Barton
+ *
+ * ***** END GPL LICENSE BLOCK *****
+*/
+
+#include "bpy_sys.h" /*This must come first*/
+#include "bpy_util.h"
+
+#include "BKE_utildefines.h"
+#include "BKE_global.h"
+#include "BKE_context.h"
+
+#include "BLI_blenlib.h"
+
+#include "DNA_scene_types.h" /* G.scene-"r.cfra */
+
+#include "PIL_time.h"
+/* #include "gen_utils.h" */
+
+#ifdef WIN32
+#define DIRSEP '\\'
+#define DIRSEP_STR "\\"
+#else
+#define DIRSEP '/'
+#define DIRSEP_STR "/"
+#endif
+
+
+/*****************************************************************************/
+/* Python API function prototypes for the sys module. */
+/*****************************************************************************/
+static PyObject *M_sys_basename( PyObject * self, PyObject * value );
+static PyObject *M_sys_dirname( PyObject * self, PyObject * value );
+static PyObject *M_sys_join( PyObject * self, PyObject * args );
+static PyObject *M_sys_splitext( PyObject * self, PyObject * value );
+static PyObject *M_sys_makename( PyObject * self, PyObject * args,
+ PyObject * kw );
+static PyObject *M_sys_exists( PyObject * self, PyObject * value );
+static PyObject *M_sys_time( PyObject * self );
+static PyObject *M_sys_sleep( PyObject * self, PyObject * args );
+static PyObject *M_sys_expandpath( PyObject *self, PyObject *value);
+static PyObject *M_sys_cleanpath( PyObject *self, PyObject *value);
+static PyObject *M_sys_relpath( PyObject *self, PyObject *args);
+
+/*****************************************************************************/
+/* The following string definitions are used for documentation strings. */
+/* In Python these will be written to the console when doing a */
+/* Blender.sys.__doc__ */
+/*****************************************************************************/
+static char M_sys_doc[] = "The Blender.sys submodule\n\
+\n\
+This is a minimal system module to supply simple functionality available\n\
+in the default Python module os.";
+
+static char M_sys_basename_doc[] =
+ "(path) - Split 'path' in dir and filename.\n\
+Return the filename.";
+
+static char M_sys_dirname_doc[] =
+ "(path) - Split 'path' in dir and filename.\n\
+Return the dir.";
+
+static char M_sys_join_doc[] =
+ "(dir, file) - Join dir and file to form a full filename.\n\
+Return the filename.";
+
+static char M_sys_splitext_doc[] =
+ "(path) - Split 'path' in root and extension:\n\
+/this/that/file.ext -> ('/this/that/file','.ext').\n\
+Return the pair (root, extension).";
+
+static char M_sys_makename_doc[] =
+ "(path = Blender.Get('filename'), ext = \"\", strip = 0) -\n\
+Strip dir and extension from path, leaving only a name, then append 'ext'\n\
+to it (if given) and return the resulting string.\n\n\
+(path) - string: a pathname -- Blender.Get('filename') if 'path' isn't given;\n\
+(ext = \"\") - string: the extension to append.\n\
+(strip = 0) - int: strip dirname from 'path' if given and non-zero.\n\
+Ex: makename('/path/to/file/myfile.foo','-01.abc') returns 'myfile-01.abc'\n\
+Ex: makename(ext='.txt') returns 'untitled.txt' if Blender.Get('filename')\n\
+returns a path to the file 'untitled.blend'";
+
+static char M_sys_time_doc[] =
+ "() - Return a float representing time elapsed in seconds.\n\
+Each successive call is guaranteed to return values greater than or\n\
+equal to the previous call.";
+
+static char M_sys_sleep_doc[] =
+ "(milliseconds = 10) - Sleep for the specified time.\n\
+(milliseconds = 10) - the amount of time in milliseconds to sleep.\n\
+This function can be necessary in tight 'get event' loops.";
+
+static char M_sys_exists_doc[] =
+ "(path) - Check if the given pathname exists.\n\
+The return value is as follows:\n\
+\t 0: path doesn't exist;\n\
+\t 1: path is an existing filename;\n\
+\t 2: path is an existing dirname;\n\
+\t-1: path exists but is neither a regular file nor a dir.";
+
+static char M_sys_expandpath_doc[] =
+"(path) - Expand this Blender internal path to a proper file system path.\n\
+(path) - the string path to convert.\n\n\
+Note: internally Blender paths can contain two special character sequences:\n\
+- '//' (at start) for base path directory (the current .blend's dir path);\n\
+- '#' characters in the filename will be replaced by the frame number.\n\n\
+This function expands these to their actual content, returning a valid path.\n\
+If the special chars are not found in the given path, it is simply returned.";
+
+static char M_sys_cleanpath_doc[] =
+"(path) - Removes parts of a path that are not needed paths such as '../foo/../bar/' and '//./././'";
+
+static char M_sys_relpath_doc[] =
+"(path, start=\"//\") - Returns the path relative to the current blend file or start if spesified";
+
+/*****************************************************************************/
+/* Python method structure definition for Blender.sys module: */
+/*****************************************************************************/
+struct PyMethodDef M_sys_methods[] = {
+ {"basename", M_sys_basename, METH_O, M_sys_basename_doc},
+ {"dirname", M_sys_dirname, METH_O, M_sys_dirname_doc},
+ {"join", M_sys_join, METH_VARARGS, M_sys_join_doc},
+ {"splitext", M_sys_splitext, METH_O, M_sys_splitext_doc},
+ {"makename", ( PyCFunction ) M_sys_makename,
+ METH_VARARGS | METH_KEYWORDS,
+ M_sys_makename_doc},
+ {"exists", M_sys_exists, METH_O, M_sys_exists_doc},
+ {"sleep", M_sys_sleep, METH_VARARGS, M_sys_sleep_doc},
+ {"time", ( PyCFunction ) M_sys_time, METH_NOARGS, M_sys_time_doc},
+ {"expandpath", M_sys_expandpath, METH_O, M_sys_expandpath_doc},
+ {"cleanpath", M_sys_cleanpath, METH_O, M_sys_cleanpath_doc},
+ {"relpath", M_sys_relpath, METH_VARARGS, M_sys_relpath_doc},
+ {NULL, NULL, 0, NULL}
+};
+
+#if PY_VERSION_HEX >= 0x03000000
+static struct PyModuleDef sys_module = {
+ PyModuleDef_HEAD_INIT,
+ "bpysys",
+ M_sys_doc,
+ -1,/* multiple "initialization" just copies the module dict. */
+ M_sys_methods,
+ NULL, NULL, NULL, NULL
+};
+#endif
+
+/* Module Functions */
+
+PyObject *BPY_sys_module( void )
+{
+ PyObject *submodule, *dict;
+
+#if PY_VERSION_HEX >= 0x03000000
+ submodule= PyModule_Create(&sys_module);
+#else /* Py2.x */
+ submodule= Py_InitModule3( "bpysys", M_sys_methods, M_sys_doc );
+#endif
+
+ dict = PyModule_GetDict( submodule );
+
+ /* EXPP_dict_set_item_str( dict, "dirsep", PyString_FromString(DIRSEP_STR) ); */
+ /* EXPP_dict_set_item_str( dict, "sep", PyString_FromString(DIRSEP_STR) ); */
+
+ return submodule;
+}
+
+static PyObject *M_sys_basename( PyObject * self, PyObject * value )
+{
+ char *name = _PyUnicode_AsString(value);
+ char *p, basename[FILE_MAXDIR + FILE_MAXFILE];
+ int n, len;
+
+ if( !name ) {
+ return PyErr_Format( PyExc_TypeError, "expected string argument" );
+ }
+
+ len = strlen( name );
+
+#ifdef WIN32
+ p = MAX2(strrchr( name, '/' ), strrchr( name, '\\' ));
+#else
+ p = strrchr( name, DIRSEP );
+#endif
+
+ if( p ) {
+ n = name + len - p - 1; /* - 1 because we don't want the sep */
+
+ if( n > FILE_MAXDIR + FILE_MAXFILE ) {
+ return PyErr_Format( PyExc_RuntimeError, "path too long" );
+ }
+
+ BLI_strncpy( basename, p + 1, n + 1 );
+ return PyUnicode_FromString( basename );
+ }
+
+ return PyUnicode_FromString( name );
+}
+
+static PyObject *M_sys_dirname( PyObject * self, PyObject * value )
+{
+ char *name = _PyUnicode_AsString(value);
+ char *p, dirname[FILE_MAXDIR + FILE_MAXFILE];
+ int n;
+
+ if( !name )
+ return PyErr_Format( PyExc_TypeError, "expected string argument" );
+
+#ifdef WIN32
+ p = MAX2(strrchr( name, '/' ), strrchr( name, '\\' ));
+#else
+ p = strrchr( name, DIRSEP );
+#endif
+
+ if( p ) {
+ n = p - name;
+
+ if( n > FILE_MAXDIR + FILE_MAXFILE )
+ return PyErr_Format( PyExc_RuntimeError, "path too long" );
+
+ BLI_strncpy( dirname, name, n + 1 );
+ return PyUnicode_FromString( dirname );
+ }
+
+ return PyUnicode_FromString( "." );
+}
+
+static PyObject *M_sys_join( PyObject * self, PyObject * args )
+{
+ char *name = NULL, *path = NULL;
+ char filename[FILE_MAXDIR + FILE_MAXFILE];
+ int pathlen = 0, namelen = 0;
+
+ if( !PyArg_ParseTuple( args, "ss:Blender.sys.join", &path, &name ) )
+ return NULL;
+
+ pathlen = strlen( path ) + 1;
+ namelen = strlen( name ) + 1; /* + 1 to account for '\0' for BLI_strncpy */
+
+ if( pathlen + namelen > FILE_MAXDIR + FILE_MAXFILE - 1 )
+ return PyErr_Format( PyExc_RuntimeError, "filename is too long." );
+
+ BLI_strncpy( filename, path, pathlen );
+
+ if( filename[pathlen - 2] != DIRSEP ) {
+ filename[pathlen - 1] = DIRSEP;
+ pathlen += 1;
+ }
+
+ BLI_strncpy( filename + pathlen - 1, name, namelen );
+
+ return PyUnicode_FromString( filename );
+}
+
+static PyObject *M_sys_splitext( PyObject * self, PyObject * value )
+{
+ char *name = _PyUnicode_AsString(value);
+ char *dot, *p, path[FILE_MAXDIR + FILE_MAXFILE], ext[FILE_MAXDIR + FILE_MAXFILE];
+ int n, len;
+
+ if( !name )
+ return PyErr_Format( PyExc_TypeError, "expected string argument" );
+
+ len = strlen( name );
+ dot = strrchr( name, '.' );
+
+ if( !dot )
+ return Py_BuildValue( "ss", name, "" );
+
+ p = strrchr( name, DIRSEP );
+
+ if( p ) {
+ if( p > dot )
+ return Py_BuildValue( "ss", name, "" );
+ }
+
+ n = name + len - dot;
+
+	/* long extensions are supported -- foolish, but Python's os.path.splitext
+ * supports them, so ... */
+
+ if( n >= FILE_MAXDIR + FILE_MAXFILE || ( len - n ) >= FILE_MAXDIR + FILE_MAXFILE )
+ return PyErr_Format( PyExc_RuntimeError, "path too long" );
+
+ BLI_strncpy( ext, dot, n + 1 );
+ BLI_strncpy( path, name, dot - name + 1 );
+
+ return Py_BuildValue( "ss", path, ext );
+}
+
+static PyObject *M_sys_makename( PyObject * self, PyObject * args,
+ PyObject * kw )
+{
+ char *path = G.sce, *ext = NULL;
+ int strip = 0;
+ static char *kwlist[] = { "path", "ext", "strip", NULL };
+ char *dot = NULL, *p = NULL, basename[FILE_MAXDIR + FILE_MAXFILE];
+ int n, len, lenext = 0;
+
+ if( !PyArg_ParseTupleAndKeywords( args, kw, "|ssi:Blender.sys.makename", kwlist, &path, &ext, &strip ) )
+ return NULL;
+
+ len = strlen( path ) + 1; /* + 1 to consider ending '\0' */
+ if( ext )
+ lenext = strlen( ext ) + 1;
+
+ if( ( len + lenext ) > FILE_MAXDIR + FILE_MAXFILE )
+ return PyErr_Format( PyExc_RuntimeError, "path too long" );
+
+ p = strrchr( path, DIRSEP );
+
+ if( p && strip ) {
+ n = path + len - p;
+ BLI_strncpy( basename, p + 1, n ); /* + 1 to skip the sep */
+ } else
+ BLI_strncpy( basename, path, len );
+
+ dot = strrchr( basename, '.' );
+
+ /* now the extension: always remove the one in basename */
+ if( dot || ext ) {
+ if( !ext )
+ basename[dot - basename] = '\0';
+ else { /* if user gave an ext, append it */
+
+ if( dot )
+ n = dot - basename;
+ else
+ n = strlen( basename );
+
+ BLI_strncpy( basename + n, ext, lenext );
+ }
+ }
+
+ return PyUnicode_FromString( basename );
+}
+
+static PyObject *M_sys_time( PyObject * self )
+{
+ return PyFloat_FromDouble( PIL_check_seconds_timer( ) );
+}
+
+static PyObject *M_sys_sleep( PyObject * self, PyObject * args )
+{
+ int millisecs = 10;
+
+ if( !PyArg_ParseTuple( args, "|i:Blender.sys.sleep", &millisecs ) )
+ return NULL;
+
+ PIL_sleep_ms( millisecs );
+
+ Py_RETURN_NONE;
+}
+
+static PyObject *M_sys_exists( PyObject * self, PyObject * value )
+{
+ char *fname = _PyUnicode_AsString(value);
+
+ int mode = 0, i = -1;
+
+ if( !fname )
+ return PyErr_Format( PyExc_TypeError, "expected string (pathname) argument" );
+
+ mode = BLI_exist(fname);
+
+ if( mode == 0 )
+ i = 0;
+ else if( S_ISREG( mode ) )
+ i = 1;
+ else if( S_ISDIR( mode ) )
+ i = 2;
+ /* i stays as -1 if path exists but is neither a regular file nor a dir */
+
+ return PyLong_FromLong(i);
+}
+
+static PyObject *M_sys_expandpath( PyObject * self, PyObject * value )
+{
+ char *path = _PyUnicode_AsString(value);
+ char expanded[FILE_MAXDIR + FILE_MAXFILE];
+ bContext *C = BPy_GetContext();
+ Scene *scene = CTX_data_scene(C);
+
+ if (!path)
+ return PyErr_Format( PyExc_TypeError, "expected string argument" );
+
+ BLI_strncpy(expanded, path, FILE_MAXDIR + FILE_MAXFILE);
+ BLI_convertstringcode(expanded, G.sce);
+ BLI_convertstringframe(expanded, scene->r.cfra);
+
+ return PyUnicode_FromString(expanded);
+}
+
+static PyObject *M_sys_cleanpath( PyObject * self, PyObject * value )
+{
+ char *path = _PyUnicode_AsString(value);
+ char cleaned[FILE_MAXDIR + FILE_MAXFILE];
+ int trailing_slash = 0, last;
+ if (!path)
+ return PyErr_Format( PyExc_TypeError, "expected string argument" );
+ last = strlen(path)-1;
+ if ((last >= 0) && ((path[last]=='/') || (path[last]=='\\'))) {
+ trailing_slash = 1;
+ }
+ BLI_strncpy(cleaned, path, FILE_MAXDIR + FILE_MAXFILE);
+ BLI_cleanup_file(NULL, cleaned);
+
+ if (trailing_slash) {
+ BLI_add_slash(cleaned);
+ }
+
+ return PyUnicode_FromString(cleaned);
+}
+
+static PyObject *M_sys_relpath( PyObject * self, PyObject * args )
+{
+ char *base = G.sce;
+ char *path;
+ char relpath[FILE_MAXDIR + FILE_MAXFILE];
+
+ if( !PyArg_ParseTuple( args, "s|s:Blender.sys.relpath", &path, &base ) )
+ return NULL;
+
+	BLI_strncpy(relpath, path, sizeof(relpath)); /* unlike strncpy, this guarantees NUL termination */
+ BLI_makestringcode(base, relpath);
+
+ return PyUnicode_FromString(relpath);
+}
+
+#if 0
+
+static PyObject *bpy_sys_get_blender_version()
+{
+ return PyUnicode_FromString(G.version);
+}
+
+#endif
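Since bpy_interface.c above registers this module as bpy.sys, the functions are reachable from scripts roughly as follows. A hedged sketch; return values follow the docstrings defined above:

import bpy

print(bpy.sys.splitext('/this/that/file.ext'))   # ('/this/that/file', '.ext')
print(bpy.sys.join('/this/that', 'file.ext'))    # '/this/that/file.ext'
print(bpy.sys.exists('/this/that/file.ext'))     # 0, 1, 2 or -1, as documented above

t0 = bpy.sys.time()
bpy.sys.sleep(10)                                # milliseconds
print('slept %.3fs' % (bpy.sys.time() - t0))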
diff --git a/source/blender/python/intern/bpy_sys.h b/source/blender/python/intern/bpy_sys.h
new file mode 100644
index 00000000000..a045ed0d537
--- /dev/null
+++ b/source/blender/python/intern/bpy_sys.h
@@ -0,0 +1,41 @@
+/*
+ * $Id: Sys.h 14444 2008-04-16 22:40:48Z hos $
+ *
+ * ***** BEGIN GPL LICENSE BLOCK *****
+ *
+ * This program is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU General Public License
+ * as published by the Free Software Foundation; either version 2
+ * of the License, or (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program; if not, write to the Free Software Foundation,
+ * Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
+ *
+ * The Original Code is Copyright (C) 2001-2002 by NaN Holding BV.
+ * All rights reserved.
+ *
+ * This is a new part of Blender.
+ *
+ * Contributor(s): Willian P. Germano
+ *
+ * ***** END GPL LICENSE BLOCK *****
+*/
+
+#ifndef BPY_SYS_H
+#define BPY_SYS_H
+
+/* #include <Python.h> */
+
+/* PyObject *sys_Init( void ); */
+
+#include <Python.h>
+
+PyObject *BPY_sys_module( void );
+
+
+#endif /* BPY_SYS_H */