git.blender.org/blender.git
Diffstat (limited to 'release/scripts/op/io_scene_obj')
-rw-r--r--  release/scripts/op/io_scene_obj/__init__.py   |  144
-rw-r--r--  release/scripts/op/io_scene_obj/export_obj.py |  882
-rw-r--r--  release/scripts/op/io_scene_obj/import_obj.py | 1221
3 files changed, 0 insertions, 2247 deletions
diff --git a/release/scripts/op/io_scene_obj/__init__.py b/release/scripts/op/io_scene_obj/__init__.py
deleted file mode 100644
index d3791d1cd95..00000000000
--- a/release/scripts/op/io_scene_obj/__init__.py
+++ /dev/null
@@ -1,144 +0,0 @@
-# ##### BEGIN GPL LICENSE BLOCK #####
-#
-# This program is free software; you can redistribute it and/or
-# modify it under the terms of the GNU General Public License
-# as published by the Free Software Foundation; either version 2
-# of the License, or (at your option) any later version.
-#
-# This program is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License
-# along with this program; if not, write to the Free Software Foundation,
-# Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
-#
-# ##### END GPL LICENSE BLOCK #####
-
-# <pep8 compliant>
-
-# To support reload properly, try to access a package var, if it's there, reload everything
-if "bpy" in locals():
- # only reload if we already loaded, highly annoying
- import sys
- reload(sys.modules.get("io_scene_obj.import_obj", sys))
- reload(sys.modules.get("io_scene_obj.export_obj", sys))
-
-
-import bpy
-from bpy.props import *
-from io_utils import ExportHelper, ImportHelper
-
-
-class ImportOBJ(bpy.types.Operator, ImportHelper):
- '''Load a Wavefront OBJ File'''
- bl_idname = "import_scene.obj"
- bl_label = "Import OBJ"
-
- filename_ext = ".obj"
-
- CREATE_SMOOTH_GROUPS = BoolProperty(name="Smooth Groups", description="Surround smooth groups by sharp edges", default= True)
- CREATE_FGONS = BoolProperty(name="NGons as FGons", description="Import faces with more than 4 verts as fgons", default= True)
- CREATE_EDGES = BoolProperty(name="Lines as Edges", description="Import lines and faces with 2 verts as edges", default= True)
- SPLIT_OBJECTS = BoolProperty(name="Object", description="Import OBJ Objects into Blender Objects", default= True)
- SPLIT_GROUPS = BoolProperty(name="Group", description="Import OBJ Groups into Blender Objects", default= True)
- # old comment: only used for user feedback
- # disabled this option because in old code a handler for it disabled SPLIT* params, it's not passed to load_obj
- # KEEP_VERT_ORDER = BoolProperty(name="Keep Vert Order", description="Keep vert and face order, disables split options, enable for morph targets", default= True)
- ROTATE_X90 = BoolProperty(name="-X90", description="Rotate X 90.", default= True)
- CLAMP_SIZE = FloatProperty(name="Clamp Scale", description="Clamp the size to this maximum (Zero to Disable)", min=0.0, max=1000.0, soft_min=0.0, soft_max=1000.0, default=0.0)
- POLYGROUPS = BoolProperty(name="Poly Groups", description="Import OBJ groups as vertex groups.", default= True)
- IMAGE_SEARCH = BoolProperty(name="Image Search", description="Search subdirs for any associated images (Warning, may be slow)", default= True)
-
-
- def execute(self, context):
- # print("Selected: " + context.active_object.name)
- import io_scene_obj.import_obj
- return io_scene_obj.import_obj.load(self, context, **self.properties)
- '''
- load_obj(self.properties.filepath,
- context,
- self.properties.CLAMP_SIZE,
- self.properties.CREATE_FGONS,
- self.properties.CREATE_SMOOTH_GROUPS,
- self.properties.CREATE_EDGES,
- self.properties.SPLIT_OBJECTS,
- self.properties.SPLIT_GROUPS,
- self.properties.ROTATE_X90,
- self.properties.IMAGE_SEARCH,
- self.properties.POLYGROUPS)
- '''
-
- return {'FINISHED'}
-
-
-class ExportOBJ(bpy.types.Operator, ExportHelper):
- '''Save a Wavefront OBJ File'''
-
- bl_idname = "export_scene.obj"
- bl_label = 'Export OBJ'
-
- filename_ext = ".obj"
-
- # List of operator properties, the attributes will be assigned
- # to the class instance from the operator settings before calling.
-
- # context group
- use_selection = BoolProperty(name="Selection Only", description="Export selected objects only", default= False)
- use_all_scenes = BoolProperty(name="All Scenes", description="", default= False)
- use_animation = BoolProperty(name="Animation", description="", default= False)
-
- # object group
- use_modifiers = BoolProperty(name="Apply Modifiers", description="Apply modifiers (preview resolution)", default= True)
- use_rotate_x90 = BoolProperty(name="Rotate X90", description="", default= True)
-
- # extra data group
- use_edges = BoolProperty(name="Edges", description="", default=True)
- use_normals = BoolProperty(name="Normals", description="", default=False)
- use_hq_normals = BoolProperty(name="High Quality Normals", description="", default=True)
- use_uvs = BoolProperty(name="UVs", description="", default= True)
- use_materials = BoolProperty(name="Materials", description="", default=True)
- copy_images = BoolProperty(name="Copy Images", description="", default=False)
- use_triangles = BoolProperty(name="Triangulate", description="", default=False)
- use_vertex_groups = BoolProperty(name="Polygroups", description="", default=False)
- use_nurbs = BoolProperty(name="Nurbs", description="", default=False)
-
- # grouping group
- use_blen_objects = BoolProperty(name="Objects as OBJ Objects", description="", default= True)
- group_by_object = BoolProperty(name="Objects as OBJ Groups ", description="", default= False)
- group_by_material = BoolProperty(name="Material Groups", description="", default= False)
- keep_vertex_order = BoolProperty(name="Keep Vertex Order", description="", default= False)
-
-
- def execute(self, context):
- import io_scene_obj.export_obj
- print(self.properties.keys())
- return io_scene_obj.export_obj.save(self, context, **self.properties)
-
-
-def menu_func_import(self, context):
- self.layout.operator(ImportOBJ.bl_idname, text="Wavefront (.obj)")
-
-
-def menu_func_export(self, context):
- self.layout.operator(ExportOBJ.bl_idname, text="Wavefront (.obj)")
-
-
-def register():
- bpy.types.INFO_MT_file_import.append(menu_func_import)
- bpy.types.INFO_MT_file_export.append(menu_func_export)
-
-def unregister():
- bpy.types.INFO_MT_file_import.remove(menu_func_import)
- bpy.types.INFO_MT_file_export.remove(menu_func_export)
-
-
-# CONVERSION ISSUES
-# - matrix problem
-# - duplis - only tested dupliverts
-# - all scenes export
-# + normals calculation
-
-if __name__ == "__main__":
- register()
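For orientation, once register() has run the two operators above can also be invoked from scripts, not just from the File menu. A minimal sketch, assuming the Blender 2.5x-era API this add-on targets; the file path and property values are illustrative, not part of the add-on:

import bpy

# filepath comes from ImportHelper/ExportHelper; the other keywords are the
# operator properties declared above.
bpy.ops.import_scene.obj(filepath="/tmp/model.obj", CREATE_SMOOTH_GROUPS=True)
bpy.ops.export_scene.obj(filepath="/tmp/model.obj", use_selection=True)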
diff --git a/release/scripts/op/io_scene_obj/export_obj.py b/release/scripts/op/io_scene_obj/export_obj.py
deleted file mode 100644
index 01f5b221546..00000000000
--- a/release/scripts/op/io_scene_obj/export_obj.py
+++ /dev/null
@@ -1,882 +0,0 @@
-# ##### BEGIN GPL LICENSE BLOCK #####
-#
-# This program is free software; you can redistribute it and/or
-# modify it under the terms of the GNU General Public License
-# as published by the Free Software Foundation; either version 2
-# of the License, or (at your option) any later version.
-#
-# This program is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License
-# along with this program; if not, write to the Free Software Foundation,
-# Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
-#
-# ##### END GPL LICENSE BLOCK #####
-
-# <pep8 compliant>
-
-import os
-import time
-import shutil
-
-import bpy
-import mathutils
-
-def fixName(name):
- if name == None:
- return 'None'
- else:
- return name.replace(' ', '_')
-
-def write_mtl(scene, filepath, copy_images, mtl_dict):
-
- world = scene.world
- worldAmb = world.ambient_color
-
- dest_dir = os.path.dirname(filepath)
-
- def copy_image(image):
- fn = bpy.path.abspath(image.filepath)
- fn_strip = os.path.basename(fn)
- if copy_images:
- rel = fn_strip
- fn_abs_dest = os.path.join(dest_dir, fn_strip)
- if not os.path.exists(fn_abs_dest):
- shutil.copy(fn, fn_abs_dest)
- else:
- rel = fn
-
- return rel
-
-
- file = open(filepath, "w")
- # XXX
-# file.write('# Blender MTL File: %s\n' % Blender.Get('filepath').split('\\')[-1].split('/')[-1])
- file.write('# Material Count: %i\n' % len(mtl_dict))
- # Write material/image combinations we have used.
- for key, (mtl_mat_name, mat, img) in mtl_dict.items():
-
- # Get the Blender data for the material and the image.
- # Having an image named None will cause a bug, don't do it :)
-
- file.write('newmtl %s\n' % mtl_mat_name) # Define a new material: matname_imgname
-
- if mat:
- file.write('Ns %.6f\n' % ((mat.specular_hardness-1) * 1.9607843137254901) ) # Hardness, convert Blender's 1-511 range to MTL's 0-1000
- file.write('Ka %.6f %.6f %.6f\n' % tuple([c*mat.ambient for c in worldAmb]) ) # Ambient, uses mirror colour,
- file.write('Kd %.6f %.6f %.6f\n' % tuple([c*mat.diffuse_intensity for c in mat.diffuse_color]) ) # Diffuse
- file.write('Ks %.6f %.6f %.6f\n' % tuple([c*mat.specular_intensity for c in mat.specular_color]) ) # Specular
- if hasattr(mat, "ior"):
- file.write('Ni %.6f\n' % mat.ior) # Refraction index
- else:
- file.write('Ni %.6f\n' % 1.0)
- file.write('d %.6f\n' % mat.alpha) # Alpha (obj uses 'd' for dissolve)
-
- # 0 to disable lighting, 1 for ambient & diffuse only (specular color set to black), 2 for full lighting.
- if mat.use_shadeless:
- file.write('illum 0\n') # ignore lighting
- elif mat.specular_intensity == 0:
- file.write('illum 1\n') # no specular.
- else:
- file.write('illum 2\n') # light normally
-
- else:
- #write a dummy material here?
- file.write('Ns 0\n')
- file.write('Ka %.6f %.6f %.6f\n' % tuple([c for c in worldAmb]) ) # Ambient, uses mirror colour,
- file.write('Kd 0.8 0.8 0.8\n')
- file.write('Ks 0.8 0.8 0.8\n')
- file.write('d 1\n') # No alpha
- file.write('illum 2\n') # light normally
-
- # Write images!
- if img: # We have an image on the face!
- # write relative image path
- rel = copy_image(img)
- file.write('map_Kd %s\n' % rel) # Diffuse mapping image
-# file.write('map_Kd %s\n' % img.filepath.split('\\')[-1].split('/')[-1]) # Diffuse mapping image
-
- elif mat: # No face image. If we have a material, search for an MTex image.
- for mtex in mat.texture_slots:
- if mtex and mtex.texture.type == 'IMAGE':
- try:
- filepath = copy_image(mtex.texture.image)
-# filepath = mtex.texture.image.filepath.split('\\')[-1].split('/')[-1]
- file.write('map_Kd %s\n' % repr(filepath)[1:-1]) # Diffuse mapping image
- break
- except:
- # Texture has no image though it's an image type, best ignore.
- pass
-
- file.write('\n\n')
-
- file.close()
-
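For reference, a single entry written by write_mtl above looks like the following (values illustrative; Ns comes from the 1-511 hardness remap, and map_Kd only appears when a face image or an image texture slot is found):

newmtl Material_tex.png
Ns 96.078431
Ka 0.050000 0.050000 0.050000
Kd 0.640000 0.640000 0.640000
Ks 0.500000 0.500000 0.500000
Ni 1.000000
d 1.000000
illum 2
map_Kd tex.png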
-# XXX not used
-def copy_file(source, dest):
- file = open(source, 'rb')
- data = file.read()
- file.close()
-
- file = open(dest, 'wb')
- file.write(data)
- file.close()
-
-
-# XXX not used
-def copy_images(dest_dir):
- if dest_dir[-1] != os.sep:
- dest_dir += os.sep
-# if dest_dir[-1] != sys.sep:
-# dest_dir += sys.sep
-
- # Get unique image names
- uniqueImages = {}
- for matname, mat, image in mtl_dict.values(): # Only use image name
- # Get Texface images
- if image:
- uniqueImages[image] = image # Should use sets here. wait until Python 2.4 is default.
-
- # Get MTex images
- if mat:
- for mtex in mat.texture_slots:
- if mtex and mtex.texture.type == 'IMAGE':
- image_tex = mtex.texture.image
- if image_tex:
- try:
- uniqueImages[image_tex] = image_tex
- except:
- pass
-
- # Now copy images
- copyCount = 0
-
-# for bImage in uniqueImages.values():
-# image_path = bpy.path.abspath(bImage.filepath)
-# if bpy.sys.exists(image_path):
-# # Make a name for the target path.
-# dest_image_path = dest_dir + image_path.split('\\')[-1].split('/')[-1]
-# if not bpy.utils.exists(dest_image_path): # Image isnt already there
-# print('\tCopying "%s" > "%s"' % (image_path, dest_image_path))
-# copy_file(image_path, dest_image_path)
-# copyCount+=1
-
-# paths= bpy.util.copy_images(uniqueImages.values(), dest_dir)
-
- print('\tCopied %d images' % copyCount)
-
-
-def test_nurbs_compat(ob):
- if ob.type != 'CURVE':
- return False
-
- for nu in ob.data.splines:
- if nu.point_count_v == 1 and nu.type != 'BEZIER': # not a surface and not bezier
- return True
-
- return False
-
-
-def write_nurb(file, ob, ob_mat):
- tot_verts = 0
- cu = ob.data
-
- # use negative indices
- for nu in cu.splines:
- if nu.type == 'POLY':
- DEG_ORDER_U = 1
- else:
- DEG_ORDER_U = nu.order_u - 1 # odd but tested to be correct
-
- if nu.type == 'BEZIER':
- print("\tWarning, bezier curve:", ob.name, "only poly and nurbs curves supported")
- continue
-
- if nu.point_count_v > 1:
- print("\tWarning, surface:", ob.name, "only poly and nurbs curves supported")
- continue
-
- if len(nu.points) <= DEG_ORDER_U:
- print("\tWarning, order_u is lower then vert count, skipping:", ob.name)
- continue
-
- pt_num = 0
- do_closed = nu.use_cyclic_u
- do_endpoints = (do_closed == 0) and nu.use_endpoint_u
-
- for pt in nu.points:
- pt = ob_mat * pt.co.copy().resize3D()
- file.write('v %.6f %.6f %.6f\n' % (pt[0], pt[1], pt[2]))
- pt_num += 1
- tot_verts += pt_num
-
- file.write('g %s\n' % (fixName(ob.name))) # fixName(ob.getData(1)) could use the data name too
- file.write('cstype bspline\n') # not ideal, hard coded
- file.write('deg %d\n' % DEG_ORDER_U) # not used for curves but most files have it still
-
- curve_ls = [-(i+1) for i in range(pt_num)]
-
- # 'curv' keyword
- if do_closed:
- if DEG_ORDER_U == 1:
- pt_num += 1
- curve_ls.append(-1)
- else:
- pt_num += DEG_ORDER_U
- curve_ls = curve_ls + curve_ls[0:DEG_ORDER_U]
-
- file.write('curv 0.0 1.0 %s\n' % (' '.join([str(i) for i in curve_ls]))) # Blender has no U and V values for the curve
-
- # 'parm' keyword
- tot_parm = (DEG_ORDER_U + 1) + pt_num
- tot_parm_div = float(tot_parm-1)
- parm_ls = [(i/tot_parm_div) for i in range(tot_parm)]
-
- if do_endpoints: # end points, force param
- for i in range(DEG_ORDER_U+1):
- parm_ls[i] = 0.0
- parm_ls[-(1+i)] = 1.0
-
- file.write('parm u %s\n' % ' '.join( [str(i) for i in parm_ls] ))
-
- file.write('end\n')
-
- return tot_verts
-
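To make write_nurb concrete: for an open 4-point POLY spline (DEG_ORDER_U is 1, no cyclic or endpoint flags), the emitted OBJ fragment would look roughly like this. Coordinates are illustrative, and the negative indices in curv refer back to the v lines just written, most recent first:

v 0.000000 0.000000 0.000000
v 1.000000 0.000000 0.000000
v 1.000000 1.000000 0.000000
v 2.000000 1.000000 0.000000
g Curve
cstype bspline
deg 1
curv 0.0 1.0 -1 -2 -3 -4
parm u 0.0 0.2 0.4 0.6 0.8 1.0
end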
-def write_file(filepath, objects, scene,
- EXPORT_TRI=False,
- EXPORT_EDGES=False,
- EXPORT_NORMALS=False,
- EXPORT_NORMALS_HQ=False,
- EXPORT_UV=True,
- EXPORT_MTL=True,
- EXPORT_COPY_IMAGES=False,
- EXPORT_APPLY_MODIFIERS=True,
- EXPORT_ROTX90=True,
- EXPORT_BLEN_OBS=True,
- EXPORT_GROUP_BY_OB=False,
- EXPORT_GROUP_BY_MAT=False,
- EXPORT_KEEP_VERT_ORDER=False,
- EXPORT_POLYGROUPS=False,
- EXPORT_CURVE_AS_NURBS=True):
- '''
- Basic write function. The context and options must be already set
- This can be accessed externally
- eg.
- write( 'c:\\test\\foobar.obj', Blender.Object.GetSelected() ) # Using default options.
- '''
-
- # XXX
- import math
-
- def veckey3d(v):
- return round(v.x, 6), round(v.y, 6), round(v.z, 6)
-
- def veckey2d(v):
- return round(v[0], 6), round(v[1], 6)
- # return round(v.x, 6), round(v.y, 6)
-
- def findVertexGroupName(face, vWeightMap):
- """
- Searches the vertexDict to see what group is assigned to a given face.
- We use a frequency system in order to sort out the name, because a given vertex can
- belong to two or more groups at the same time. To find the right name for the face,
- we list all the possible vertex group names with their frequency and then sort by
- frequency in descending order. The top element, the one shared by the highest number
- of vertices, is the face's group.
- """
- weightDict = {}
- for vert_index in face.vertices:
-# for vert in face:
- vWeights = vWeightMap[vert_index]
-# vWeights = vWeightMap[vert]
- for vGroupName, weight in vWeights:
- weightDict[vGroupName] = weightDict.get(vGroupName, 0) + weight
-
- if weightDict:
- alist = [(weight,vGroupName) for vGroupName, weight in weightDict.items()] # sort least to greatest amount of weight
- alist.sort()
- return(alist[-1][1]) # highest value last
- else:
- return '(null)'
-
- # TODO: implement this in C? dunno how it should be called...
- def getVertsFromGroup(me, group_index):
- ret = []
-
- for i, v in enumerate(me.vertices):
- for g in v.groups:
- if g.group == group_index:
- ret.append((i, g.weight))
-
- return ret
-
-
- print('OBJ Export path: %r' % filepath)
- temp_mesh_name = '~tmp-mesh'
-
- time1 = time.clock()
-# time1 = sys.time()
-# scn = Scene.GetCurrent()
-
- file = open(filepath, "w")
-
- # Write Header
- file.write('# Blender v%s OBJ File: %r\n' % (bpy.app.version_string, os.path.basename(bpy.data.filepath)))
- file.write('# www.blender.org\n')
-
- # Tell the obj file what material file to use.
- if EXPORT_MTL:
- mtlfilepath = os.path.splitext(filepath)[0] + ".mtl"
- file.write('mtllib %s\n' % repr(os.path.basename(mtlfilepath))[1:-1]) # filepath can contain non utf8 chars, use repr
-
- if EXPORT_ROTX90:
- mat_xrot90= mathutils.Matrix.Rotation(-math.pi/2, 4, 'X')
-
- # Initialize totals, these are updated each object
- totverts = totuvco = totno = 1
-
- face_vert_index = 1
-
- globalNormals = {}
-
- # A Dict of Materials
- # (material.name, image.name):matname_imagename # matname_imagename has gaps removed.
- mtl_dict = {}
-
- # Get all meshes
- for ob_main in objects:
-
- # ignore dupli children
- if ob_main.parent and ob_main.parent.dupli_type != 'NONE':
- # XXX
- print(ob_main.name, 'is a dupli child - ignoring')
- continue
-
- obs = []
- if ob_main.dupli_type != 'NONE':
- # XXX
- print('creating dupli_list on', ob_main.name)
- ob_main.create_dupli_list(scene)
-
- obs = [(dob.object, dob.matrix) for dob in ob_main.dupli_list]
-
- # XXX debug print
- print(ob_main.name, 'has', len(obs), 'dupli children')
- else:
- obs = [(ob_main, ob_main.matrix_world)]
-
- for ob, ob_mat in obs:
-
- # Nurbs curve support
- if EXPORT_CURVE_AS_NURBS and test_nurbs_compat(ob):
- if EXPORT_ROTX90:
- ob_mat = ob_mat * mat_xrot90
- totverts += write_nurb(file, ob, ob_mat)
- continue
- # END NURBS
-
- if ob.type != 'MESH':
- continue
-
- me = ob.create_mesh(scene, EXPORT_APPLY_MODIFIERS, 'PREVIEW')
-
- if EXPORT_ROTX90:
- me.transform(mat_xrot90 * ob_mat)
- else:
- me.transform(ob_mat)
-
-# # Will work for non meshes now! :)
-# me= BPyMesh.getMeshFromObject(ob, containerMesh, EXPORT_APPLY_MODIFIERS, EXPORT_POLYGROUPS, scn)
-# if not me:
-# continue
-
- if EXPORT_UV:
- faceuv = len(me.uv_textures) > 0
- if faceuv:
- uv_layer = me.uv_textures.active.data[:]
- else:
- faceuv = False
-
- me_verts = me.vertices[:]
-
- # XXX - todo, find a better way to do triangulation
- # ...removed convert_to_triface because it relies on editmesh
- '''
- # We have a valid mesh
- if EXPORT_TRI and me.faces:
- # Add a dummy object to it.
- has_quads = False
- for f in me.faces:
- if f.vertices[3] != 0:
- has_quads = True
- break
-
- if has_quads:
- newob = bpy.data.objects.new('temp_object', me)
- # if we forget to set Object.data - crash
- scene.objects.link(newob)
- newob.convert_to_triface(scene)
- # mesh will still be there
- scene.objects.unlink(newob)
- '''
-
- # Make our own list so it can be sorted to reduce context switching
- face_index_pairs = [ (face, index) for index, face in enumerate(me.faces)]
- # faces = [ f for f in me.faces ]
-
- if EXPORT_EDGES:
- edges = me.edges
- else:
- edges = []
-
- if not (len(face_index_pairs)+len(edges)+len(me.vertices)): # Make sure there is something to write
-
- # clean up
- bpy.data.meshes.remove(me)
-
- continue # don't bother with this mesh.
-
- # XXX
- # High Quality Normals
- if EXPORT_NORMALS and face_index_pairs:
- me.calc_normals()
-# if EXPORT_NORMALS_HQ:
-# BPyMesh.meshCalcNormals(me)
-# else:
-# # transforming normals is incorrect
-# # when the matrix is scaled,
-# # better to recalculate them
-# me.calcNormals()
-
- materials = me.materials
-
- materialNames = []
- materialItems = [m for m in materials]
- if materials:
- for mat in materials:
- if mat: # !=None
- materialNames.append(mat.name)
- else:
- materialNames.append(None)
- # Can't use a list comprehension because some materials are None.
- # materialNames = map(lambda mat: mat.name, materials) # Blender bug, doesn't account for null materials, still broken.
-
- # Possibly there are null materials; this will mess up indices,
- # but at least it will export. Wait until Blender gets fixed.
- materialNames.extend((16-len(materialNames)) * [None])
- materialItems.extend((16-len(materialItems)) * [None])
-
- # Sort by Material, then images
- # so we don't over context switch in the obj file.
- if EXPORT_KEEP_VERT_ORDER:
- pass
- elif faceuv:
- face_index_pairs.sort(key=lambda a: (a[0].material_index, hash(uv_layer[a[1]].image), a[0].use_smooth))
- elif len(materials) > 1:
- face_index_pairs.sort(key = lambda a: (a[0].material_index, a[0].use_smooth))
- else:
- # no materials
- face_index_pairs.sort(key = lambda a: a[0].use_smooth)
-# if EXPORT_KEEP_VERT_ORDER:
-# pass
-# elif faceuv:
-# try: faces.sort(key = lambda a: (a.mat, a.image, a.use_smooth))
-# except: faces.sort(lambda a,b: cmp((a.mat, a.image, a.use_smooth), (b.mat, b.image, b.use_smooth)))
-# elif len(materials) > 1:
-# try: faces.sort(key = lambda a: (a.mat, a.use_smooth))
-# except: faces.sort(lambda a,b: cmp((a.mat, a.use_smooth), (b.mat, b.use_smooth)))
-# else:
-# # no materials
-# try: faces.sort(key = lambda a: a.use_smooth)
-# except: faces.sort(lambda a,b: cmp(a.use_smooth, b.use_smooth))
-
- # Set the default mat to no material and no image.
- contextMat = (0, 0) # Can never be this, so we will label a new material the first chance we get.
- contextSmooth = None # Will either be true or false, set bad to force initialization switch.
-
- if EXPORT_BLEN_OBS or EXPORT_GROUP_BY_OB:
- name1 = ob.name
- name2 = ob.data.name
- if name1 == name2:
- obnamestring = fixName(name1)
- else:
- obnamestring = '%s_%s' % (fixName(name1), fixName(name2))
-
- if EXPORT_BLEN_OBS:
- file.write('o %s\n' % obnamestring) # Write Object name
- else: # if EXPORT_GROUP_BY_OB:
- file.write('g %s\n' % obnamestring)
-
-
- # Vert
- for v in me_verts:
- file.write('v %.6f %.6f %.6f\n' % tuple(v.co))
-
- # UV
- if faceuv:
- uv_face_mapping = [[0,0,0,0] for i in range(len(face_index_pairs))] # a bit of a waste for tri's :/
-
- uv_dict = {} # could use a set() here
- uv_layer = me.uv_textures.active.data
- for f, f_index in face_index_pairs:
- for uv_index, uv in enumerate(uv_layer[f_index].uv):
- uvkey = veckey2d(uv)
- try:
- uv_face_mapping[f_index][uv_index] = uv_dict[uvkey]
- except:
- uv_face_mapping[f_index][uv_index] = uv_dict[uvkey] = len(uv_dict)
- file.write('vt %.6f %.6f\n' % tuple(uv))
-
- uv_unique_count = len(uv_dict)
-# del uv, uvkey, uv_dict, f_index, uv_index
- # Only need uv_unique_count and uv_face_mapping
-
- # NORMAL, Smooth/Non smoothed.
- if EXPORT_NORMALS:
- for f, f_index in face_index_pairs:
- if f.use_smooth:
- for v_idx in f.vertices:
- v = me_verts[v_idx]
- noKey = veckey3d(v.normal)
- if noKey not in globalNormals:
- globalNormals[noKey] = totno
- totno +=1
- file.write('vn %.6f %.6f %.6f\n' % noKey)
- else:
- # Hard, 1 normal from the face.
- noKey = veckey3d(f.normal)
- if noKey not in globalNormals:
- globalNormals[noKey] = totno
- totno +=1
- file.write('vn %.6f %.6f %.6f\n' % noKey)
-
- if not faceuv:
- f_image = None
-
- # XXX
- if EXPORT_POLYGROUPS:
- # Retrieve the list of vertex groups
-# vertGroupNames = me.getVertGroupNames()
-
- currentVGroup = ''
- # Create a dictionary keyed by face id and listing, for each vertex, the vertex groups it belongs to
- vgroupsMap = [[] for _i in range(len(me_verts))]
-# vgroupsMap = [[] for _i in xrange(len(me_verts))]
- for g in ob.vertex_groups:
-# for vertexGroupName in vertGroupNames:
- for v_idx, vWeight in getVertsFromGroup(me, g.index):
-# for v_idx, vWeight in me.getVertsFromGroup(vertexGroupName, 1):
- vgroupsMap[v_idx].append((g.name, vWeight))
-
- for f, f_index in face_index_pairs:
- f_v = [me_verts[v_idx] for v_idx in f.vertices]
-
- # if f.vertices[3] == 0:
- # f_v.pop()
-
-# f_v= f.v
- f_smooth= f.use_smooth
- f_mat = min(f.material_index, len(materialNames)-1)
-# f_mat = min(f.mat, len(materialNames)-1)
- if faceuv:
-
- tface = uv_layer[f_index]
-
- f_image = tface.image
- f_uv = tface.uv
- # f_uv= [tface.uv1, tface.uv2, tface.uv3]
- # if len(f.vertices) == 4:
- # f_uv.append(tface.uv4)
-# f_image = f.image
-# f_uv= f.uv
-
- # MAKE KEY
- if faceuv and f_image: # Object is always true.
- key = materialNames[f_mat], f_image.name
- else:
- key = materialNames[f_mat], None # No image, use None instead.
-
- # Write the vertex group
- if EXPORT_POLYGROUPS:
- if len(ob.vertex_groups):
- # find what vertex group the face belongs to
- theVGroup = findVertexGroupName(f,vgroupsMap)
- if theVGroup != currentVGroup:
- currentVGroup = theVGroup
- file.write('g %s\n' % theVGroup)
-# # Write the vertex group
-# if EXPORT_POLYGROUPS:
-# if vertGroupNames:
-# # find what vertex group the face belongs to
-# theVGroup = findVertexGroupName(f,vgroupsMap)
-# if theVGroup != currentVGroup:
-# currentVGroup = theVGroup
-# file.write('g %s\n' % theVGroup)
-
- # CHECK FOR CONTEXT SWITCH
- if key == contextMat:
- pass # Context already switched, dont do anything
- else:
- if key[0] == None and key[1] == None:
- # Write a null material, since we know the context has changed.
- if EXPORT_GROUP_BY_MAT:
- # can be mat_image or (null)
- file.write('g %s_%s\n' % (fixName(ob.name), fixName(ob.data.name)) ) # can be mat_image or (null)
- file.write('usemtl (null)\n') # mat, image
-
- else:
- mat_data= mtl_dict.get(key)
- if not mat_data:
- # First add to global dict so we can export to mtl
- # Then write mtl
-
- # Make a new name from the mat and image name,
- # converting any spaces to underscores with fixName.
-
- # If there's no image, don't bother adding it to the name
- if key[1] == None:
- mat_data = mtl_dict[key] = ('%s'%fixName(key[0])), materialItems[f_mat], f_image
- else:
- mat_data = mtl_dict[key] = ('%s_%s' % (fixName(key[0]), fixName(key[1]))), materialItems[f_mat], f_image
-
- if EXPORT_GROUP_BY_MAT:
- file.write('g %s_%s_%s\n' % (fixName(ob.name), fixName(ob.data.name), mat_data[0]) ) # can be mat_image or (null)
-
- file.write('usemtl %s\n' % mat_data[0]) # can be mat_image or (null)
-
- contextMat = key
- if f_smooth != contextSmooth:
- if f_smooth: # was off, now on
- file.write('s 1\n')
- contextSmooth = f_smooth
- else: # was on, now off
- file.write('s off\n')
- contextSmooth = f_smooth
-
- file.write('f')
- if faceuv:
- if EXPORT_NORMALS:
- if f_smooth: # Smoothed, use vertex normals
- for vi, v in enumerate(f_v):
- file.write( ' %d/%d/%d' % \
- (v.index + totverts,
- totuvco + uv_face_mapping[f_index][vi],
- globalNormals[ veckey3d(v.normal) ]) ) # vert, uv, normal
-
- else: # No smoothing, face normals
- no = globalNormals[ veckey3d(f.normal) ]
- for vi, v in enumerate(f_v):
- file.write( ' %d/%d/%d' % \
- (v.index + totverts,
- totuvco + uv_face_mapping[f_index][vi],
- no) ) # vert, uv, normal
- else: # No Normals
- for vi, v in enumerate(f_v):
- file.write( ' %d/%d' % (\
- v.index + totverts,\
- totuvco + uv_face_mapping[f_index][vi])) # vert, uv
-
- face_vert_index += len(f_v)
-
- else: # No UV's
- if EXPORT_NORMALS:
- if f_smooth: # Smoothed, use vertex normals
- for v in f_v:
- file.write( ' %d//%d' %
- (v.index + totverts, globalNormals[ veckey3d(v.normal) ]) )
- else: # No smoothing, face normals
- no = globalNormals[ veckey3d(f.normal) ]
- for v in f_v:
- file.write( ' %d//%d' % (v.index + totverts, no) )
- else: # No Normals
- for v in f_v:
- file.write( ' %d' % (v.index + totverts) )
-
- file.write('\n')
-
- # Write edges.
- if EXPORT_EDGES:
- for ed in edges:
- if ed.is_loose:
- file.write('f %d %d\n' % (ed.vertices[0] + totverts, ed.vertices[1] + totverts))
-
- # Make the indices global rather than per mesh
- totverts += len(me_verts)
- if faceuv:
- totuvco += uv_unique_count
-
- # clean up
- bpy.data.meshes.remove(me)
-
- if ob_main.dupli_type != 'NONE':
- ob_main.free_dupli_list()
-
- file.close()
-
-
- # Now we have all our materials, save them
- if EXPORT_MTL:
- write_mtl(scene, mtlfilepath, EXPORT_COPY_IMAGES, mtl_dict)
-# if EXPORT_COPY_IMAGES:
-# dest_dir = os.path.basename(filepath)
-# # dest_dir = filepath
-# # # Remove chars until we are just the path.
-# # while dest_dir and dest_dir[-1] not in '\\/':
-# # dest_dir = dest_dir[:-1]
-# if dest_dir:
-# copy_images(dest_dir, mtl_dict)
-# else:
-# print('\tError: "%s" could not be used as a base for an image path.' % filepath)
-
- print("OBJ Export time: %.2f" % (time.clock() - time1))
-
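As the docstring above notes, write_file can be driven directly. A hypothetical sketch with the 2.5-era API (the output path is an illustrative placeholder):

import bpy

write_file("/tmp/dump.obj", bpy.context.selected_objects, bpy.context.scene,
           EXPORT_TRI=True, EXPORT_NORMALS=True)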
-#
-def _write(context, filepath,
- EXPORT_TRI, # ok
- EXPORT_EDGES,
- EXPORT_NORMALS, # not yet
- EXPORT_NORMALS_HQ, # not yet
- EXPORT_UV, # ok
- EXPORT_MTL,
- EXPORT_COPY_IMAGES,
- EXPORT_APPLY_MODIFIERS, # ok
- EXPORT_ROTX90, # wrong
- EXPORT_BLEN_OBS,
- EXPORT_GROUP_BY_OB,
- EXPORT_GROUP_BY_MAT,
- EXPORT_KEEP_VERT_ORDER,
- EXPORT_POLYGROUPS,
- EXPORT_CURVE_AS_NURBS,
- EXPORT_SEL_ONLY, # ok
- EXPORT_ALL_SCENES, # XXX not working atm
- EXPORT_ANIMATION): # Not used
-
- base_name, ext = os.path.splitext(filepath)
- context_name = [base_name, '', '', ext] # Base name, scene name, frame number, extension
-
- orig_scene = context.scene
-
- # Exit edit mode before exporting, so current object states are exported properly.
- if bpy.ops.object.mode_set.poll():
- bpy.ops.object.mode_set(mode='OBJECT')
-
-# if EXPORT_ALL_SCENES:
-# export_scenes = bpy.data.scenes
-# else:
-# export_scenes = [orig_scene]
-
- # XXX only exporting one scene atm since changing
- # current scene is not possible.
- # Brecht says that ideally in 2.5 we won't need such a function,
- # allowing multiple scenes open at once.
- export_scenes = [orig_scene]
-
- # Export all scenes.
- for scene in export_scenes:
- # scene.makeCurrent() # If already current, this is not slow.
- # context = scene.getRenderingContext()
- orig_frame = scene.frame_current
-
- if EXPORT_ALL_SCENES: # Add scene name into the context_name
- context_name[1] = '_%s' % bpy.path.clean_name(scene.name) # WARNING, it's possible this could cause a collision; we could fix it if we were feeling paranoid.
-
- # Export an animation?
- if EXPORT_ANIMATION:
- scene_frames = range(scene.frame_start, scene.frame_end + 1) # Up to and including the end frame.
- else:
- scene_frames = [orig_frame] # Dont export an animation.
-
- # Loop through all frames in the scene and export.
- for frame in scene_frames:
- if EXPORT_ANIMATION: # Add frame to the filepath.
- context_name[2] = '_%.6d' % frame
-
- scene.frame_current = frame
- if EXPORT_SEL_ONLY:
- objects = context.selected_objects
- else:
- objects = scene.objects
-
- full_path= ''.join(context_name)
-
- # erm... bit of a problem here, this can overwrite files when exporting frames. not too bad.
- # EXPORT THE FILE.
- write_file(full_path, objects, scene,
- EXPORT_TRI,
- EXPORT_EDGES,
- EXPORT_NORMALS,
- EXPORT_NORMALS_HQ,
- EXPORT_UV,
- EXPORT_MTL,
- EXPORT_COPY_IMAGES,
- EXPORT_APPLY_MODIFIERS,
- EXPORT_ROTX90,
- EXPORT_BLEN_OBS,
- EXPORT_GROUP_BY_OB,
- EXPORT_GROUP_BY_MAT,
- EXPORT_KEEP_VERT_ORDER,
- EXPORT_POLYGROUPS,
- EXPORT_CURVE_AS_NURBS)
-
-
- scene.frame_current = orig_frame
-
- # Restore old active scene.
-# orig_scene.makeCurrent()
-# Window.WaitCursor(0)
-
-
-'''
-Currently the exporter lacks these features:
-* multiple scene export (only active scene is written)
-* particles
-'''
-
-
-def save(operator, context, filepath="",
- use_triangles=False,
- use_edges=False,
- use_normals=False,
- use_hq_normals=False,
- use_uvs=True,
- use_materials=True,
- copy_images=False,
- use_modifiers=True,
- use_rotate_x90=True,
- use_blen_objects=True,
- group_by_object=False,
- group_by_material=False,
- keep_vertex_order=False,
- use_vertex_groups=False,
- use_nurbs=True,
- use_selection=True,
- use_all_scenes=False,
- use_animation=False,
- ):
-
- _write(context, filepath,
- EXPORT_TRI=use_triangles,
- EXPORT_EDGES=use_edges,
- EXPORT_NORMALS=use_normals,
- EXPORT_NORMALS_HQ=use_hq_normals,
- EXPORT_UV=use_uvs,
- EXPORT_MTL=use_materials,
- EXPORT_COPY_IMAGES=copy_images,
- EXPORT_APPLY_MODIFIERS=use_modifiers,
- EXPORT_ROTX90=use_rotate_x90,
- EXPORT_BLEN_OBS=use_blen_objects,
- EXPORT_GROUP_BY_OB=group_by_object,
- EXPORT_GROUP_BY_MAT=group_by_material,
- EXPORT_KEEP_VERT_ORDER=keep_vertex_order,
- EXPORT_POLYGROUPS=use_vertex_groups,
- EXPORT_CURVE_AS_NURBS=use_nurbs,
- EXPORT_SEL_ONLY=use_selection,
- EXPORT_ALL_SCENES=use_all_scenes,
- EXPORT_ANIMATION=use_animation,
- )
-
- return {'FINISHED'}
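Likewise, save() can be called from another script instead of going through the export operator. A hedged sketch, assuming the module is importable as io_scene_obj.export_obj and that None is acceptable for the unused operator argument; the path is illustrative:

import bpy
from io_scene_obj import export_obj

export_obj.save(None, bpy.context, filepath="/tmp/scene.obj",
                use_triangles=True, use_selection=False)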
diff --git a/release/scripts/op/io_scene_obj/import_obj.py b/release/scripts/op/io_scene_obj/import_obj.py
deleted file mode 100644
index a5605d23cb3..00000000000
--- a/release/scripts/op/io_scene_obj/import_obj.py
+++ /dev/null
@@ -1,1221 +0,0 @@
-# ##### BEGIN GPL LICENSE BLOCK #####
-#
-# This program is free software; you can redistribute it and/or
-# modify it under the terms of the GNU General Public License
-# as published by the Free Software Foundation; either version 2
-# of the License, or (at your option) any later version.
-#
-# This program is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License
-# along with this program; if not, write to the Free Software Foundation,
-# Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
-#
-# ##### END GPL LICENSE BLOCK #####
-
-# <pep8 compliant>
-
-# Script copyright (C) Campbell Barton
-# Contributors: Campbell Barton, Jiri Hnidek, Paolo Ciccone
-
-"""
-This script imports Wavefront OBJ files into Blender.
-
-Usage:
-Run this script from the "File->Import" menu and then load the desired OBJ file.
-Note: this loads mesh objects and materials only; nurbs and curves are not supported.
-
-http://wiki.blender.org/index.php/Scripts/Manual/Import/wavefront_obj
-"""
-
-import os
-import time
-import bpy
-import mathutils
-from geometry import PolyFill
-from io_utils import load_image, unpack_list, unpack_face_list
-
-
-def BPyMesh_ngon(from_data, indices, PREF_FIX_LOOPS= True):
- '''
- Takes a polyline of indices (fgon)
- and returns a list of face index lists.
- Designed to be used for importers that need indices for an fgon to create from existing verts.
-
- from_data: either a mesh, or a list/tuple of vectors.
- indices: a list of indices to use; this is the ordered, closed polyline to fill and can be a subset of the data given.
- PREF_FIX_LOOPS: if this is enabled, polylines that use loops to make multiple polylines are dealt with correctly.
- '''
-
- if not set: # Vestigial pre-Python-2.4 guard: the set builtin always exists now, so loop fixing is never disabled here.
- PREF_FIX_LOOPS= False
-
- Vector= mathutils.Vector
- if not indices:
- return []
-
- # return []
- def rvec(co): return round(co.x, 6), round(co.y, 6), round(co.z, 6)
- def mlen(co): return abs(co[0])+abs(co[1])+abs(co[2]) # manhattan length of a vector, faster than length
-
- def vert_treplet(v, i):
- return v, rvec(v), i, mlen(v)
-
- def ed_key_mlen(v1, v2):
- if v1[3] > v2[3]:
- return v2[1], v1[1]
- else:
- return v1[1], v2[1]
-
-
- if not PREF_FIX_LOOPS:
- '''
- Normal single concave loop filling
- '''
- if type(from_data) in (tuple, list):
- verts= [Vector(from_data[i]) for ii, i in enumerate(indices)]
- else:
- verts= [from_data.vertices[i].co for ii, i in enumerate(indices)]
-
- for i in range(len(verts)-1, 0, -1): # same as reversed(xrange(1, len(verts))):
- if verts[i][1]==verts[i-1][0]:
- verts.pop(i-1)
-
- fill= PolyFill([verts])
-
- else:
- '''
- Separate this loop into multiple loops by finding edges that are used twice
- This is used by lightwave LWO files a lot
- '''
-
- if type(from_data) in (tuple, list):
- verts= [vert_treplet(Vector(from_data[i]), ii) for ii, i in enumerate(indices)]
- else:
- verts= [vert_treplet(from_data.vertices[i].co, ii) for ii, i in enumerate(indices)]
-
- edges= [(i, i-1) for i in range(len(verts))]
- if edges:
- edges[0]= (0,len(verts)-1)
-
- if not verts:
- return []
-
-
- edges_used= set()
- edges_doubles= set()
- # We need to check if any edges are used twice location based.
- for ed in edges:
- edkey= ed_key_mlen(verts[ed[0]], verts[ed[1]])
- if edkey in edges_used:
- edges_doubles.add(edkey)
- else:
- edges_used.add(edkey)
-
- # Store a list of unconnected loop segments split by double edges.
- # will join later
- loop_segments= []
-
- v_prev= verts[0]
- context_loop= [v_prev]
- loop_segments= [context_loop]
-
- for v in verts:
- if v!=v_prev:
- # Are we crossing an edge we removed?
- if ed_key_mlen(v, v_prev) in edges_doubles:
- context_loop= [v]
- loop_segments.append(context_loop)
- else:
- if context_loop and context_loop[-1][1]==v[1]:
- #raise "as"
- pass
- else:
- context_loop.append(v)
-
- v_prev= v
- # Now join loop segments
-
- def join_seg(s1,s2):
- if s2[-1][1]==s1[0][1]: #
- s1,s2= s2,s1
- elif s1[-1][1]==s2[0][1]:
- pass
- else:
- return False
-
- # If we're still here, s1 and s2 are two segments of the same polyline
- s1.pop() # remove the last vert from s1
- s1.extend(s2) # add segment 2 to segment 1
-
- if s1[0][1]==s1[-1][1]: # remove endpoints double
- s1.pop()
-
- s2[:]= [] # Empty this segment s2 so we don't use it again.
- return True
-
- joining_segments= True
- while joining_segments:
- joining_segments= False
- segcount= len(loop_segments)
-
- for j in range(segcount-1, -1, -1): #reversed(range(segcount)):
- seg_j= loop_segments[j]
- if seg_j:
- for k in range(j-1, -1, -1): # reversed(range(j)):
- if not seg_j:
- break
- seg_k= loop_segments[k]
-
- if seg_k and join_seg(seg_j, seg_k):
- joining_segments= True
-
- loop_list= loop_segments
-
- for verts in loop_list:
- while verts and verts[0][1]==verts[-1][1]:
- verts.pop()
-
- loop_list= [verts for verts in loop_list if len(verts)>2]
- # DONE DEALING WITH LOOP FIXING
-
-
- # vert mapping
- vert_map= [None]*len(indices)
- ii=0
- for verts in loop_list:
- if len(verts)>2:
- for i, vert in enumerate(verts):
- vert_map[i+ii]= vert[2]
- ii+=len(verts)
-
- fill= PolyFill([ [v[0] for v in loop] for loop in loop_list ])
- #draw_loops(loop_list)
- #raise 'done loop'
- # map to original indices
- fill= [[vert_map[i] for i in reversed(f)] for f in fill]
-
-
- if not fill:
- print('Warning, cannot scanfill, falling back on a triangle fan.')
- fill= [ [0, i-1, i] for i in range(2, len(indices)) ]
- else:
- # Use real scanfill.
- # See if it's flipped the wrong way.
- flip= None
- for fi in fill:
- if flip != None:
- break
- for i, vi in enumerate(fi):
- if vi==0 and fi[i-1]==1:
- flip= False
- break
- elif vi==1 and fi[i-1]==0:
- flip= True
- break
-
- if not flip:
- for i, fi in enumerate(fill):
- fill[i]= tuple([ii for ii in reversed(fi)])
-
- return fill
-
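As a sanity check on the fallback branch above, the triangle fan produced when PolyFill yields nothing turns an n-gon's local indices into n-2 triangles:

indices = [10, 11, 12, 13, 14]  # any five vert indices
fill = [[0, i - 1, i] for i in range(2, len(indices))]  # the fallback expression
print(fill)  # [[0, 1, 2], [0, 2, 3], [0, 3, 4]] -- local indices, mapped back by the caller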
-def line_value(line_split):
- '''
- Returns a single string representing the value for this line.
- None will be returned if there's only one word.
- '''
- length= len(line_split)
- if length == 1:
- return None
-
- elif length == 2:
- return line_split[1]
-
- elif length > 2:
- return ' '.join( line_split[1:] )
-
-
-def obj_image_load(imagepath, DIR, IMAGE_SEARCH):
- if '_' in imagepath:
- image= load_image(imagepath.replace('_', ' '), DIR)
- if image:
- return image
-
- image = load_image(imagepath, DIR)
- if image:
- return image
-
- print("failed to load '%s' doesn't exist", imagepath)
- return None
-
-# def obj_image_load(imagepath, DIR, IMAGE_SEARCH):
-# '''
-# Mainly uses comprehensiveImageLoad
-# but tries to replace '_' with ' ' for Max's exporter replaces spaces with underscores.
-# '''
-
-# if '_' in imagepath:
-# image= BPyImage.comprehensiveImageLoad(imagepath, DIR, PLACE_HOLDER= False, RECURSIVE= IMAGE_SEARCH)
-# if image: return image
-# # Did the exporter rename the image?
-# image= BPyImage.comprehensiveImageLoad(imagepath.replace('_', ' '), DIR, PLACE_HOLDER= False, RECURSIVE= IMAGE_SEARCH)
-# if image: return image
-
-# # Return an image, placeholder if it dosnt exist
-# image= BPyImage.comprehensiveImageLoad(imagepath, DIR, PLACE_HOLDER= True, RECURSIVE= IMAGE_SEARCH)
-# return image
-
-
-def create_materials(filepath, material_libs, unique_materials, unique_material_images, IMAGE_SEARCH):
- '''
- Create all the used materials in this obj,
- assign colors and images to the materials from all referenced material libs
- '''
- DIR= os.path.dirname(filepath)
-
- #==================================================================================#
- # This function sets textures defined in .mtl file #
- #==================================================================================#
- def load_material_image(blender_material, context_material_name, imagepath, type):
-
- texture= bpy.data.textures.new(name=type, type='IMAGE')
-
- # Absolute path - c:\.. etc would work here
- image = obj_image_load(imagepath, DIR, IMAGE_SEARCH)
- has_data = False
-
- if image:
- texture.image = image
- has_data = image.has_data
-
- # Adds textures for materials (rendering)
- if type == 'Kd':
- if has_data and image.depth == 32:
- # Image has alpha
-
- # XXX bitmask won't work?
- mtex = blender_material.texture_slots.add()
- mtex.texture = texture
- mtex.texture_coords = 'UV'
- mtex.use_map_color_diffuse = True
- mtex.use_map_alpha = True
-
- texture.mipmap = True
- texture.interpolation = True
- texture.use_alpha = True
- blender_material.use_transparency = True
- blender_material.alpha = 0.0
- else:
- mtex = blender_material.texture_slots.add()
- mtex.texture = texture
- mtex.texture_coords = 'UV'
- mtex.use_map_color_diffuse = True
-
- # adds textures to faces (Textured/Alt-Z mode)
- # Only apply the diffuse texture to the face if the image has not been set with the inline usemat func.
- unique_material_images[context_material_name]= image, has_data # set the texface image
-
- elif type == 'Ka':
- mtex = blender_material.texture_slots.add()
- mtex.texture = texture
- mtex.texture_coords = 'UV'
- mtex.use_map_ambient = True
-# blender_material.setTexture(1, texture, Texture.TexCo.UV, Texture.MapTo.CMIR) # TODO- Add AMB to BPY API
-
- elif type == 'Ks':
- mtex = blender_material.texture_slots.add()
- mtex.texture = texture
- mtex.texture_coords = 'UV'
- mtex.use_map_specular = True
-# blender_material.setTexture(2, texture, Texture.TexCo.UV, Texture.MapTo.SPEC)
-
- elif type == 'Bump':
- mtex = blender_material.texture_slots.add()
- mtex.texture = texture
- mtex.texture_coords = 'UV'
- mtex.use_map_normal = True
-# blender_material.setTexture(3, texture, Texture.TexCo.UV, Texture.MapTo.NOR)
- elif type == 'D':
- mtex = blender_material.texture_slots.add()
- mtex.texture = texture
- mtex.texture_coords = 'UV'
- mtex.use_map_alpha = True
- blender_material.z_transparency = True
- blender_material.alpha = 0.0
-# blender_material.setTexture(4, texture, Texture.TexCo.UV, Texture.MapTo.ALPHA)
-# blender_material.mode |= Material.Modes.ZTRANSP
-# blender_material.alpha = 0.0
- # Todo, unset diffuse material alpha if it has an alpha channel
-
- elif type == 'refl':
- mtex = blender_material.texture_slots.add()
- mtex.texture = texture
- mtex.texture_coords = 'UV'
- mtex.use_map_reflect = True
-# blender_material.setTexture(5, texture, Texture.TexCo.UV, Texture.MapTo.REF)
-
-
- # Add an MTL with the same name as the obj if no MTLs are specified.
- temp_mtl = os.path.splitext((os.path.basename(filepath)))[0] + '.mtl'
-
- if os.path.exists(os.path.join(DIR, temp_mtl)) and temp_mtl not in material_libs:
- material_libs.append( temp_mtl )
- del temp_mtl
-
- #Create new materials
- for name in unique_materials: # .keys()
- if name != None:
- unique_materials[name]= bpy.data.materials.new(name)
- unique_material_images[name]= None, False # assign None to all material images to start with, add to later.
-
- unique_materials[None]= None
- unique_material_images[None]= None, False
-
- for libname in material_libs:
- mtlpath= os.path.join(DIR, libname)
- if not os.path.exists(mtlpath):
- print ("\tError Missing MTL: '%s'" % mtlpath)
- else:
- #print '\t\tloading mtl: "%s"' % mtlpath
- context_material= None
- mtl= open(mtlpath, 'rU')
- for line in mtl: #.xreadlines():
- if line.startswith('newmtl'):
- context_material_name= line_value(line.split())
- if context_material_name in unique_materials:
- context_material = unique_materials[ context_material_name ]
- else:
- context_material = None
-
- elif context_material:
- # we need to make a material to assign properties to it.
- line_split= line.split()
- line_lower= line.lower().lstrip()
- if line_lower.startswith('ka'):
- context_material.mirror_color = (float(line_split[1]), float(line_split[2]), float(line_split[3]))
- elif line_lower.startswith('kd'):
- context_material.diffuse_color = (float(line_split[1]), float(line_split[2]), float(line_split[3]))
- elif line_lower.startswith('ks'):
- context_material.specular_color = (float(line_split[1]), float(line_split[2]), float(line_split[3]))
- elif line_lower.startswith('ns'):
- context_material.specular_hardness = int((float(line_split[1])*0.51))
- elif line_lower.startswith('ni'): # Refraction index
- context_material.raytrace_transparency.ior = max(1, min(float(line_split[1]), 3)) # Between 1 and 3
- elif line_lower.startswith('d') or line_lower.startswith('tr'):
- context_material.alpha = float(line_split[1])
- elif line_lower.startswith('map_ka'):
- img_filepath= line_value(line.split())
- if img_filepath:
- load_material_image(context_material, context_material_name, img_filepath, 'Ka')
- elif line_lower.startswith('map_ks'):
- img_filepath= line_value(line.split())
- if img_filepath:
- load_material_image(context_material, context_material_name, img_filepath, 'Ks')
- elif line_lower.startswith('map_kd'):
- img_filepath= line_value(line.split())
- if img_filepath:
- load_material_image(context_material, context_material_name, img_filepath, 'Kd')
- elif line_lower.startswith('map_bump'):
- img_filepath= line_value(line.split())
- if img_filepath:
- load_material_image(context_material, context_material_name, img_filepath, 'Bump')
- elif line_lower.startswith('map_d') or line_lower.startswith('map_tr'): # Alpha map - Dissolve
- img_filepath= line_value(line.split())
- if img_filepath:
- load_material_image(context_material, context_material_name, img_filepath, 'D')
-
- elif line_lower.startswith('refl'): # Reflectionmap
- img_filepath= line_value(line.split())
- if img_filepath:
- load_material_image(context_material, context_material_name, img_filepath, 'refl')
- mtl.close()
-
-
-
-
-def split_mesh(verts_loc, faces, unique_materials, filepath, SPLIT_OB_OR_GROUP):
- '''
- Takes vert_loc and faces, and separates into multiple sets of
- (verts_loc, faces, unique_materials, dataname)
- '''
-
- filename = os.path.splitext((os.path.basename(filepath)))[0]
-
- if not SPLIT_OB_OR_GROUP:
- # use the filename for the object name since we aren't chopping up the mesh.
- return [(verts_loc, faces, unique_materials, filename)]
-
- def key_to_name(key):
- # if the key is a tuple, join it to make a string
- if not key:
- return filename # assume it's a string; make sure this stays true if the splitting code is changed
- else:
- return key
-
- # Return a key that makes the faces unique.
- face_split_dict= {}
-
- oldkey= -1 # initialize to a value that will never match the key
-
- for face in faces:
- key= face[4]
-
- if oldkey != key:
- # Check the key has changed.
- try:
- verts_split, faces_split, unique_materials_split, vert_remap= face_split_dict[key]
- except KeyError:
- faces_split= []
- verts_split= []
- unique_materials_split= {}
- vert_remap= [-1]*len(verts_loc)
-
- face_split_dict[key]= (verts_split, faces_split, unique_materials_split, vert_remap)
-
- oldkey= key
-
- face_vert_loc_indicies= face[0]
-
- # Remap verts to new vert list and add where needed
- for enum, i in enumerate(face_vert_loc_indicies):
- if vert_remap[i] == -1:
- new_index= len(verts_split)
- vert_remap[i]= new_index # set the new remapped index so we only add once and can reference next time.
- face_vert_loc_indicies[enum] = new_index # remap to the local index
- verts_split.append( verts_loc[i] ) # add the vert to the local verts
- else:
- face_vert_loc_indicies[enum] = vert_remap[i] # remap to the local index
-
- matname= face[2]
- if matname and matname not in unique_materials_split:
- unique_materials_split[matname] = unique_materials[matname]
-
- faces_split.append(face)
-
- # remove one of the items and reorder
- return [(value[0], value[1], value[2], key_to_name(key)) for key, value in list(face_split_dict.items())]
-
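For reference, each element of faces here is the 5-tuple built by the parser and unpacked in create_mesh below: face[0] is the vertex-index list, face[2] the material name, and face[4] the object/group key that split_mesh buckets by. A record with illustrative values:

face = ([0, 1, 2, 3],   # face_vert_loc_indicies
        [0, 1, 2, 3],   # face_vert_tex_indicies
        'Material',     # context_material
        '1',            # context_smooth_group
        'Cube')         # context_object -- the split key, face[4]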
-
-def create_mesh(new_objects, has_ngons, CREATE_FGONS, CREATE_EDGES, verts_loc, verts_tex, faces, unique_materials, unique_material_images, unique_smooth_groups, vertex_groups, dataname):
- '''
- Takes all the data gathered and generates a mesh, adding the new object to new_objects
- deals with fgons, sharp edges and assigning materials
- '''
- if not has_ngons:
- CREATE_FGONS= False
-
- if unique_smooth_groups:
- sharp_edges= {}
- smooth_group_users = {context_smooth_group: {} for context_smooth_group in list(unique_smooth_groups.keys())}
- context_smooth_group_old= -1
-
- # Split fgons into tri's
- fgon_edges= {} # Used for storing fgon keys
- if CREATE_EDGES:
- edges= []
-
- context_object= None
-
- # reverse loop through face indicies
- for f_idx in range(len(faces)-1, -1, -1):
-
- face_vert_loc_indicies,\
- face_vert_tex_indicies,\
- context_material,\
- context_smooth_group,\
- context_object= faces[f_idx]
-
- len_face_vert_loc_indicies = len(face_vert_loc_indicies)
-
- if len_face_vert_loc_indicies==1:
- faces.pop(f_idx) # can't add single-vert faces
-
- elif not face_vert_tex_indicies or len_face_vert_loc_indicies == 2: # faces that have no texture coords are lines
- if CREATE_EDGES:
- # generators are better in python 2.4+ but can't be used in 2.3
- # edges.extend( (face_vert_loc_indicies[i], face_vert_loc_indicies[i+1]) for i in xrange(len_face_vert_loc_indicies-1) )
- edges.extend( [(face_vert_loc_indicies[i], face_vert_loc_indicies[i+1]) for i in range(len_face_vert_loc_indicies-1)] )
-
- faces.pop(f_idx)
- else:
-
- # Smooth Group
- if unique_smooth_groups and context_smooth_group:
- # Is part of a smooth group and is a face
- if context_smooth_group_old is not context_smooth_group:
- edge_dict= smooth_group_users[context_smooth_group]
- context_smooth_group_old= context_smooth_group
-
- for i in range(len_face_vert_loc_indicies):
- i1= face_vert_loc_indicies[i]
- i2= face_vert_loc_indicies[i-1]
- if i1>i2: i1,i2= i2,i1
-
- try:
- edge_dict[i1,i2]+= 1
- except KeyError:
- edge_dict[i1,i2]= 1
-
- # FGons into triangles
- if has_ngons and len_face_vert_loc_indicies > 4:
-
- ngon_face_indices= BPyMesh_ngon(verts_loc, face_vert_loc_indicies)
- faces.extend(
- [(
- [face_vert_loc_indicies[ngon[0]], face_vert_loc_indicies[ngon[1]], face_vert_loc_indicies[ngon[2]] ],
- [face_vert_tex_indicies[ngon[0]], face_vert_tex_indicies[ngon[1]], face_vert_tex_indicies[ngon[2]] ],
- context_material,
- context_smooth_group,
- context_object)
- for ngon in ngon_face_indices]
- )
-
- # edges to make fgons
- if CREATE_FGONS:
- edge_users= {}
- for ngon in ngon_face_indices:
- for i in (0,1,2):
- i1= face_vert_loc_indicies[ngon[i ]]
- i2= face_vert_loc_indicies[ngon[i-1]]
- if i1>i2: i1,i2= i2,i1
-
- try:
- edge_users[i1,i2]+=1
- except KeyError:
- edge_users[i1,i2]= 1
-
- for key, users in edge_users.items():
- if users>1:
- fgon_edges[key]= None
-
- # remove all after 3, means we dont have to pop this one.
- faces.pop(f_idx)
-
-
- # Build sharp edges
- if unique_smooth_groups:
- for edge_dict in list(smooth_group_users.values()):
- for key, users in list(edge_dict.items()):
- if users==1: # This edge is on the boundary of a group
- sharp_edges[key]= None
-
-
- # map the material names to an index
- material_mapping = {name: i for i, name in enumerate(unique_materials)} # enumerate over unique_materials keys()
-
- materials= [None] * len(unique_materials)
-
- for name, index in list(material_mapping.items()):
- materials[index]= unique_materials[name]
-
- me= bpy.data.meshes.new(dataname)
-
- # make sure the list isn't too big
- for material in materials:
- me.materials.append(material)
-
- me.vertices.add(len(verts_loc))
- me.faces.add(len(faces))
-
- # verts_loc is a list of (x, y, z) tuples
- me.vertices.foreach_set("co", unpack_list(verts_loc))
-
- # faces is a list of (vert_indices, texco_indices, ...) tuples
- # XXX faces should contain either 3 or 4 verts
- # XXX no check for valid face indices
- me.faces.foreach_set("vertices_raw", unpack_face_list([f[0] for f in faces]))
-
- if verts_tex and me.faces:
- me.uv_textures.new()
-
- context_material_old= -1 # avoid a dict lookup
- mat= 0 # rare case it may be un-initialized.
- me_faces= me.faces
-
- for i, face in enumerate(faces):
- if len(face[0]) < 2:
- pass #raise "bad face"
- elif len(face[0])==2:
- if CREATE_EDGES:
- edges.append(face[0])
- else:
-
- blender_face = me.faces[i]
-
- face_vert_loc_indicies,\
- face_vert_tex_indicies,\
- context_material,\
- context_smooth_group,\
- context_object= face
-
-
-
- if context_smooth_group:
- blender_face.use_smooth = True
-
- if context_material:
- if context_material_old is not context_material:
- mat= material_mapping[context_material]
- context_material_old= context_material
-
- blender_face.material_index= mat
-# blender_face.mat= mat
-
-
- if verts_tex:
-
- blender_tface= me.uv_textures[0].data[i]
-
- if context_material:
- image, has_data = unique_material_images[context_material]
- if image: # Can be None if the material doesn't have an image.
- blender_tface.image = image
- blender_tface.use_image = True
- if has_data and image.depth == 32:
- blender_tface.blend_type = 'ALPHA'
-
- # BUG - Evil eekadoodle problem where faces that have vert index 0 location at 3 or 4 are shuffled.
- if len(face_vert_loc_indicies)==4:
- if face_vert_loc_indicies[2]==0 or face_vert_loc_indicies[3]==0:
- face_vert_tex_indicies= face_vert_tex_indicies[2], face_vert_tex_indicies[3], face_vert_tex_indicies[0], face_vert_tex_indicies[1]
- else: # length of 3
- if face_vert_loc_indicies[2]==0:
- face_vert_tex_indicies= face_vert_tex_indicies[1], face_vert_tex_indicies[2], face_vert_tex_indicies[0]
- # END EEEKADOODLE FIX
-
- # assign material, uv's and image
- blender_tface.uv1= verts_tex[face_vert_tex_indicies[0]]
- blender_tface.uv2= verts_tex[face_vert_tex_indicies[1]]
- blender_tface.uv3= verts_tex[face_vert_tex_indicies[2]]
-
- if len(face_vert_loc_indicies)==4:
- blender_tface.uv4= verts_tex[face_vert_tex_indicies[3]]
-
-# for ii, uv in enumerate(blender_face.uv):
-# uv.x, uv.y= verts_tex[face_vert_tex_indicies[ii]]
- del me_faces
-# del ALPHA
-
- if CREATE_EDGES:
-
- me.edges.add(len(edges))
-
- # edges should be a list of (a, b) tuples
- me.edges.foreach_set("vertices", unpack_list(edges))
-# me_edges.extend( edges )
-
-# del me_edges
-
- # Add edge faces.
-# me_edges= me.edges
-
- def edges_match(e1, e2):
- return (e1[0] == e2[0] and e1[1] == e2[1]) or (e1[0] == e2[1] and e1[1] == e2[0])
-
- # XXX slow
-# if CREATE_FGONS and fgon_edges:
-# for fgon_edge in fgon_edges.keys():
-# for ed in me.edges:
-# if edges_match(fgon_edge, ed.vertices):
-# ed.is_fgon = True
-
-# if CREATE_FGONS and fgon_edges:
-# FGON= Mesh.EdgeFlags.FGON
-# for ed in me.findEdges( fgon_edges.keys() ):
-# if ed!=None:
-# me_edges[ed].flag |= FGON
-# del FGON
-
- # XXX slow
-# if unique_smooth_groups and sharp_edges:
-# for sharp_edge in sharp_edges.keys():
-# for ed in me.edges:
-# if edges_match(sharp_edge, ed.vertices):
-# ed.use_edge_sharp = True
-
-# if unique_smooth_groups and sharp_edges:
-# SHARP= Mesh.EdgeFlags.SHARP
-# for ed in me.findEdges( sharp_edges.keys() ):
-# if ed!=None:
-# me_edges[ed].flag |= SHARP
-# del SHARP
-
- me.update()
-# me.calcNormals()
-
- ob= bpy.data.objects.new("Mesh", me)
- new_objects.append(ob)
-
- # Create the vertex groups. No need to have the flag passed here since we test for the
- # content of the vertex_groups. If the user selects to NOT have vertex groups saved then
- # the following test will never run
- for group_name, group_indicies in vertex_groups.items():
- group= ob.vertex_groups.new(group_name)
- ob.vertex_groups.assign(group_indicies, group, 1.0, 'REPLACE')
-
-
-def create_nurbs(context_nurbs, vert_loc, new_objects):
- '''
- Add nurbs object to blender, only support one type at the moment
- '''
- deg = context_nurbs.get('deg', (3,))
- curv_range = context_nurbs.get('curv_range')
- curv_idx = context_nurbs.get('curv_idx', [])
- parm_u = context_nurbs.get('parm_u', [])
- parm_v = context_nurbs.get('parm_v', [])
- name = context_nurbs.get('name', 'ObjNurb')
- cstype = context_nurbs.get('cstype')
-
- if cstype is None:
- print('\tWarning, cstype not found')
- return
- if cstype != 'bspline':
- print('\tWarning, cstype is not supported (only bspline)')
- return
- if not curv_idx:
- print('\tWarning, curv argument empty or not set')
- return
- if len(deg) > 1 or parm_v:
- print('\tWarning, surfaces not supported')
- return
-
- cu = bpy.data.curves.new(name, 'CURVE')
- cu.dimensions = '3D'
-
- nu = cu.splines.new('NURBS')
- nu.points.add(len(curv_idx) - 1) # the new spline already has one point to start with
- nu.points.foreach_set("co", [co_axis for vt_idx in curv_idx for co_axis in (vert_loc[vt_idx] + (1.0,))])
-
- nu.order_u = deg[0] + 1
-
- # derive the endpoint flag from the knot values (parm_u)
- if curv_range and len(parm_u) > deg[0]+1:
- do_endpoints = True
- for i in range(deg[0]+1):
-
- if abs(parm_u[i]-curv_range[0]) > 0.0001:
- do_endpoints = False
- break
-
- if abs(parm_u[-(i+1)]-curv_range[1]) > 0.0001:
- do_endpoints = False
- break
-
- else:
- do_endpoints = False
-
- if do_endpoints:
- nu.use_endpoint_u = True
-
-
- # close
- '''
- do_closed = False
- if len(parm_u) > deg[0]+1:
- for i in xrange(deg[0]+1):
- #print curv_idx[i], curv_idx[-(i+1)]
-
- if curv_idx[i]==curv_idx[-(i+1)]:
- do_closed = True
- break
-
- if do_closed:
- nu.use_cyclic_u = True
- '''
-
- ob= bpy.data.objects.new("Nurb", cu)
-
- new_objects.append(ob)
-
-
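-# Illustrative only: the dict shape create_nurbs() expects, as load() below
-# builds it from lines such as "cstype bspline" / "deg 3" /
-# "curv 0.0 1.0 1 2 3 4 5" / "parm u 0.0 0.0 0.0 0.0 0.5 1.0 1.0 1.0 1.0" / "end".
-_example_nurbs = {
- 'cstype': 'bspline',
- 'deg': [3],
- 'curv_range': (0.0, 1.0),
- 'curv_idx': [0, 1, 2, 3, 4], # zero-based, converted from 1-based obj indices
- 'parm_u': [0.0, 0.0, 0.0, 0.0, 0.5, 1.0, 1.0, 1.0, 1.0],
- 'name': 'ObjNurb',
-}
-
-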
-def strip_slash(line_split):
- if line_split[-1][-1]== '\\':
- if len(line_split[-1])==1:
- line_split.pop() # remove the \ item
- else:
- line_split[-1]= line_split[-1][:-1] # remove the \ from the end of the last number
- return True
- return False
-
-
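-# Usage sketch for strip_slash() on continued obj lines:
-# strip_slash(['f', '1', '2', '3', '\\']) -> True (pops the lone '\')
-# strip_slash(['f', '1', '2', '3\\']) -> True (trims '3\' to '3')
-# strip_slash(['f', '1', '2', '3']) -> False (no continuation)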
-
-def get_float_func(filepath):
- '''
- find the float parsing function for this obj file
- - whether commas need replacing with dots or not
- '''
- file= open(filepath, 'rU')
- for line in file: #.xreadlines():
- line = line.lstrip()
- if line.startswith('v'): # vn vt v
- if ',' in line:
- return lambda f: float(f.replace(',', '.'))
- elif '.' in line:
- return float
-
- # in case all vert values were ints
- return float
-
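-# Sketch: some exporters write comma decimals, e.g. "v 1,0 2,5 0,0"; for such
-# files get_float_func() returns a lambda so that float_func("2,5") == 2.5,
-# while dot-decimal (or all-integer) files get plain float().
-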
-def load(operator, context, filepath,
- CLAMP_SIZE= 0.0,
- CREATE_FGONS= True,
- CREATE_SMOOTH_GROUPS= True,
- CREATE_EDGES= True,
- SPLIT_OBJECTS= True,
- SPLIT_GROUPS= True,
- ROTATE_X90= True,
- IMAGE_SEARCH=True,
- POLYGROUPS=False):
- '''
- Called by the user interface or another script.
- load(operator, context, filepath) with the default options should give acceptable results.
- This function parses the file and sends the data off
- to be split into objects and then converted into mesh objects.
- '''
- print('\nimporting obj %r' % filepath)
-
- if SPLIT_OBJECTS or SPLIT_GROUPS:
- POLYGROUPS = False
-
- time_main= time.time()
-# time_main= sys.time()
-
- verts_loc= []
- verts_tex= []
- faces= [] # tuples of the faces
- material_libs= [] # filenames of material libs this uses
- vertex_groups = {} # when POLYGROUPS is true
-
- # Get the string to float conversion func for this file - it is plain 'float' for almost all files.
- float_func= get_float_func(filepath)
-
- # Context variables
- context_material= None
- context_smooth_group= None
- context_object= None
- context_vgroup = None
-
- # Nurbs
- context_nurbs = {}
- nurbs = []
- context_parm = '' # used by nurbs too but could be used elsewhere
-
- has_ngons= False
- # has_smoothgroups= False - is explicit with len(unique_smooth_groups) being > 0
-
- # Until we can use sets
- unique_materials= {}
- unique_material_images= {}
- unique_smooth_groups= {}
- # unique_obects= {} - no use for this variable since the objects are stored in the face.
-
- # when there are faces that end with \
- # they continue on the next line-
- # since we read the file line by line we can't skip ahead,
- # so we need to track whether we are inside a multi-line statement
- context_multi_line= ''
-
- print("\tparsing obj file...")
- time_sub= time.time()
-# time_sub= sys.time()
-
- file= open(filepath, 'rU')
- for line in file: #.xreadlines():
- line = line.lstrip() # in rare cases there is whitespace at the start of the line
-
- if line.startswith('v '):
- line_split= line.split()
- # rotate X90: (x,-z,y)
- verts_loc.append( (float_func(line_split[1]), -float_func(line_split[3]), float_func(line_split[2])) )
-
- elif line.startswith('vn '):
- pass
-
- elif line.startswith('vt '):
- line_split= line.split()
- verts_tex.append( (float_func(line_split[1]), float_func(line_split[2])) )
-
- # Handle face lines (as faces) and the second+ lines of a multiline face here
- # use 'f' not 'f ' because some objs (very rare) have 'fo ' for faces
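- # Example face lines this branch accepts (values illustrative):
- # f 1 2 3 (loc only)
- # f 1/1 2/2 3/3 (loc/tex)
- # f 1/1/1 2/2/2 3/3/3 (loc/tex/nor)
- # f -4 -3 -2 -1 (relative, negative indices)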
- elif line.startswith('f') or context_multi_line == 'f':
-
- if context_multi_line:
- # reuse face_vert_loc_indicies and face_vert_tex_indicies defined on the first line of this face
- line_split= line.split()
-
- else:
- line_split= line[2:].split()
- face_vert_loc_indicies= []
- face_vert_tex_indicies= []
-
- # Instance a face
- faces.append((\
- face_vert_loc_indicies,\
- face_vert_tex_indicies,\
- context_material,\
- context_smooth_group,\
- context_object\
- ))
-
- if strip_slash(line_split):
- context_multi_line = 'f'
- else:
- context_multi_line = ''
-
- for v in line_split:
- obj_vert= v.split('/')
-
- vert_loc_index= int(obj_vert[0])-1
- # Add the vertex to the current group
- # *warning*, this won't work for files that have groups defined around verts
- if POLYGROUPS and context_vgroup:
- vertex_groups[context_vgroup].append(vert_loc_index)
-
- # Make relative negative vert indices absolute
- if vert_loc_index < 0:
- vert_loc_index= len(verts_loc) + vert_loc_index + 1
-
- face_vert_loc_indicies.append(vert_loc_index)
-
- if len(obj_vert)>1 and obj_vert[1]:
- # format for faces with normals and textures is
- # loc_index/tex_index/nor_index
-
- vert_tex_index= int(obj_vert[1])-1
- # Make relative negative vert indices absolute
- if vert_tex_index < 0:
- vert_tex_index= len(verts_tex) + vert_tex_index + 1
-
- face_vert_tex_indicies.append(vert_tex_index)
- else:
- # dummy
- face_vert_tex_indicies.append(0)
-
- if len(face_vert_loc_indicies) > 4:
- has_ngons= True
-
- elif CREATE_EDGES and (line.startswith('l ') or context_multi_line == 'l'):
- # very similar to the face load function above with some parts removed
-
- if context_multi_line:
- # reuse face_vert_loc_indicies and face_vert_tex_indicies defined on the first line of this polyline
- line_split= line.split()
-
- else:
- line_split= line[2:].split()
- face_vert_loc_indicies= []
- face_vert_tex_indicies= []
-
- # Instance a face
- faces.append((\
- face_vert_loc_indicies,\
- face_vert_tex_indicies,\
- context_material,\
- context_smooth_group,\
- context_object\
- ))
-
- if strip_slash(line_split):
- context_multi_line = 'l'
- else:
- context_multi_line = ''
-
-
- for v in line_split:
- vert_loc_index= int(v)-1
-
- # Make relative negative vert indices absolute
- if vert_loc_index < 0:
- vert_loc_index= len(verts_loc) + vert_loc_index + 1
-
- face_vert_loc_indicies.append(vert_loc_index)
-
- elif line.startswith('s'):
- if CREATE_SMOOTH_GROUPS:
- context_smooth_group= line_value(line.split())
- if context_smooth_group=='off':
- context_smooth_group= None
- elif context_smooth_group: # is not None
- unique_smooth_groups[context_smooth_group]= None
-
- elif line.startswith('o'):
- if SPLIT_OBJECTS:
- context_object= line_value(line.split())
- # unique_obects[context_object]= None
-
- elif line.startswith('g'):
- if SPLIT_GROUPS:
- context_object= line_value(line.split())
- # print 'context_object', context_object
- # unique_obects[context_object]= None
- elif POLYGROUPS:
- context_vgroup = line_value(line.split())
- if context_vgroup and context_vgroup != '(null)':
- vertex_groups.setdefault(context_vgroup, [])
- else:
- context_vgroup = None # don't assign a vgroup
-
- elif line.startswith('usemtl'):
- context_material= line_value(line.split())
- unique_materials[context_material]= None
- elif line.startswith('mtllib'): # usemap or usemat
- material_libs = list(set(material_libs) | set(line.split()[1:])) # a line can list several mtllib filenames and mtllib can appear more than once, so keep each material lib only once
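- # e.g. "mtllib base.mtl extra.mtl" (hypothetical filenames)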
-
- # Nurbs support
- elif line.startswith('cstype '):
- context_nurbs['cstype']= line_value(line.split()) # 'rat bspline' / 'bspline'
- elif line.startswith('curv ') or context_multi_line == 'curv':
- line_split= line.split()
-
- curv_idx = context_nurbs['curv_idx'] = context_nurbs.get('curv_idx', []) # in case we're in a multi-line curv statement
-
- if not context_multi_line:
- context_nurbs['curv_range'] = float_func(line_split[1]), float_func(line_split[2])
- line_split[0:3] = [] # remove first 3 items
-
- if strip_slash(line_split):
- context_multi_line = 'curv'
- else:
- context_multi_line = ''
-
-
- for i in line_split:
- vert_loc_index = int(i)-1
-
- if vert_loc_index < 0:
- vert_loc_index= len(verts_loc) + vert_loc_index + 1
-
- curv_idx.append(vert_loc_index)
-
- elif line.startswith('parm') or context_multi_line == 'parm':
- line_split= line.split()
-
- if context_multi_line:
- context_multi_line = ''
- else:
- context_parm = line_split[1]
- line_split[0:2] = [] # remove first 2
-
- if strip_slash(line_split):
- context_multi_line = 'parm'
- else:
- context_multi_line = ''
-
- if context_parm.lower() == 'u':
- context_nurbs.setdefault('parm_u', []).extend( [float_func(f) for f in line_split] )
- elif context_parm.lower() == 'v': # surfaces not supported yet
- context_nurbs.setdefault('parm_v', []).extend( [float_func(f) for f in line_split] )
- # else: # may want to support other parm's ?
-
- elif line.startswith('deg '):
- context_nurbs['deg']= [int(i) for i in line.split()[1:]]
- elif line.startswith('end'):
- # Add the nurbs curve
- if context_object:
- context_nurbs['name'] = context_object
- nurbs.append(context_nurbs)
- context_nurbs = {}
- context_parm = ''
-
- ''' # How to use usemap? deprecated?
- elif line.startswith('usema'): # usemap or usemat
- context_image= line_value(line.split())
- '''
-
- file.close()
- time_new= time.time()
-# time_new= sys.time()
- print('%.4f sec' % (time_new-time_sub))
- time_sub= time_new
-
-
- print('\tloading materials and images...')
- create_materials(filepath, material_libs, unique_materials, unique_material_images, IMAGE_SEARCH)
-
- time_new= time.time()
-# time_new= sys.time()
- print('%.4f sec' % (time_new-time_sub))
- time_sub= time_new
-
- if not ROTATE_X90:
- verts_loc[:] = [(v[0], v[2], -v[1]) for v in verts_loc]
-
- # deselect all
- bpy.ops.object.select_all(action='DESELECT')
-
- scene = context.scene
-# scn.objects.selected = []
- new_objects= [] # put new objects here
-
- print('\tbuilding geometry...\n\tverts:%i faces:%i materials: %i smoothgroups:%i ...' % ( len(verts_loc), len(faces), len(unique_materials), len(unique_smooth_groups) ))
- # Split the mesh by objects/materials
- SPLIT_OB_OR_GROUP = bool(SPLIT_OBJECTS or SPLIT_GROUPS)
-
- for verts_loc_split, faces_split, unique_materials_split, dataname in split_mesh(verts_loc, faces, unique_materials, filepath, SPLIT_OB_OR_GROUP):
- # Create meshes from the data, warning 'vertex_groups' won't support splitting
- create_mesh(new_objects, has_ngons, CREATE_FGONS, CREATE_EDGES, verts_loc_split, verts_tex, faces_split, unique_materials_split, unique_material_images, unique_smooth_groups, vertex_groups, dataname)
-
- # nurbs support
- for context_nurbs in nurbs:
- create_nurbs(context_nurbs, verts_loc, new_objects)
-
- # Link the new objects into the scene and select them
- for obj in new_objects:
- base = scene.objects.link(obj)
- base.select = True
-
- scene.update()
-
-
- axis_min= [ 1000000000]*3
- axis_max= [-1000000000]*3
-
-# if CLAMP_SIZE:
-# # Get all object bounds
-# for ob in new_objects:
-# for v in ob.getBoundBox():
-# for axis, value in enumerate(v):
-# if axis_min[axis] > value: axis_min[axis]= value
-# if axis_max[axis] < value: axis_max[axis]= value
-
-# # Scale objects
-# max_axis= max(axis_max[0]-axis_min[0], axis_max[1]-axis_min[1], axis_max[2]-axis_min[2])
-# scale= 1.0
-
-# while CLAMP_SIZE < max_axis * scale:
-# scale= scale/10.0
-
-# for ob in new_objects:
-# ob.setSize(scale, scale, scale)
-
- # Better rotate the vert locations
- #if not ROTATE_X90:
- # for ob in new_objects:
- # ob.RotX = -1.570796326794896558
-
- time_new= time.time()
-# time_new= sys.time()
-
- print('finished importing: %r in %.4f sec.' % (filepath, (time_new-time_main)))
- return {'FINISHED'}
-
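-# Usage sketch (hypothetical arguments): load() is normally invoked by the
-# import operator, but it can be driven directly from a script, e.g.:
-# load(None, bpy.context, "/tmp/example.obj", CREATE_EDGES=True, POLYGROUPS=False)
-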
-
-# NOTES (all line numbers refer to 2.4x import_obj.py, not this file)
-# check later: line 489
-# can convert now: edge flags, edges: lines 508-528
-# ngon (uses python module BPyMesh): 384-414
-# NEXT clamp size: get bound box with RNA
-# get back to l 140 (here)
-# search image in bpy.config.textureDir - load_image
-# replaced BPyImage.comprehensiveImageLoad with a simplified version that only checks additional directory specified, but doesn't search dirs recursively (obj_image_load)
-# bitmask won't work? - 132
-# uses bpy.sys.time()
-
-if __name__ == "__main__":
- register()