git.blender.org/blender-addons.git

author     Bastien Montagne <montagne29@wanadoo.fr>  2015-07-11 17:45:54 +0300
committer  Bastien Montagne <montagne29@wanadoo.fr>  2015-07-11 17:45:54 +0300
commit     3cc970eaf5c896b8103a88d959ebb0776e4ad590 (patch)
tree       c377b9479eaa2794b89a481845993112eebf90ea /io_scene_obj/export_obj.py
parent     7194d2205ecb9920449cc5fac5b0f9418be5c173 (diff)
OBJ IO: add some progress reporting for the user.
Use the new 'progress_report' util module to provide both a (basic) report of I/O progress and some logging/timing of the main steps. We could probably go much further, but for now it will do. Files are also used as context managers now.
Diffstat (limited to 'io_scene_obj/export_obj.py')
-rw-r--r--  io_scene_obj/export_obj.py  1061
1 file changed, 535 insertions(+), 526 deletions(-)
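
Before the diff itself, a minimal sketch (not part of the commit) of how the progress_report helpers introduced here are used, relying only on calls visible in the patch below: ProgressReport and ProgressReportSubstep as context managers, plus enter_substeps(), step() and leave_substeps(). The export_items() function, its arguments and the per-phase work are hypothetical placeholders, and the sketch assumes the progress_report module is importable.

from progress_report import ProgressReport, ProgressReportSubstep

def export_items(window_manager, items):
    """Hypothetical driver; the items and the work done per phase are placeholders."""
    # Top-level report; the exporter below passes context.window_manager here.
    with ProgressReport(window_manager) as progress:
        # One substep per item, mirroring the per-object loop in write_file().
        progress.enter_substeps(len(items))
        for item in items:
            # A fixed number of steps with start/end messages, like the
            # "OBJ Export path: ..." / "OBJ Export Finished" pair in write_file().
            with ProgressReportSubstep(progress, 2, "Exporting %r..." % item, "Done.") as sub:
                # ... first phase of work for this item (e.g. geometry) ...
                sub.step("First phase done, starting second phase")
                # ... second phase of work for this item (e.g. materials) ...
        progress.leave_substeps("Export finished")

This mirrors the nesting introduced below: _write() owns the top-level ProgressReport, hands it to write_file() through the new progress parameter, and write_file() opens further substep levels per exported object.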
diff --git a/io_scene_obj/export_obj.py b/io_scene_obj/export_obj.py
index d47555c1..7399c2e1 100644
--- a/io_scene_obj/export_obj.py
+++ b/io_scene_obj/export_obj.py
@@ -19,12 +19,13 @@
# <pep8 compliant>
import os
-import time
import bpy
import mathutils
import bpy_extras.io_utils
+from progress_report import ProgressReport, ProgressReportSubstep
+
def name_compat(name):
if name is None:
@@ -54,132 +55,129 @@ def write_mtl(scene, filepath, path_mode, copy_set, mtl_dict):
source_dir = os.path.dirname(bpy.data.filepath)
dest_dir = os.path.dirname(filepath)
- file = open(filepath, "w", encoding="utf8", newline="\n")
- fw = file.write
-
- fw('# Blender MTL File: %r\n' % (os.path.basename(bpy.data.filepath) or "None"))
- fw('# Material Count: %i\n' % len(mtl_dict))
+ with open(filepath, "w", encoding="utf8", newline="\n") as f:
+ fw = f.write
- mtl_dict_values = list(mtl_dict.values())
- mtl_dict_values.sort(key=lambda m: m[0])
+ fw('# Blender MTL File: %r\n' % (os.path.basename(bpy.data.filepath) or "None"))
+ fw('# Material Count: %i\n' % len(mtl_dict))
- # Write material/image combinations we have used.
- # Using mtl_dict.values() directly gives un-predictable order.
- for mtl_mat_name, mat, face_img in mtl_dict_values:
+ mtl_dict_values = list(mtl_dict.values())
+ mtl_dict_values.sort(key=lambda m: m[0])
- # Get the Blender data for the material and the image.
- # Having an image named None will make a bug, dont do it :)
+ # Write material/image combinations we have used.
+ # Using mtl_dict.values() directly gives un-predictable order.
+ for mtl_mat_name, mat, face_img in mtl_dict_values:
+ # Get the Blender data for the material and the image.
+ # Having an image named None will make a bug, dont do it :)
- fw('\nnewmtl %s\n' % mtl_mat_name) # Define a new material: matname_imgname
+ fw('\nnewmtl %s\n' % mtl_mat_name) # Define a new material: matname_imgname
- if mat:
- use_mirror = mat.raytrace_mirror.use and mat.raytrace_mirror.reflect_factor != 0.0
+ if mat:
+ use_mirror = mat.raytrace_mirror.use and mat.raytrace_mirror.reflect_factor != 0.0
- # convert from blenders spec to 0 - 1000 range.
- if mat.specular_shader == 'WARDISO':
- tspec = (0.4 - mat.specular_slope) / 0.0004
- else:
- tspec = (mat.specular_hardness - 1) / 0.51
- fw('Ns %.6f\n' % tspec)
- del tspec
+ # convert from blenders spec to 0 - 1000 range.
+ if mat.specular_shader == 'WARDISO':
+ tspec = (0.4 - mat.specular_slope) / 0.0004
+ else:
+ tspec = (mat.specular_hardness - 1) / 0.51
+ fw('Ns %.6f\n' % tspec)
+ del tspec
- # Ambient
- if use_mirror:
- fw('Ka %.6f %.6f %.6f\n' % (mat.raytrace_mirror.reflect_factor * mat.mirror_color)[:])
- else:
- fw('Ka %.6f %.6f %.6f\n' % (mat.ambient, mat.ambient, mat.ambient)) # Do not use world color!
- fw('Kd %.6f %.6f %.6f\n' % (mat.diffuse_intensity * mat.diffuse_color)[:]) # Diffuse
- fw('Ks %.6f %.6f %.6f\n' % (mat.specular_intensity * mat.specular_color)[:]) # Specular
- if hasattr(mat, "raytrace_transparency") and hasattr(mat.raytrace_transparency, "ior"):
- fw('Ni %.6f\n' % mat.raytrace_transparency.ior) # Refraction index
- else:
- fw('Ni %.6f\n' % 1.0)
- fw('d %.6f\n' % mat.alpha) # Alpha (obj uses 'd' for dissolve)
-
- # See http://en.wikipedia.org/wiki/Wavefront_.obj_file for whole list of values...
- # Note that mapping is rather fuzzy sometimes, trying to do our best here.
- if mat.use_shadeless:
- fw('illum 0\n') # ignore lighting
- elif mat.specular_intensity == 0:
- fw('illum 1\n') # no specular.
- elif use_mirror:
- if mat.use_transparency and mat.transparency_method == 'RAYTRACE':
- if mat.raytrace_mirror.fresnel != 0.0:
- fw('illum 7\n') # Reflection, Transparency, Ray trace and Fresnel
+ # Ambient
+ if use_mirror:
+ fw('Ka %.6f %.6f %.6f\n' % (mat.raytrace_mirror.reflect_factor * mat.mirror_color)[:])
+ else:
+ fw('Ka %.6f %.6f %.6f\n' % (mat.ambient, mat.ambient, mat.ambient)) # Do not use world color!
+ fw('Kd %.6f %.6f %.6f\n' % (mat.diffuse_intensity * mat.diffuse_color)[:]) # Diffuse
+ fw('Ks %.6f %.6f %.6f\n' % (mat.specular_intensity * mat.specular_color)[:]) # Specular
+ if hasattr(mat, "raytrace_transparency") and hasattr(mat.raytrace_transparency, "ior"):
+ fw('Ni %.6f\n' % mat.raytrace_transparency.ior) # Refraction index
+ else:
+ fw('Ni %.6f\n' % 1.0)
+ fw('d %.6f\n' % mat.alpha) # Alpha (obj uses 'd' for dissolve)
+
+ # See http://en.wikipedia.org/wiki/Wavefront_.obj_file for whole list of values...
+ # Note that mapping is rather fuzzy sometimes, trying to do our best here.
+ if mat.use_shadeless:
+ fw('illum 0\n') # ignore lighting
+ elif mat.specular_intensity == 0:
+ fw('illum 1\n') # no specular.
+ elif use_mirror:
+ if mat.use_transparency and mat.transparency_method == 'RAYTRACE':
+ if mat.raytrace_mirror.fresnel != 0.0:
+ fw('illum 7\n') # Reflection, Transparency, Ray trace and Fresnel
+ else:
+ fw('illum 6\n') # Reflection, Transparency, Ray trace
+ elif mat.raytrace_mirror.fresnel != 0.0:
+ fw('illum 5\n') # Reflection, Ray trace and Fresnel
else:
- fw('illum 6\n') # Reflection, Transparency, Ray trace
- elif mat.raytrace_mirror.fresnel != 0.0:
- fw('illum 5\n') # Reflection, Ray trace and Fresnel
+ fw('illum 3\n') # Reflection and Ray trace
+ elif mat.use_transparency and mat.transparency_method == 'RAYTRACE':
+ fw('illum 9\n') # 'Glass' transparency and no Ray trace reflection... fuzzy matching, but...
else:
- fw('illum 3\n') # Reflection and Ray trace
- elif mat.use_transparency and mat.transparency_method == 'RAYTRACE':
- fw('illum 9\n') # 'Glass' transparency and no Ray trace reflection... fuzzy matching, but...
+ fw('illum 2\n') # light normaly
+
else:
+ # Write a dummy material here?
+ fw('Ns 0\n')
+ fw('Ka %.6f %.6f %.6f\n' % world_amb[:]) # Ambient, uses mirror color,
+ fw('Kd 0.8 0.8 0.8\n')
+ fw('Ks 0.8 0.8 0.8\n')
+ fw('d 1\n') # No alpha
fw('illum 2\n') # light normaly
- else:
- # Write a dummy material here?
- fw('Ns 0\n')
- fw('Ka %.6f %.6f %.6f\n' % world_amb[:]) # Ambient, uses mirror color,
- fw('Kd 0.8 0.8 0.8\n')
- fw('Ks 0.8 0.8 0.8\n')
- fw('d 1\n') # No alpha
- fw('illum 2\n') # light normaly
-
- # Write images!
- if face_img: # We have an image on the face!
- filepath = face_img.filepath
- if filepath: # may be '' for generated images
- # write relative image path
- filepath = bpy_extras.io_utils.path_reference(filepath, source_dir, dest_dir,
- path_mode, "", copy_set, face_img.library)
- fw('map_Kd %s\n' % filepath) # Diffuse mapping image
- del filepath
- else:
- # so we write the materials image.
- face_img = None
-
- if mat: # No face image. if we havea material search for MTex image.
- image_map = {}
- # backwards so topmost are highest priority
- for mtex in reversed(mat.texture_slots):
- if mtex and mtex.texture and mtex.texture.type == 'IMAGE':
- image = mtex.texture.image
- if image:
- # texface overrides others
- if (mtex.use_map_color_diffuse and (face_img is None) and
- (mtex.use_map_warp is False) and (mtex.texture_coords != 'REFLECTION')):
- image_map["map_Kd"] = image
- if mtex.use_map_ambient:
- image_map["map_Ka"] = image
- # this is the Spec intensity channel but Ks stands for specular Color
- '''
- if mtex.use_map_specular:
- image_map["map_Ks"] = image
- '''
- if mtex.use_map_color_spec: # specular color
- image_map["map_Ks"] = image
- if mtex.use_map_hardness: # specular hardness/glossiness
- image_map["map_Ns"] = image
- if mtex.use_map_alpha:
- image_map["map_d"] = image
- if mtex.use_map_translucency:
- image_map["map_Tr"] = image
- if mtex.use_map_normal:
- image_map["map_Bump"] = image
- if mtex.use_map_displacement:
- image_map["disp"] = image
- if mtex.use_map_color_diffuse and (mtex.texture_coords == 'REFLECTION'):
- image_map["refl"] = image
- if mtex.use_map_emit:
- image_map["map_Ke"] = image
-
- for key, image in sorted(image_map.items()):
- filepath = bpy_extras.io_utils.path_reference(image.filepath, source_dir, dest_dir,
- path_mode, "", copy_set, image.library)
- fw('%s %s\n' % (key, repr(filepath)[1:-1]))
-
- file.close()
+ # Write images!
+ if face_img: # We have an image on the face!
+ filepath = face_img.filepath
+ if filepath: # may be '' for generated images
+ # write relative image path
+ filepath = bpy_extras.io_utils.path_reference(filepath, source_dir, dest_dir,
+ path_mode, "", copy_set, face_img.library)
+ fw('map_Kd %s\n' % filepath) # Diffuse mapping image
+ del filepath
+ else:
+ # so we write the materials image.
+ face_img = None
+
+ if mat: # No face image. if we havea material search for MTex image.
+ image_map = {}
+ # backwards so topmost are highest priority
+ for mtex in reversed(mat.texture_slots):
+ if mtex and mtex.texture and mtex.texture.type == 'IMAGE':
+ image = mtex.texture.image
+ if image:
+ # texface overrides others
+ if (mtex.use_map_color_diffuse and (face_img is None) and
+ (mtex.use_map_warp is False) and (mtex.texture_coords != 'REFLECTION')):
+ image_map["map_Kd"] = image
+ if mtex.use_map_ambient:
+ image_map["map_Ka"] = image
+ # this is the Spec intensity channel but Ks stands for specular Color
+ '''
+ if mtex.use_map_specular:
+ image_map["map_Ks"] = image
+ '''
+ if mtex.use_map_color_spec: # specular color
+ image_map["map_Ks"] = image
+ if mtex.use_map_hardness: # specular hardness/glossiness
+ image_map["map_Ns"] = image
+ if mtex.use_map_alpha:
+ image_map["map_d"] = image
+ if mtex.use_map_translucency:
+ image_map["map_Tr"] = image
+ if mtex.use_map_normal:
+ image_map["map_Bump"] = image
+ if mtex.use_map_displacement:
+ image_map["disp"] = image
+ if mtex.use_map_color_diffuse and (mtex.texture_coords == 'REFLECTION'):
+ image_map["refl"] = image
+ if mtex.use_map_emit:
+ image_map["map_Ke"] = image
+
+ for key, image in sorted(image_map.items()):
+ filepath = bpy_extras.io_utils.path_reference(image.filepath, source_dir, dest_dir,
+ path_mode, "", copy_set, image.library)
+ fw('%s %s\n' % (key, repr(filepath)[1:-1]))
def test_nurbs_compat(ob):
@@ -276,6 +274,7 @@ def write_file(filepath, objects, scene,
EXPORT_CURVE_AS_NURBS=True,
EXPORT_GLOBAL_MATRIX=None,
EXPORT_PATH_MODE='AUTO',
+ progress=ProgressReport(),
):
"""
Basic write function. The context and options must be already set
@@ -283,7 +282,6 @@ def write_file(filepath, objects, scene,
eg.
write( 'c:\\test\\foobar.obj', Blender.Object.GetSelected() ) # Using default options.
"""
-
if EXPORT_GLOBAL_MATRIX is None:
EXPORT_GLOBAL_MATRIX = mathutils.Matrix()
@@ -313,378 +311,387 @@ def write_file(filepath, objects, scene,
else:
return '(null)'
- print('OBJ Export path: %r' % filepath)
-
- time1 = time.time()
-
- file = open(filepath, "w", encoding="utf8", newline="\n")
- fw = file.write
-
- # Write Header
- fw('# Blender v%s OBJ File: %r\n' % (bpy.app.version_string, os.path.basename(bpy.data.filepath)))
- fw('# www.blender.org\n')
-
- # Tell the obj file what material file to use.
- if EXPORT_MTL:
- mtlfilepath = os.path.splitext(filepath)[0] + ".mtl"
- fw('mtllib %s\n' % repr(os.path.basename(mtlfilepath))[1:-1]) # filepath can contain non utf8 chars, use repr
-
- # Initialize totals, these are updated each object
- totverts = totuvco = totno = 1
-
- face_vert_index = 1
-
- # A Dict of Materials
- # (material.name, image.name):matname_imagename # matname_imagename has gaps removed.
- mtl_dict = {}
- # Used to reduce the usage of matname_texname materials, which can become annoying in case of
- # repeated exports/imports, yet keeping unique mat names per keys!
- # mtl_name: (material.name, image.name)
- mtl_rev_dict = {}
-
- copy_set = set()
-
- # Get all meshes
- for ob_main in objects:
-
- # ignore dupli children
- if ob_main.parent and ob_main.parent.dupli_type in {'VERTS', 'FACES'}:
- # XXX
- print(ob_main.name, 'is a dupli child - ignoring')
- continue
-
- obs = []
- if ob_main.dupli_type != 'NONE':
- # XXX
- print('creating dupli_list on', ob_main.name)
- ob_main.dupli_list_create(scene)
-
- obs = [(dob.object, dob.matrix) for dob in ob_main.dupli_list]
-
- # XXX debug print
- print(ob_main.name, 'has', len(obs), 'dupli children')
- else:
- obs = [(ob_main, ob_main.matrix_world)]
-
- for ob, ob_mat in obs:
- uv_unique_count = no_unique_count = 0
-
- # Nurbs curve support
- if EXPORT_CURVE_AS_NURBS and test_nurbs_compat(ob):
- ob_mat = EXPORT_GLOBAL_MATRIX * ob_mat
- totverts += write_nurb(fw, ob, ob_mat)
- continue
- # END NURBS
-
- try:
- me = ob.to_mesh(scene, EXPORT_APPLY_MODIFIERS, 'PREVIEW', calc_tessface=False)
- except RuntimeError:
- me = None
-
- if me is None:
- continue
-
- me.transform(EXPORT_GLOBAL_MATRIX * ob_mat)
-
- if EXPORT_TRI:
- # _must_ do this first since it re-allocs arrays
- mesh_triangulate(me)
-
- if EXPORT_UV:
- faceuv = len(me.uv_textures) > 0
- if faceuv:
- uv_texture = me.uv_textures.active.data[:]
- uv_layer = me.uv_layers.active.data[:]
- else:
- faceuv = False
-
- me_verts = me.vertices[:]
-
- # Make our own list so it can be sorted to reduce context switching
- face_index_pairs = [(face, index) for index, face in enumerate(me.polygons)]
- # faces = [ f for f in me.tessfaces ]
-
- if EXPORT_EDGES:
- edges = me.edges
- else:
- edges = []
-
- if not (len(face_index_pairs) + len(edges) + len(me.vertices)): # Make sure there is somthing to write
-
- # clean up
- bpy.data.meshes.remove(me)
-
- continue # dont bother with this mesh.
-
- if EXPORT_NORMALS and face_index_pairs:
- me.calc_normals_split()
- # No need to call me.free_normals_split later, as this mesh is deleted anyway!
- loops = me.loops
- else:
- loops = []
-
- if (EXPORT_SMOOTH_GROUPS or EXPORT_SMOOTH_GROUPS_BITFLAGS) and face_index_pairs:
- smooth_groups, smooth_groups_tot = me.calc_smooth_groups(EXPORT_SMOOTH_GROUPS_BITFLAGS)
- if smooth_groups_tot <= 1:
- smooth_groups, smooth_groups_tot = (), 0
- else:
- smooth_groups, smooth_groups_tot = (), 0
-
- materials = me.materials[:]
- material_names = [m.name if m else None for m in materials]
-
- # avoid bad index errors
- if not materials:
- materials = [None]
- material_names = [name_compat(None)]
-
- # Sort by Material, then images
- # so we dont over context switch in the obj file.
- if EXPORT_KEEP_VERT_ORDER:
- pass
- else:
- if faceuv:
- if smooth_groups:
- sort_func = lambda a: (a[0].material_index,
- hash(uv_texture[a[1]].image),
- smooth_groups[a[1]] if a[0].use_smooth else False)
- else:
- sort_func = lambda a: (a[0].material_index,
- hash(uv_texture[a[1]].image),
- a[0].use_smooth)
- elif len(materials) > 1:
- if smooth_groups:
- sort_func = lambda a: (a[0].material_index,
- smooth_groups[a[1]] if a[0].use_smooth else False)
- else:
- sort_func = lambda a: (a[0].material_index,
- a[0].use_smooth)
- else:
- # no materials
- if smooth_groups:
- sort_func = lambda a: smooth_groups[a[1] if a[0].use_smooth else False]
- else:
- sort_func = lambda a: a[0].use_smooth
-
- face_index_pairs.sort(key=sort_func)
-
- del sort_func
-
- # Set the default mat to no material and no image.
- contextMat = 0, 0 # Can never be this, so we will label a new material the first chance we get.
- contextSmooth = None # Will either be true or false, set bad to force initialization switch.
-
- if EXPORT_BLEN_OBS or EXPORT_GROUP_BY_OB:
- name1 = ob.name
- name2 = ob.data.name
- if name1 == name2:
- obnamestring = name_compat(name1)
- else:
- obnamestring = '%s_%s' % (name_compat(name1), name_compat(name2))
-
- if EXPORT_BLEN_OBS:
- fw('o %s\n' % obnamestring) # Write Object name
- else: # if EXPORT_GROUP_BY_OB:
- fw('g %s\n' % obnamestring)
-
- # Vert
- for v in me_verts:
- fw('v %.6f %.6f %.6f\n' % v.co[:])
-
- # UV
- if faceuv:
- # in case removing some of these dont get defined.
- uv = f_index = uv_index = uv_key = uv_val = uv_ls = None
-
- uv_face_mapping = [None] * len(face_index_pairs)
-
- uv_dict = {}
- uv_get = uv_dict.get
- for f, f_index in face_index_pairs:
- uv_ls = uv_face_mapping[f_index] = []
- for uv_index, l_index in enumerate(f.loop_indices):
- uv = uv_layer[l_index].uv
- uv_key = veckey2d(uv)
- uv_val = uv_get(uv_key)
- if uv_val is None:
- uv_val = uv_dict[uv_key] = uv_unique_count
- fw('vt %.6f %.6f\n' % uv[:])
- uv_unique_count += 1
- uv_ls.append(uv_val)
-
- del uv_dict, uv, f_index, uv_index, uv_ls, uv_get, uv_key, uv_val
- # Only need uv_unique_count and uv_face_mapping
-
- # NORMAL, Smooth/Non smoothed.
- if EXPORT_NORMALS:
- no_key = no_val = None
- normals_to_idx = {}
- no_get = normals_to_idx.get
- loops_to_normals = [0] * len(loops)
- for f, f_index in face_index_pairs:
- for l_idx in f.loop_indices:
- no_key = veckey3d(loops[l_idx].normal)
- no_val = no_get(no_key)
- if no_val is None:
- no_val = normals_to_idx[no_key] = no_unique_count
- fw('vn %.6f %.6f %.6f\n' % no_key)
- no_unique_count += 1
- loops_to_normals[l_idx] = no_val
- del normals_to_idx, no_get, no_key, no_val
- else:
- loops_to_normals = []
-
- if not faceuv:
- f_image = None
-
- # XXX
- if EXPORT_POLYGROUPS:
- # Retrieve the list of vertex groups
- vertGroupNames = ob.vertex_groups.keys()
- if vertGroupNames:
- currentVGroup = ''
- # Create a dictionary keyed by face id and listing, for each vertex, the vertex groups it belongs to
- vgroupsMap = [[] for _i in range(len(me_verts))]
- for v_idx, v_ls in enumerate(vgroupsMap):
- v_ls[:] = [(vertGroupNames[g.group], g.weight) for g in me_verts[v_idx].groups]
-
- for f, f_index in face_index_pairs:
- f_smooth = f.use_smooth
- if f_smooth and smooth_groups:
- f_smooth = smooth_groups[f_index]
- f_mat = min(f.material_index, len(materials) - 1)
-
- if faceuv:
- tface = uv_texture[f_index]
- f_image = tface.image
-
- # MAKE KEY
- if faceuv and f_image: # Object is always true.
- key = material_names[f_mat], f_image.name
+ with ProgressReportSubstep(progress, 2, "OBJ Export path: %r" % filepath, "OBJ Export Finished") as subprogress1:
+ with open(filepath, "w", encoding="utf8", newline="\n") as f:
+ fw = f.write
+
+ # Write Header
+ fw('# Blender v%s OBJ File: %r\n' % (bpy.app.version_string, os.path.basename(bpy.data.filepath)))
+ fw('# www.blender.org\n')
+
+ # Tell the obj file what material file to use.
+ if EXPORT_MTL:
+ mtlfilepath = os.path.splitext(filepath)[0] + ".mtl"
+ # filepath can contain non utf8 chars, use repr
+ fw('mtllib %s\n' % repr(os.path.basename(mtlfilepath))[1:-1])
+
+ # Initialize totals, these are updated each object
+ totverts = totuvco = totno = 1
+
+ face_vert_index = 1
+
+ # A Dict of Materials
+ # (material.name, image.name):matname_imagename # matname_imagename has gaps removed.
+ mtl_dict = {}
+ # Used to reduce the usage of matname_texname materials, which can become annoying in case of
+ # repeated exports/imports, yet keeping unique mat names per keys!
+ # mtl_name: (material.name, image.name)
+ mtl_rev_dict = {}
+
+ copy_set = set()
+
+ # Get all meshes
+ subprogress1.enter_substeps(len(objects))
+ for i, ob_main in enumerate(objects):
+ # ignore dupli children
+ if ob_main.parent and ob_main.parent.dupli_type in {'VERTS', 'FACES'}:
+ # XXX
+ subprogress1.step("Ignoring %s, dupli child..." % ob_main.name)
+ continue
+
+ obs = []
+ if ob_main.dupli_type != 'NONE':
+ # XXX
+ print('creating dupli_list on', ob_main.name)
+ ob_main.dupli_list_create(scene)
+
+ obs = [(dob.object, dob.matrix) for dob in ob_main.dupli_list]
+
+ # XXX debug print
+ print(ob_main.name, 'has', len(obs), 'dupli children')
else:
- key = material_names[f_mat], None # No image, use None instead.
-
- # Write the vertex group
- if EXPORT_POLYGROUPS:
- if vertGroupNames:
- # find what vertext group the face belongs to
- vgroup_of_face = findVertexGroupName(f, vgroupsMap)
- if vgroup_of_face != currentVGroup:
- currentVGroup = vgroup_of_face
- fw('g %s\n' % vgroup_of_face)
-
- # CHECK FOR CONTEXT SWITCH
- if key == contextMat:
- pass # Context already switched, dont do anything
- else:
- if key[0] is None and key[1] is None:
- # Write a null material, since we know the context has changed.
- if EXPORT_GROUP_BY_MAT:
- # can be mat_image or (null)
- fw("g %s_%s\n" % (name_compat(ob.name), name_compat(ob.data.name)))
- if EXPORT_MTL:
- fw("usemtl (null)\n") # mat, image
-
- else:
- mat_data = mtl_dict.get(key)
- if not mat_data:
- # First add to global dict so we can export to mtl
- # Then write mtl
-
- # Make a new names from the mat and image name,
- # converting any spaces to underscores with name_compat.
-
- # If none image dont bother adding it to the name
- # Try to avoid as much as possible adding texname (or other things)
- # to the mtl name (see [#32102])...
- mtl_name = "%s" % name_compat(key[0])
- if mtl_rev_dict.get(mtl_name, None) not in {key, None}:
- if key[1] is None:
- tmp_ext = "_NONE"
- else:
- tmp_ext = "_%s" % name_compat(key[1])
- i = 0
- while mtl_rev_dict.get(mtl_name + tmp_ext, None) not in {key, None}:
- i += 1
- tmp_ext = "_%3d" % i
- mtl_name += tmp_ext
- mat_data = mtl_dict[key] = mtl_name, materials[f_mat], f_image
- mtl_rev_dict[mtl_name] = key
-
- if EXPORT_GROUP_BY_MAT:
- # can be mat_image or (null)
- fw("g %s_%s_%s\n" % (name_compat(ob.name), name_compat(ob.data.name), mat_data[0]))
- if EXPORT_MTL:
- fw("usemtl %s\n" % mat_data[0]) # can be mat_image or (null)
-
- contextMat = key
- if f_smooth != contextSmooth:
- if f_smooth: # on now off
- if smooth_groups:
- f_smooth = smooth_groups[f_index]
- fw('s %d\n' % f_smooth)
+ obs = [(ob_main, ob_main.matrix_world)]
+
+ subprogress1.enter_substeps(len(obs))
+ for ob, ob_mat in obs:
+ with ProgressReportSubstep(subprogress1, 6) as subprogress2:
+ uv_unique_count = no_unique_count = 0
+
+ # Nurbs curve support
+ if EXPORT_CURVE_AS_NURBS and test_nurbs_compat(ob):
+ ob_mat = EXPORT_GLOBAL_MATRIX * ob_mat
+ totverts += write_nurb(fw, ob, ob_mat)
+ continue
+ # END NURBS
+
+ try:
+ me = ob.to_mesh(scene, EXPORT_APPLY_MODIFIERS, 'PREVIEW', calc_tessface=False)
+ except RuntimeError:
+ me = None
+
+ if me is None:
+ continue
+
+ me.transform(EXPORT_GLOBAL_MATRIX * ob_mat)
+
+ if EXPORT_TRI:
+ # _must_ do this first since it re-allocs arrays
+ mesh_triangulate(me)
+
+ if EXPORT_UV:
+ faceuv = len(me.uv_textures) > 0
+ if faceuv:
+ uv_texture = me.uv_textures.active.data[:]
+ uv_layer = me.uv_layers.active.data[:]
else:
- fw('s 1\n')
- else: # was off now on
- fw('s off\n')
- contextSmooth = f_smooth
-
- f_v = [(vi, me_verts[v_idx], l_idx)
- for vi, (v_idx, l_idx) in enumerate(zip(f.vertices, f.loop_indices))]
+ faceuv = False
- fw('f')
- if faceuv:
- if EXPORT_NORMALS:
- for vi, v, li in f_v:
- fw(" %d/%d/%d" % (totverts + v.index,
- totuvco + uv_face_mapping[f_index][vi],
- totno + loops_to_normals[li],
- )) # vert, uv, normal
- else: # No Normals
- for vi, v, li in f_v:
- fw(" %d/%d" % (totverts + v.index,
- totuvco + uv_face_mapping[f_index][vi],
- )) # vert, uv
+ me_verts = me.vertices[:]
- face_vert_index += len(f_v)
+ # Make our own list so it can be sorted to reduce context switching
+ face_index_pairs = [(face, index) for index, face in enumerate(me.polygons)]
+ # faces = [ f for f in me.tessfaces ]
- else: # No UV's
- if EXPORT_NORMALS:
- for vi, v, li in f_v:
- fw(" %d//%d" % (totverts + v.index, totno + loops_to_normals[li]))
- else: # No Normals
- for vi, v, li in f_v:
- fw(" %d" % (totverts + v.index))
-
- fw('\n')
-
- # Write edges.
- if EXPORT_EDGES:
- for ed in edges:
- if ed.is_loose:
- fw('l %d %d\n' % (totverts + ed.vertices[0], totverts + ed.vertices[1]))
+ if EXPORT_EDGES:
+ edges = me.edges
+ else:
+ edges = []
- # Make the indices global rather then per mesh
- totverts += len(me_verts)
- totuvco += uv_unique_count
- totno += no_unique_count
+ if not (len(face_index_pairs) + len(edges) + len(me.vertices)): # Make sure there is something to write
+ # clean up
+ bpy.data.meshes.remove(me)
+ continue # dont bother with this mesh.
- # clean up
- bpy.data.meshes.remove(me)
+ if EXPORT_NORMALS and face_index_pairs:
+ me.calc_normals_split()
+ # No need to call me.free_normals_split later, as this mesh is deleted anyway!
+ loops = me.loops
+ else:
+ loops = []
- if ob_main.dupli_type != 'NONE':
- ob_main.dupli_list_clear()
+ if (EXPORT_SMOOTH_GROUPS or EXPORT_SMOOTH_GROUPS_BITFLAGS) and face_index_pairs:
+ smooth_groups, smooth_groups_tot = me.calc_smooth_groups(EXPORT_SMOOTH_GROUPS_BITFLAGS)
+ if smooth_groups_tot <= 1:
+ smooth_groups, smooth_groups_tot = (), 0
+ else:
+ smooth_groups, smooth_groups_tot = (), 0
- file.close()
+ materials = me.materials[:]
+ material_names = [m.name if m else None for m in materials]
- # Now we have all our materials, save them
- if EXPORT_MTL:
- write_mtl(scene, mtlfilepath, EXPORT_PATH_MODE, copy_set, mtl_dict)
+ # avoid bad index errors
+ if not materials:
+ materials = [None]
+ material_names = [name_compat(None)]
- # copy all collected files.
- bpy_extras.io_utils.path_reference_copy(copy_set)
+ # Sort by Material, then images
+ # so we dont over context switch in the obj file.
+ if EXPORT_KEEP_VERT_ORDER:
+ pass
+ else:
+ if faceuv:
+ if smooth_groups:
+ sort_func = lambda a: (a[0].material_index,
+ hash(uv_texture[a[1]].image),
+ smooth_groups[a[1]] if a[0].use_smooth else False)
+ else:
+ sort_func = lambda a: (a[0].material_index,
+ hash(uv_texture[a[1]].image),
+ a[0].use_smooth)
+ elif len(materials) > 1:
+ if smooth_groups:
+ sort_func = lambda a: (a[0].material_index,
+ smooth_groups[a[1]] if a[0].use_smooth else False)
+ else:
+ sort_func = lambda a: (a[0].material_index,
+ a[0].use_smooth)
+ else:
+ # no materials
+ if smooth_groups:
+ sort_func = lambda a: smooth_groups[a[1] if a[0].use_smooth else False]
+ else:
+ sort_func = lambda a: a[0].use_smooth
+
+ face_index_pairs.sort(key=sort_func)
+
+ del sort_func
+
+ # Set the default mat to no material and no image.
+ contextMat = 0, 0 # Can never be this, so we will label a new material the first chance we get.
+ contextSmooth = None # Will either be true or false, set bad to force initialization switch.
+
+ if EXPORT_BLEN_OBS or EXPORT_GROUP_BY_OB:
+ name1 = ob.name
+ name2 = ob.data.name
+ if name1 == name2:
+ obnamestring = name_compat(name1)
+ else:
+ obnamestring = '%s_%s' % (name_compat(name1), name_compat(name2))
+
+ if EXPORT_BLEN_OBS:
+ fw('o %s\n' % obnamestring) # Write Object name
+ else: # if EXPORT_GROUP_BY_OB:
+ fw('g %s\n' % obnamestring)
+
+ subprogress2.step()
+
+ # Vert
+ for v in me_verts:
+ fw('v %.6f %.6f %.6f\n' % v.co[:])
+
+ subprogress2.step()
+
+ # UV
+ if faceuv:
+ # in case removing some of these dont get defined.
+ uv = f_index = uv_index = uv_key = uv_val = uv_ls = None
+
+ uv_face_mapping = [None] * len(face_index_pairs)
+
+ uv_dict = {}
+ uv_get = uv_dict.get
+ for f, f_index in face_index_pairs:
+ uv_ls = uv_face_mapping[f_index] = []
+ for uv_index, l_index in enumerate(f.loop_indices):
+ uv = uv_layer[l_index].uv
+ uv_key = veckey2d(uv)
+ uv_val = uv_get(uv_key)
+ if uv_val is None:
+ uv_val = uv_dict[uv_key] = uv_unique_count
+ fw('vt %.6f %.6f\n' % uv[:])
+ uv_unique_count += 1
+ uv_ls.append(uv_val)
+
+ del uv_dict, uv, f_index, uv_index, uv_ls, uv_get, uv_key, uv_val
+ # Only need uv_unique_count and uv_face_mapping
+
+ subprogress2.step()
+
+ # NORMAL, Smooth/Non smoothed.
+ if EXPORT_NORMALS:
+ no_key = no_val = None
+ normals_to_idx = {}
+ no_get = normals_to_idx.get
+ loops_to_normals = [0] * len(loops)
+ for f, f_index in face_index_pairs:
+ for l_idx in f.loop_indices:
+ no_key = veckey3d(loops[l_idx].normal)
+ no_val = no_get(no_key)
+ if no_val is None:
+ no_val = normals_to_idx[no_key] = no_unique_count
+ fw('vn %.6f %.6f %.6f\n' % no_key)
+ no_unique_count += 1
+ loops_to_normals[l_idx] = no_val
+ del normals_to_idx, no_get, no_key, no_val
+ else:
+ loops_to_normals = []
+
+ if not faceuv:
+ f_image = None
+
+ subprogress2.step()
+
+ # XXX
+ if EXPORT_POLYGROUPS:
+ # Retrieve the list of vertex groups
+ vertGroupNames = ob.vertex_groups.keys()
+ if vertGroupNames:
+ currentVGroup = ''
+ # Create a dictionary keyed by face id and listing, for each vertex, the vertex groups it belongs to
+ vgroupsMap = [[] for _i in range(len(me_verts))]
+ for v_idx, v_ls in enumerate(vgroupsMap):
+ v_ls[:] = [(vertGroupNames[g.group], g.weight) for g in me_verts[v_idx].groups]
+
+ for f, f_index in face_index_pairs:
+ f_smooth = f.use_smooth
+ if f_smooth and smooth_groups:
+ f_smooth = smooth_groups[f_index]
+ f_mat = min(f.material_index, len(materials) - 1)
+
+ if faceuv:
+ tface = uv_texture[f_index]
+ f_image = tface.image
+
+ # MAKE KEY
+ if faceuv and f_image: # Object is always true.
+ key = material_names[f_mat], f_image.name
+ else:
+ key = material_names[f_mat], None # No image, use None instead.
+
+ # Write the vertex group
+ if EXPORT_POLYGROUPS:
+ if vertGroupNames:
+ # find what vertext group the face belongs to
+ vgroup_of_face = findVertexGroupName(f, vgroupsMap)
+ if vgroup_of_face != currentVGroup:
+ currentVGroup = vgroup_of_face
+ fw('g %s\n' % vgroup_of_face)
+
+ # CHECK FOR CONTEXT SWITCH
+ if key == contextMat:
+ pass # Context already switched, dont do anything
+ else:
+ if key[0] is None and key[1] is None:
+ # Write a null material, since we know the context has changed.
+ if EXPORT_GROUP_BY_MAT:
+ # can be mat_image or (null)
+ fw("g %s_%s\n" % (name_compat(ob.name), name_compat(ob.data.name)))
+ if EXPORT_MTL:
+ fw("usemtl (null)\n") # mat, image
- print("OBJ Export time: %.2f" % (time.time() - time1))
+ else:
+ mat_data = mtl_dict.get(key)
+ if not mat_data:
+ # First add to global dict so we can export to mtl
+ # Then write mtl
+
+ # Make a new names from the mat and image name,
+ # converting any spaces to underscores with name_compat.
+
+ # If none image dont bother adding it to the name
+ # Try to avoid as much as possible adding texname (or other things)
+ # to the mtl name (see [#32102])...
+ mtl_name = "%s" % name_compat(key[0])
+ if mtl_rev_dict.get(mtl_name, None) not in {key, None}:
+ if key[1] is None:
+ tmp_ext = "_NONE"
+ else:
+ tmp_ext = "_%s" % name_compat(key[1])
+ i = 0
+ while mtl_rev_dict.get(mtl_name + tmp_ext, None) not in {key, None}:
+ i += 1
+ tmp_ext = "_%3d" % i
+ mtl_name += tmp_ext
+ mat_data = mtl_dict[key] = mtl_name, materials[f_mat], f_image
+ mtl_rev_dict[mtl_name] = key
+
+ if EXPORT_GROUP_BY_MAT:
+ # can be mat_image or (null)
+ fw("g %s_%s_%s\n" % (name_compat(ob.name), name_compat(ob.data.name), mat_data[0]))
+ if EXPORT_MTL:
+ fw("usemtl %s\n" % mat_data[0]) # can be mat_image or (null)
+
+ contextMat = key
+ if f_smooth != contextSmooth:
+ if f_smooth: # on now off
+ if smooth_groups:
+ f_smooth = smooth_groups[f_index]
+ fw('s %d\n' % f_smooth)
+ else:
+ fw('s 1\n')
+ else: # was off now on
+ fw('s off\n')
+ contextSmooth = f_smooth
+
+ f_v = [(vi, me_verts[v_idx], l_idx)
+ for vi, (v_idx, l_idx) in enumerate(zip(f.vertices, f.loop_indices))]
+
+ fw('f')
+ if faceuv:
+ if EXPORT_NORMALS:
+ for vi, v, li in f_v:
+ fw(" %d/%d/%d" % (totverts + v.index,
+ totuvco + uv_face_mapping[f_index][vi],
+ totno + loops_to_normals[li],
+ )) # vert, uv, normal
+ else: # No Normals
+ for vi, v, li in f_v:
+ fw(" %d/%d" % (totverts + v.index,
+ totuvco + uv_face_mapping[f_index][vi],
+ )) # vert, uv
+
+ face_vert_index += len(f_v)
+
+ else: # No UV's
+ if EXPORT_NORMALS:
+ for vi, v, li in f_v:
+ fw(" %d//%d" % (totverts + v.index, totno + loops_to_normals[li]))
+ else: # No Normals
+ for vi, v, li in f_v:
+ fw(" %d" % (totverts + v.index))
+
+ fw('\n')
+
+ subprogress2.step()
+
+ # Write edges.
+ if EXPORT_EDGES:
+ for ed in edges:
+ if ed.is_loose:
+ fw('l %d %d\n' % (totverts + ed.vertices[0], totverts + ed.vertices[1]))
+
+ # Make the indices global rather then per mesh
+ totverts += len(me_verts)
+ totuvco += uv_unique_count
+ totno += no_unique_count
+
+ # clean up
+ bpy.data.meshes.remove(me)
+
+ if ob_main.dupli_type != 'NONE':
+ ob_main.dupli_list_clear()
+
+ subprogress1.leave_substeps("Finished writing geometry of '%s'." % ob_main.name)
+ subprogress1.leave_substeps()
+
+ subprogress1.step("Finished exporting geometry, now exporting materials")
+
+ # Now we have all our materials, save them
+ if EXPORT_MTL:
+ write_mtl(scene, mtlfilepath, EXPORT_PATH_MODE, copy_set, mtl_dict)
+
+ # copy all collected files.
+ bpy_extras.io_utils.path_reference_copy(copy_set)
def _write(context, filepath,
@@ -708,62 +715,64 @@ def _write(context, filepath,
EXPORT_PATH_MODE, # Not used
):
- base_name, ext = os.path.splitext(filepath)
- context_name = [base_name, '', '', ext] # Base name, scene name, frame number, extension
+ with ProgressReport(context.window_manager) as progress:
+ base_name, ext = os.path.splitext(filepath)
+ context_name = [base_name, '', '', ext] # Base name, scene name, frame number, extension
- scene = context.scene
+ scene = context.scene
- # Exit edit mode before exporting, so current object states are exported properly.
- if bpy.ops.object.mode_set.poll():
- bpy.ops.object.mode_set(mode='OBJECT')
+ # Exit edit mode before exporting, so current object states are exported properly.
+ if bpy.ops.object.mode_set.poll():
+ bpy.ops.object.mode_set(mode='OBJECT')
- orig_frame = scene.frame_current
+ orig_frame = scene.frame_current
- # Export an animation?
- if EXPORT_ANIMATION:
- scene_frames = range(scene.frame_start, scene.frame_end + 1) # Up to and including the end frame.
- else:
- scene_frames = [orig_frame] # Dont export an animation.
+ # Export an animation?
+ if EXPORT_ANIMATION:
+ scene_frames = range(scene.frame_start, scene.frame_end + 1) # Up to and including the end frame.
+ else:
+ scene_frames = [orig_frame] # Dont export an animation.
- # Loop through all frames in the scene and export.
- for frame in scene_frames:
- if EXPORT_ANIMATION: # Add frame to the filepath.
- context_name[2] = '_%.6d' % frame
+ # Loop through all frames in the scene and export.
+ progress.enter_substeps(len(scene_frames))
+ for frame in scene_frames:
+ if EXPORT_ANIMATION: # Add frame to the filepath.
+ context_name[2] = '_%.6d' % frame
- scene.frame_set(frame, 0.0)
- if EXPORT_SEL_ONLY:
- objects = context.selected_objects
- else:
- objects = scene.objects
-
- full_path = ''.join(context_name)
-
- # erm... bit of a problem here, this can overwrite files when exporting frames. not too bad.
- # EXPORT THE FILE.
- write_file(full_path, objects, scene,
- EXPORT_TRI,
- EXPORT_EDGES,
- EXPORT_SMOOTH_GROUPS,
- EXPORT_SMOOTH_GROUPS_BITFLAGS,
- EXPORT_NORMALS,
- EXPORT_UV,
- EXPORT_MTL,
- EXPORT_APPLY_MODIFIERS,
- EXPORT_BLEN_OBS,
- EXPORT_GROUP_BY_OB,
- EXPORT_GROUP_BY_MAT,
- EXPORT_KEEP_VERT_ORDER,
- EXPORT_POLYGROUPS,
- EXPORT_CURVE_AS_NURBS,
- EXPORT_GLOBAL_MATRIX,
- EXPORT_PATH_MODE,
- )
-
- scene.frame_set(orig_frame, 0.0)
-
- # Restore old active scene.
-# orig_scene.makeCurrent()
-# Window.WaitCursor(0)
+ scene.frame_set(frame, 0.0)
+ if EXPORT_SEL_ONLY:
+ objects = context.selected_objects
+ else:
+ objects = scene.objects
+
+ full_path = ''.join(context_name)
+
+ # erm... bit of a problem here, this can overwrite files when exporting frames. not too bad.
+ # EXPORT THE FILE.
+ progress.enter_substeps(1)
+ write_file(full_path, objects, scene,
+ EXPORT_TRI,
+ EXPORT_EDGES,
+ EXPORT_SMOOTH_GROUPS,
+ EXPORT_SMOOTH_GROUPS_BITFLAGS,
+ EXPORT_NORMALS,
+ EXPORT_UV,
+ EXPORT_MTL,
+ EXPORT_APPLY_MODIFIERS,
+ EXPORT_BLEN_OBS,
+ EXPORT_GROUP_BY_OB,
+ EXPORT_GROUP_BY_MAT,
+ EXPORT_KEEP_VERT_ORDER,
+ EXPORT_POLYGROUPS,
+ EXPORT_CURVE_AS_NURBS,
+ EXPORT_GLOBAL_MATRIX,
+ EXPORT_PATH_MODE,
+ progress,
+ )
+ progress.leave_substeps()
+
+ scene.frame_set(orig_frame, 0.0)
+ progress.leave_substeps()
"""