git.blender.org/blender-addons.git
author     Bastien Montagne <montagne29@wanadoo.fr>   2015-07-11 17:45:54 +0300
committer  Bastien Montagne <montagne29@wanadoo.fr>   2015-07-11 17:45:54 +0300
commit     3cc970eaf5c896b8103a88d959ebb0776e4ad590 (patch)
tree       c377b9479eaa2794b89a481845993112eebf90ea /io_scene_obj
parent     7194d2205ecb9920449cc5fac5b0f9418be5c173 (diff)
OBJ IO: add some progress reporting for the user.
Use the new 'progress_report' util module to give a (basic) report of I/O progress, together with some logging/timing of the main steps. We could probably go much further, but for now it will do. Files are now also opened through context managers. (A minimal sketch of the module's API, as inferred from this patch, follows the diffstat below.)
Diffstat (limited to 'io_scene_obj')
-rw-r--r--  io_scene_obj/__init__.py       2
-rw-r--r--  io_scene_obj/export_obj.py  1061
-rw-r--r--  io_scene_obj/import_obj.py   636
3 files changed, 850 insertions, 849 deletions
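
The 'progress_report' module itself is not part of this diff, so the sketch below reconstructs its apparent API purely from the calls made in the patch: both classes are context managers, reporters expose enter_substeps()/step()/leave_substeps(), and ProgressReportSubstep(parent, nbr_steps, enter_msg, exit_msg) nests inside a parent reporter. The loop body and messages are illustrative placeholders, and the assumption that the final step is closed on context exit is inferred from how write_file() uses it (6 substeps, 5 explicit step() calls).

    from progress_report import ProgressReport, ProgressReportSubstep

    def report_export(context, objects):
        # Top-level reporter; attaching the window manager lets Blender's UI
        # display the progress (as load() and _write() do in this patch).
        with ProgressReport(context.window_manager) as progress:
            progress.enter_substeps(len(objects), "Exporting objects...")
            for ob in objects:
                # A nested reporter covering a fixed number of steps (here 3);
                # its enter/exit messages are emitted automatically.
                with ProgressReportSubstep(progress, 3,
                                           "Exporting %s..." % ob.name,
                                           "Finished %s." % ob.name) as substep:
                    ...  # step 1: write geometry (placeholder)
                    substep.step("geometry written, writing UVs")
                    ...  # step 2: write UVs (placeholder)
                    substep.step("UVs written, writing normals")
                    ...  # step 3: write normals; closed when the context exits
            progress.leave_substeps("Export finished")
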
diff --git a/io_scene_obj/__init__.py b/io_scene_obj/__init__.py
index b356da75..508445e8 100644
--- a/io_scene_obj/__init__.py
+++ b/io_scene_obj/__init__.py
@@ -21,7 +21,7 @@
bl_info = {
"name": "Wavefront OBJ format",
"author": "Campbell Barton, Bastien Montagne",
- "version": (2, 1, 3),
+ "version": (2, 2, 0),
"blender": (2, 74, 0),
"location": "File > Import-Export",
"description": "Import-Export OBJ, Import OBJ mesh, UV's, "
diff --git a/io_scene_obj/export_obj.py b/io_scene_obj/export_obj.py
index d47555c1..7399c2e1 100644
--- a/io_scene_obj/export_obj.py
+++ b/io_scene_obj/export_obj.py
@@ -19,12 +19,13 @@
# <pep8 compliant>
import os
-import time
import bpy
import mathutils
import bpy_extras.io_utils
+from progress_report import ProgressReport, ProgressReportSubstep
+
def name_compat(name):
if name is None:
@@ -54,132 +55,129 @@ def write_mtl(scene, filepath, path_mode, copy_set, mtl_dict):
source_dir = os.path.dirname(bpy.data.filepath)
dest_dir = os.path.dirname(filepath)
- file = open(filepath, "w", encoding="utf8", newline="\n")
- fw = file.write
-
- fw('# Blender MTL File: %r\n' % (os.path.basename(bpy.data.filepath) or "None"))
- fw('# Material Count: %i\n' % len(mtl_dict))
+ with open(filepath, "w", encoding="utf8", newline="\n") as f:
+ fw = f.write
- mtl_dict_values = list(mtl_dict.values())
- mtl_dict_values.sort(key=lambda m: m[0])
+ fw('# Blender MTL File: %r\n' % (os.path.basename(bpy.data.filepath) or "None"))
+ fw('# Material Count: %i\n' % len(mtl_dict))
- # Write material/image combinations we have used.
- # Using mtl_dict.values() directly gives un-predictable order.
- for mtl_mat_name, mat, face_img in mtl_dict_values:
+ mtl_dict_values = list(mtl_dict.values())
+ mtl_dict_values.sort(key=lambda m: m[0])
- # Get the Blender data for the material and the image.
- # Having an image named None will make a bug, dont do it :)
+ # Write material/image combinations we have used.
+ # Using mtl_dict.values() directly gives un-predictable order.
+ for mtl_mat_name, mat, face_img in mtl_dict_values:
+ # Get the Blender data for the material and the image.
+ # Having an image named None will make a bug, dont do it :)
- fw('\nnewmtl %s\n' % mtl_mat_name) # Define a new material: matname_imgname
+ fw('\nnewmtl %s\n' % mtl_mat_name) # Define a new material: matname_imgname
- if mat:
- use_mirror = mat.raytrace_mirror.use and mat.raytrace_mirror.reflect_factor != 0.0
+ if mat:
+ use_mirror = mat.raytrace_mirror.use and mat.raytrace_mirror.reflect_factor != 0.0
- # convert from blenders spec to 0 - 1000 range.
- if mat.specular_shader == 'WARDISO':
- tspec = (0.4 - mat.specular_slope) / 0.0004
- else:
- tspec = (mat.specular_hardness - 1) / 0.51
- fw('Ns %.6f\n' % tspec)
- del tspec
+ # convert from blenders spec to 0 - 1000 range.
+ if mat.specular_shader == 'WARDISO':
+ tspec = (0.4 - mat.specular_slope) / 0.0004
+ else:
+ tspec = (mat.specular_hardness - 1) / 0.51
+ fw('Ns %.6f\n' % tspec)
+ del tspec
- # Ambient
- if use_mirror:
- fw('Ka %.6f %.6f %.6f\n' % (mat.raytrace_mirror.reflect_factor * mat.mirror_color)[:])
- else:
- fw('Ka %.6f %.6f %.6f\n' % (mat.ambient, mat.ambient, mat.ambient)) # Do not use world color!
- fw('Kd %.6f %.6f %.6f\n' % (mat.diffuse_intensity * mat.diffuse_color)[:]) # Diffuse
- fw('Ks %.6f %.6f %.6f\n' % (mat.specular_intensity * mat.specular_color)[:]) # Specular
- if hasattr(mat, "raytrace_transparency") and hasattr(mat.raytrace_transparency, "ior"):
- fw('Ni %.6f\n' % mat.raytrace_transparency.ior) # Refraction index
- else:
- fw('Ni %.6f\n' % 1.0)
- fw('d %.6f\n' % mat.alpha) # Alpha (obj uses 'd' for dissolve)
-
- # See http://en.wikipedia.org/wiki/Wavefront_.obj_file for whole list of values...
- # Note that mapping is rather fuzzy sometimes, trying to do our best here.
- if mat.use_shadeless:
- fw('illum 0\n') # ignore lighting
- elif mat.specular_intensity == 0:
- fw('illum 1\n') # no specular.
- elif use_mirror:
- if mat.use_transparency and mat.transparency_method == 'RAYTRACE':
- if mat.raytrace_mirror.fresnel != 0.0:
- fw('illum 7\n') # Reflection, Transparency, Ray trace and Fresnel
+ # Ambient
+ if use_mirror:
+ fw('Ka %.6f %.6f %.6f\n' % (mat.raytrace_mirror.reflect_factor * mat.mirror_color)[:])
+ else:
+ fw('Ka %.6f %.6f %.6f\n' % (mat.ambient, mat.ambient, mat.ambient)) # Do not use world color!
+ fw('Kd %.6f %.6f %.6f\n' % (mat.diffuse_intensity * mat.diffuse_color)[:]) # Diffuse
+ fw('Ks %.6f %.6f %.6f\n' % (mat.specular_intensity * mat.specular_color)[:]) # Specular
+ if hasattr(mat, "raytrace_transparency") and hasattr(mat.raytrace_transparency, "ior"):
+ fw('Ni %.6f\n' % mat.raytrace_transparency.ior) # Refraction index
+ else:
+ fw('Ni %.6f\n' % 1.0)
+ fw('d %.6f\n' % mat.alpha) # Alpha (obj uses 'd' for dissolve)
+
+ # See http://en.wikipedia.org/wiki/Wavefront_.obj_file for whole list of values...
+ # Note that mapping is rather fuzzy sometimes, trying to do our best here.
+ if mat.use_shadeless:
+ fw('illum 0\n') # ignore lighting
+ elif mat.specular_intensity == 0:
+ fw('illum 1\n') # no specular.
+ elif use_mirror:
+ if mat.use_transparency and mat.transparency_method == 'RAYTRACE':
+ if mat.raytrace_mirror.fresnel != 0.0:
+ fw('illum 7\n') # Reflection, Transparency, Ray trace and Fresnel
+ else:
+ fw('illum 6\n') # Reflection, Transparency, Ray trace
+ elif mat.raytrace_mirror.fresnel != 0.0:
+ fw('illum 5\n') # Reflection, Ray trace and Fresnel
else:
- fw('illum 6\n') # Reflection, Transparency, Ray trace
- elif mat.raytrace_mirror.fresnel != 0.0:
- fw('illum 5\n') # Reflection, Ray trace and Fresnel
+ fw('illum 3\n') # Reflection and Ray trace
+ elif mat.use_transparency and mat.transparency_method == 'RAYTRACE':
+ fw('illum 9\n') # 'Glass' transparency and no Ray trace reflection... fuzzy matching, but...
else:
- fw('illum 3\n') # Reflection and Ray trace
- elif mat.use_transparency and mat.transparency_method == 'RAYTRACE':
- fw('illum 9\n') # 'Glass' transparency and no Ray trace reflection... fuzzy matching, but...
+ fw('illum 2\n') # light normaly
+
else:
+ # Write a dummy material here?
+ fw('Ns 0\n')
+ fw('Ka %.6f %.6f %.6f\n' % world_amb[:]) # Ambient, uses mirror color,
+ fw('Kd 0.8 0.8 0.8\n')
+ fw('Ks 0.8 0.8 0.8\n')
+ fw('d 1\n') # No alpha
fw('illum 2\n') # light normaly
- else:
- # Write a dummy material here?
- fw('Ns 0\n')
- fw('Ka %.6f %.6f %.6f\n' % world_amb[:]) # Ambient, uses mirror color,
- fw('Kd 0.8 0.8 0.8\n')
- fw('Ks 0.8 0.8 0.8\n')
- fw('d 1\n') # No alpha
- fw('illum 2\n') # light normaly
-
- # Write images!
- if face_img: # We have an image on the face!
- filepath = face_img.filepath
- if filepath: # may be '' for generated images
- # write relative image path
- filepath = bpy_extras.io_utils.path_reference(filepath, source_dir, dest_dir,
- path_mode, "", copy_set, face_img.library)
- fw('map_Kd %s\n' % filepath) # Diffuse mapping image
- del filepath
- else:
- # so we write the materials image.
- face_img = None
-
- if mat: # No face image. if we havea material search for MTex image.
- image_map = {}
- # backwards so topmost are highest priority
- for mtex in reversed(mat.texture_slots):
- if mtex and mtex.texture and mtex.texture.type == 'IMAGE':
- image = mtex.texture.image
- if image:
- # texface overrides others
- if (mtex.use_map_color_diffuse and (face_img is None) and
- (mtex.use_map_warp is False) and (mtex.texture_coords != 'REFLECTION')):
- image_map["map_Kd"] = image
- if mtex.use_map_ambient:
- image_map["map_Ka"] = image
- # this is the Spec intensity channel but Ks stands for specular Color
- '''
- if mtex.use_map_specular:
- image_map["map_Ks"] = image
- '''
- if mtex.use_map_color_spec: # specular color
- image_map["map_Ks"] = image
- if mtex.use_map_hardness: # specular hardness/glossiness
- image_map["map_Ns"] = image
- if mtex.use_map_alpha:
- image_map["map_d"] = image
- if mtex.use_map_translucency:
- image_map["map_Tr"] = image
- if mtex.use_map_normal:
- image_map["map_Bump"] = image
- if mtex.use_map_displacement:
- image_map["disp"] = image
- if mtex.use_map_color_diffuse and (mtex.texture_coords == 'REFLECTION'):
- image_map["refl"] = image
- if mtex.use_map_emit:
- image_map["map_Ke"] = image
-
- for key, image in sorted(image_map.items()):
- filepath = bpy_extras.io_utils.path_reference(image.filepath, source_dir, dest_dir,
- path_mode, "", copy_set, image.library)
- fw('%s %s\n' % (key, repr(filepath)[1:-1]))
-
- file.close()
+ # Write images!
+ if face_img: # We have an image on the face!
+ filepath = face_img.filepath
+ if filepath: # may be '' for generated images
+ # write relative image path
+ filepath = bpy_extras.io_utils.path_reference(filepath, source_dir, dest_dir,
+ path_mode, "", copy_set, face_img.library)
+ fw('map_Kd %s\n' % filepath) # Diffuse mapping image
+ del filepath
+ else:
+ # so we write the materials image.
+ face_img = None
+
+ if mat: # No face image. if we havea material search for MTex image.
+ image_map = {}
+ # backwards so topmost are highest priority
+ for mtex in reversed(mat.texture_slots):
+ if mtex and mtex.texture and mtex.texture.type == 'IMAGE':
+ image = mtex.texture.image
+ if image:
+ # texface overrides others
+ if (mtex.use_map_color_diffuse and (face_img is None) and
+ (mtex.use_map_warp is False) and (mtex.texture_coords != 'REFLECTION')):
+ image_map["map_Kd"] = image
+ if mtex.use_map_ambient:
+ image_map["map_Ka"] = image
+ # this is the Spec intensity channel but Ks stands for specular Color
+ '''
+ if mtex.use_map_specular:
+ image_map["map_Ks"] = image
+ '''
+ if mtex.use_map_color_spec: # specular color
+ image_map["map_Ks"] = image
+ if mtex.use_map_hardness: # specular hardness/glossiness
+ image_map["map_Ns"] = image
+ if mtex.use_map_alpha:
+ image_map["map_d"] = image
+ if mtex.use_map_translucency:
+ image_map["map_Tr"] = image
+ if mtex.use_map_normal:
+ image_map["map_Bump"] = image
+ if mtex.use_map_displacement:
+ image_map["disp"] = image
+ if mtex.use_map_color_diffuse and (mtex.texture_coords == 'REFLECTION'):
+ image_map["refl"] = image
+ if mtex.use_map_emit:
+ image_map["map_Ke"] = image
+
+ for key, image in sorted(image_map.items()):
+ filepath = bpy_extras.io_utils.path_reference(image.filepath, source_dir, dest_dir,
+ path_mode, "", copy_set, image.library)
+ fw('%s %s\n' % (key, repr(filepath)[1:-1]))
def test_nurbs_compat(ob):
@@ -276,6 +274,7 @@ def write_file(filepath, objects, scene,
EXPORT_CURVE_AS_NURBS=True,
EXPORT_GLOBAL_MATRIX=None,
EXPORT_PATH_MODE='AUTO',
+ progress=ProgressReport(),
):
"""
Basic write function. The context and options must be already set
@@ -283,7 +282,6 @@ def write_file(filepath, objects, scene,
eg.
write( 'c:\\test\\foobar.obj', Blender.Object.GetSelected() ) # Using default options.
"""
-
if EXPORT_GLOBAL_MATRIX is None:
EXPORT_GLOBAL_MATRIX = mathutils.Matrix()
@@ -313,378 +311,387 @@ def write_file(filepath, objects, scene,
else:
return '(null)'
- print('OBJ Export path: %r' % filepath)
-
- time1 = time.time()
-
- file = open(filepath, "w", encoding="utf8", newline="\n")
- fw = file.write
-
- # Write Header
- fw('# Blender v%s OBJ File: %r\n' % (bpy.app.version_string, os.path.basename(bpy.data.filepath)))
- fw('# www.blender.org\n')
-
- # Tell the obj file what material file to use.
- if EXPORT_MTL:
- mtlfilepath = os.path.splitext(filepath)[0] + ".mtl"
- fw('mtllib %s\n' % repr(os.path.basename(mtlfilepath))[1:-1]) # filepath can contain non utf8 chars, use repr
-
- # Initialize totals, these are updated each object
- totverts = totuvco = totno = 1
-
- face_vert_index = 1
-
- # A Dict of Materials
- # (material.name, image.name):matname_imagename # matname_imagename has gaps removed.
- mtl_dict = {}
- # Used to reduce the usage of matname_texname materials, which can become annoying in case of
- # repeated exports/imports, yet keeping unique mat names per keys!
- # mtl_name: (material.name, image.name)
- mtl_rev_dict = {}
-
- copy_set = set()
-
- # Get all meshes
- for ob_main in objects:
-
- # ignore dupli children
- if ob_main.parent and ob_main.parent.dupli_type in {'VERTS', 'FACES'}:
- # XXX
- print(ob_main.name, 'is a dupli child - ignoring')
- continue
-
- obs = []
- if ob_main.dupli_type != 'NONE':
- # XXX
- print('creating dupli_list on', ob_main.name)
- ob_main.dupli_list_create(scene)
-
- obs = [(dob.object, dob.matrix) for dob in ob_main.dupli_list]
-
- # XXX debug print
- print(ob_main.name, 'has', len(obs), 'dupli children')
- else:
- obs = [(ob_main, ob_main.matrix_world)]
-
- for ob, ob_mat in obs:
- uv_unique_count = no_unique_count = 0
-
- # Nurbs curve support
- if EXPORT_CURVE_AS_NURBS and test_nurbs_compat(ob):
- ob_mat = EXPORT_GLOBAL_MATRIX * ob_mat
- totverts += write_nurb(fw, ob, ob_mat)
- continue
- # END NURBS
-
- try:
- me = ob.to_mesh(scene, EXPORT_APPLY_MODIFIERS, 'PREVIEW', calc_tessface=False)
- except RuntimeError:
- me = None
-
- if me is None:
- continue
-
- me.transform(EXPORT_GLOBAL_MATRIX * ob_mat)
-
- if EXPORT_TRI:
- # _must_ do this first since it re-allocs arrays
- mesh_triangulate(me)
-
- if EXPORT_UV:
- faceuv = len(me.uv_textures) > 0
- if faceuv:
- uv_texture = me.uv_textures.active.data[:]
- uv_layer = me.uv_layers.active.data[:]
- else:
- faceuv = False
-
- me_verts = me.vertices[:]
-
- # Make our own list so it can be sorted to reduce context switching
- face_index_pairs = [(face, index) for index, face in enumerate(me.polygons)]
- # faces = [ f for f in me.tessfaces ]
-
- if EXPORT_EDGES:
- edges = me.edges
- else:
- edges = []
-
- if not (len(face_index_pairs) + len(edges) + len(me.vertices)): # Make sure there is somthing to write
-
- # clean up
- bpy.data.meshes.remove(me)
-
- continue # dont bother with this mesh.
-
- if EXPORT_NORMALS and face_index_pairs:
- me.calc_normals_split()
- # No need to call me.free_normals_split later, as this mesh is deleted anyway!
- loops = me.loops
- else:
- loops = []
-
- if (EXPORT_SMOOTH_GROUPS or EXPORT_SMOOTH_GROUPS_BITFLAGS) and face_index_pairs:
- smooth_groups, smooth_groups_tot = me.calc_smooth_groups(EXPORT_SMOOTH_GROUPS_BITFLAGS)
- if smooth_groups_tot <= 1:
- smooth_groups, smooth_groups_tot = (), 0
- else:
- smooth_groups, smooth_groups_tot = (), 0
-
- materials = me.materials[:]
- material_names = [m.name if m else None for m in materials]
-
- # avoid bad index errors
- if not materials:
- materials = [None]
- material_names = [name_compat(None)]
-
- # Sort by Material, then images
- # so we dont over context switch in the obj file.
- if EXPORT_KEEP_VERT_ORDER:
- pass
- else:
- if faceuv:
- if smooth_groups:
- sort_func = lambda a: (a[0].material_index,
- hash(uv_texture[a[1]].image),
- smooth_groups[a[1]] if a[0].use_smooth else False)
- else:
- sort_func = lambda a: (a[0].material_index,
- hash(uv_texture[a[1]].image),
- a[0].use_smooth)
- elif len(materials) > 1:
- if smooth_groups:
- sort_func = lambda a: (a[0].material_index,
- smooth_groups[a[1]] if a[0].use_smooth else False)
- else:
- sort_func = lambda a: (a[0].material_index,
- a[0].use_smooth)
- else:
- # no materials
- if smooth_groups:
- sort_func = lambda a: smooth_groups[a[1] if a[0].use_smooth else False]
- else:
- sort_func = lambda a: a[0].use_smooth
-
- face_index_pairs.sort(key=sort_func)
-
- del sort_func
-
- # Set the default mat to no material and no image.
- contextMat = 0, 0 # Can never be this, so we will label a new material the first chance we get.
- contextSmooth = None # Will either be true or false, set bad to force initialization switch.
-
- if EXPORT_BLEN_OBS or EXPORT_GROUP_BY_OB:
- name1 = ob.name
- name2 = ob.data.name
- if name1 == name2:
- obnamestring = name_compat(name1)
- else:
- obnamestring = '%s_%s' % (name_compat(name1), name_compat(name2))
-
- if EXPORT_BLEN_OBS:
- fw('o %s\n' % obnamestring) # Write Object name
- else: # if EXPORT_GROUP_BY_OB:
- fw('g %s\n' % obnamestring)
-
- # Vert
- for v in me_verts:
- fw('v %.6f %.6f %.6f\n' % v.co[:])
-
- # UV
- if faceuv:
- # in case removing some of these dont get defined.
- uv = f_index = uv_index = uv_key = uv_val = uv_ls = None
-
- uv_face_mapping = [None] * len(face_index_pairs)
-
- uv_dict = {}
- uv_get = uv_dict.get
- for f, f_index in face_index_pairs:
- uv_ls = uv_face_mapping[f_index] = []
- for uv_index, l_index in enumerate(f.loop_indices):
- uv = uv_layer[l_index].uv
- uv_key = veckey2d(uv)
- uv_val = uv_get(uv_key)
- if uv_val is None:
- uv_val = uv_dict[uv_key] = uv_unique_count
- fw('vt %.6f %.6f\n' % uv[:])
- uv_unique_count += 1
- uv_ls.append(uv_val)
-
- del uv_dict, uv, f_index, uv_index, uv_ls, uv_get, uv_key, uv_val
- # Only need uv_unique_count and uv_face_mapping
-
- # NORMAL, Smooth/Non smoothed.
- if EXPORT_NORMALS:
- no_key = no_val = None
- normals_to_idx = {}
- no_get = normals_to_idx.get
- loops_to_normals = [0] * len(loops)
- for f, f_index in face_index_pairs:
- for l_idx in f.loop_indices:
- no_key = veckey3d(loops[l_idx].normal)
- no_val = no_get(no_key)
- if no_val is None:
- no_val = normals_to_idx[no_key] = no_unique_count
- fw('vn %.6f %.6f %.6f\n' % no_key)
- no_unique_count += 1
- loops_to_normals[l_idx] = no_val
- del normals_to_idx, no_get, no_key, no_val
- else:
- loops_to_normals = []
-
- if not faceuv:
- f_image = None
-
- # XXX
- if EXPORT_POLYGROUPS:
- # Retrieve the list of vertex groups
- vertGroupNames = ob.vertex_groups.keys()
- if vertGroupNames:
- currentVGroup = ''
- # Create a dictionary keyed by face id and listing, for each vertex, the vertex groups it belongs to
- vgroupsMap = [[] for _i in range(len(me_verts))]
- for v_idx, v_ls in enumerate(vgroupsMap):
- v_ls[:] = [(vertGroupNames[g.group], g.weight) for g in me_verts[v_idx].groups]
-
- for f, f_index in face_index_pairs:
- f_smooth = f.use_smooth
- if f_smooth and smooth_groups:
- f_smooth = smooth_groups[f_index]
- f_mat = min(f.material_index, len(materials) - 1)
-
- if faceuv:
- tface = uv_texture[f_index]
- f_image = tface.image
-
- # MAKE KEY
- if faceuv and f_image: # Object is always true.
- key = material_names[f_mat], f_image.name
+ with ProgressReportSubstep(progress, 2, "OBJ Export path: %r" % filepath, "OBJ Export Finished") as subprogress1:
+ with open(filepath, "w", encoding="utf8", newline="\n") as f:
+ fw = f.write
+
+ # Write Header
+ fw('# Blender v%s OBJ File: %r\n' % (bpy.app.version_string, os.path.basename(bpy.data.filepath)))
+ fw('# www.blender.org\n')
+
+ # Tell the obj file what material file to use.
+ if EXPORT_MTL:
+ mtlfilepath = os.path.splitext(filepath)[0] + ".mtl"
+ # filepath can contain non utf8 chars, use repr
+ fw('mtllib %s\n' % repr(os.path.basename(mtlfilepath))[1:-1])
+
+ # Initialize totals, these are updated each object
+ totverts = totuvco = totno = 1
+
+ face_vert_index = 1
+
+ # A Dict of Materials
+ # (material.name, image.name):matname_imagename # matname_imagename has gaps removed.
+ mtl_dict = {}
+ # Used to reduce the usage of matname_texname materials, which can become annoying in case of
+ # repeated exports/imports, yet keeping unique mat names per keys!
+ # mtl_name: (material.name, image.name)
+ mtl_rev_dict = {}
+
+ copy_set = set()
+
+ # Get all meshes
+ subprogress1.enter_substeps(len(objects))
+ for i, ob_main in enumerate(objects):
+ # ignore dupli children
+ if ob_main.parent and ob_main.parent.dupli_type in {'VERTS', 'FACES'}:
+ # XXX
+ subprogress1.step("Ignoring %s, dupli child..." % ob_main.name)
+ continue
+
+ obs = []
+ if ob_main.dupli_type != 'NONE':
+ # XXX
+ print('creating dupli_list on', ob_main.name)
+ ob_main.dupli_list_create(scene)
+
+ obs = [(dob.object, dob.matrix) for dob in ob_main.dupli_list]
+
+ # XXX debug print
+ print(ob_main.name, 'has', len(obs), 'dupli children')
else:
- key = material_names[f_mat], None # No image, use None instead.
-
- # Write the vertex group
- if EXPORT_POLYGROUPS:
- if vertGroupNames:
- # find what vertext group the face belongs to
- vgroup_of_face = findVertexGroupName(f, vgroupsMap)
- if vgroup_of_face != currentVGroup:
- currentVGroup = vgroup_of_face
- fw('g %s\n' % vgroup_of_face)
-
- # CHECK FOR CONTEXT SWITCH
- if key == contextMat:
- pass # Context already switched, dont do anything
- else:
- if key[0] is None and key[1] is None:
- # Write a null material, since we know the context has changed.
- if EXPORT_GROUP_BY_MAT:
- # can be mat_image or (null)
- fw("g %s_%s\n" % (name_compat(ob.name), name_compat(ob.data.name)))
- if EXPORT_MTL:
- fw("usemtl (null)\n") # mat, image
-
- else:
- mat_data = mtl_dict.get(key)
- if not mat_data:
- # First add to global dict so we can export to mtl
- # Then write mtl
-
- # Make a new names from the mat and image name,
- # converting any spaces to underscores with name_compat.
-
- # If none image dont bother adding it to the name
- # Try to avoid as much as possible adding texname (or other things)
- # to the mtl name (see [#32102])...
- mtl_name = "%s" % name_compat(key[0])
- if mtl_rev_dict.get(mtl_name, None) not in {key, None}:
- if key[1] is None:
- tmp_ext = "_NONE"
- else:
- tmp_ext = "_%s" % name_compat(key[1])
- i = 0
- while mtl_rev_dict.get(mtl_name + tmp_ext, None) not in {key, None}:
- i += 1
- tmp_ext = "_%3d" % i
- mtl_name += tmp_ext
- mat_data = mtl_dict[key] = mtl_name, materials[f_mat], f_image
- mtl_rev_dict[mtl_name] = key
-
- if EXPORT_GROUP_BY_MAT:
- # can be mat_image or (null)
- fw("g %s_%s_%s\n" % (name_compat(ob.name), name_compat(ob.data.name), mat_data[0]))
- if EXPORT_MTL:
- fw("usemtl %s\n" % mat_data[0]) # can be mat_image or (null)
-
- contextMat = key
- if f_smooth != contextSmooth:
- if f_smooth: # on now off
- if smooth_groups:
- f_smooth = smooth_groups[f_index]
- fw('s %d\n' % f_smooth)
+ obs = [(ob_main, ob_main.matrix_world)]
+
+ subprogress1.enter_substeps(len(obs))
+ for ob, ob_mat in obs:
+ with ProgressReportSubstep(subprogress1, 6) as subprogress2:
+ uv_unique_count = no_unique_count = 0
+
+ # Nurbs curve support
+ if EXPORT_CURVE_AS_NURBS and test_nurbs_compat(ob):
+ ob_mat = EXPORT_GLOBAL_MATRIX * ob_mat
+ totverts += write_nurb(fw, ob, ob_mat)
+ continue
+ # END NURBS
+
+ try:
+ me = ob.to_mesh(scene, EXPORT_APPLY_MODIFIERS, 'PREVIEW', calc_tessface=False)
+ except RuntimeError:
+ me = None
+
+ if me is None:
+ continue
+
+ me.transform(EXPORT_GLOBAL_MATRIX * ob_mat)
+
+ if EXPORT_TRI:
+ # _must_ do this first since it re-allocs arrays
+ mesh_triangulate(me)
+
+ if EXPORT_UV:
+ faceuv = len(me.uv_textures) > 0
+ if faceuv:
+ uv_texture = me.uv_textures.active.data[:]
+ uv_layer = me.uv_layers.active.data[:]
else:
- fw('s 1\n')
- else: # was off now on
- fw('s off\n')
- contextSmooth = f_smooth
-
- f_v = [(vi, me_verts[v_idx], l_idx)
- for vi, (v_idx, l_idx) in enumerate(zip(f.vertices, f.loop_indices))]
+ faceuv = False
- fw('f')
- if faceuv:
- if EXPORT_NORMALS:
- for vi, v, li in f_v:
- fw(" %d/%d/%d" % (totverts + v.index,
- totuvco + uv_face_mapping[f_index][vi],
- totno + loops_to_normals[li],
- )) # vert, uv, normal
- else: # No Normals
- for vi, v, li in f_v:
- fw(" %d/%d" % (totverts + v.index,
- totuvco + uv_face_mapping[f_index][vi],
- )) # vert, uv
+ me_verts = me.vertices[:]
- face_vert_index += len(f_v)
+ # Make our own list so it can be sorted to reduce context switching
+ face_index_pairs = [(face, index) for index, face in enumerate(me.polygons)]
+ # faces = [ f for f in me.tessfaces ]
- else: # No UV's
- if EXPORT_NORMALS:
- for vi, v, li in f_v:
- fw(" %d//%d" % (totverts + v.index, totno + loops_to_normals[li]))
- else: # No Normals
- for vi, v, li in f_v:
- fw(" %d" % (totverts + v.index))
-
- fw('\n')
-
- # Write edges.
- if EXPORT_EDGES:
- for ed in edges:
- if ed.is_loose:
- fw('l %d %d\n' % (totverts + ed.vertices[0], totverts + ed.vertices[1]))
+ if EXPORT_EDGES:
+ edges = me.edges
+ else:
+ edges = []
- # Make the indices global rather then per mesh
- totverts += len(me_verts)
- totuvco += uv_unique_count
- totno += no_unique_count
+ if not (len(face_index_pairs) + len(edges) + len(me.vertices)): # Make sure there is something to write
+ # clean up
+ bpy.data.meshes.remove(me)
+ continue # dont bother with this mesh.
- # clean up
- bpy.data.meshes.remove(me)
+ if EXPORT_NORMALS and face_index_pairs:
+ me.calc_normals_split()
+ # No need to call me.free_normals_split later, as this mesh is deleted anyway!
+ loops = me.loops
+ else:
+ loops = []
- if ob_main.dupli_type != 'NONE':
- ob_main.dupli_list_clear()
+ if (EXPORT_SMOOTH_GROUPS or EXPORT_SMOOTH_GROUPS_BITFLAGS) and face_index_pairs:
+ smooth_groups, smooth_groups_tot = me.calc_smooth_groups(EXPORT_SMOOTH_GROUPS_BITFLAGS)
+ if smooth_groups_tot <= 1:
+ smooth_groups, smooth_groups_tot = (), 0
+ else:
+ smooth_groups, smooth_groups_tot = (), 0
- file.close()
+ materials = me.materials[:]
+ material_names = [m.name if m else None for m in materials]
- # Now we have all our materials, save them
- if EXPORT_MTL:
- write_mtl(scene, mtlfilepath, EXPORT_PATH_MODE, copy_set, mtl_dict)
+ # avoid bad index errors
+ if not materials:
+ materials = [None]
+ material_names = [name_compat(None)]
- # copy all collected files.
- bpy_extras.io_utils.path_reference_copy(copy_set)
+ # Sort by Material, then images
+ # so we dont over context switch in the obj file.
+ if EXPORT_KEEP_VERT_ORDER:
+ pass
+ else:
+ if faceuv:
+ if smooth_groups:
+ sort_func = lambda a: (a[0].material_index,
+ hash(uv_texture[a[1]].image),
+ smooth_groups[a[1]] if a[0].use_smooth else False)
+ else:
+ sort_func = lambda a: (a[0].material_index,
+ hash(uv_texture[a[1]].image),
+ a[0].use_smooth)
+ elif len(materials) > 1:
+ if smooth_groups:
+ sort_func = lambda a: (a[0].material_index,
+ smooth_groups[a[1]] if a[0].use_smooth else False)
+ else:
+ sort_func = lambda a: (a[0].material_index,
+ a[0].use_smooth)
+ else:
+ # no materials
+ if smooth_groups:
+ sort_func = lambda a: smooth_groups[a[1] if a[0].use_smooth else False]
+ else:
+ sort_func = lambda a: a[0].use_smooth
+
+ face_index_pairs.sort(key=sort_func)
+
+ del sort_func
+
+ # Set the default mat to no material and no image.
+ contextMat = 0, 0 # Can never be this, so we will label a new material the first chance we get.
+ contextSmooth = None # Will either be true or false, set bad to force initialization switch.
+
+ if EXPORT_BLEN_OBS or EXPORT_GROUP_BY_OB:
+ name1 = ob.name
+ name2 = ob.data.name
+ if name1 == name2:
+ obnamestring = name_compat(name1)
+ else:
+ obnamestring = '%s_%s' % (name_compat(name1), name_compat(name2))
+
+ if EXPORT_BLEN_OBS:
+ fw('o %s\n' % obnamestring) # Write Object name
+ else: # if EXPORT_GROUP_BY_OB:
+ fw('g %s\n' % obnamestring)
+
+ subprogress2.step()
+
+ # Vert
+ for v in me_verts:
+ fw('v %.6f %.6f %.6f\n' % v.co[:])
+
+ subprogress2.step()
+
+ # UV
+ if faceuv:
+ # in case removing some of these dont get defined.
+ uv = f_index = uv_index = uv_key = uv_val = uv_ls = None
+
+ uv_face_mapping = [None] * len(face_index_pairs)
+
+ uv_dict = {}
+ uv_get = uv_dict.get
+ for f, f_index in face_index_pairs:
+ uv_ls = uv_face_mapping[f_index] = []
+ for uv_index, l_index in enumerate(f.loop_indices):
+ uv = uv_layer[l_index].uv
+ uv_key = veckey2d(uv)
+ uv_val = uv_get(uv_key)
+ if uv_val is None:
+ uv_val = uv_dict[uv_key] = uv_unique_count
+ fw('vt %.6f %.6f\n' % uv[:])
+ uv_unique_count += 1
+ uv_ls.append(uv_val)
+
+ del uv_dict, uv, f_index, uv_index, uv_ls, uv_get, uv_key, uv_val
+ # Only need uv_unique_count and uv_face_mapping
+
+ subprogress2.step()
+
+ # NORMAL, Smooth/Non smoothed.
+ if EXPORT_NORMALS:
+ no_key = no_val = None
+ normals_to_idx = {}
+ no_get = normals_to_idx.get
+ loops_to_normals = [0] * len(loops)
+ for f, f_index in face_index_pairs:
+ for l_idx in f.loop_indices:
+ no_key = veckey3d(loops[l_idx].normal)
+ no_val = no_get(no_key)
+ if no_val is None:
+ no_val = normals_to_idx[no_key] = no_unique_count
+ fw('vn %.6f %.6f %.6f\n' % no_key)
+ no_unique_count += 1
+ loops_to_normals[l_idx] = no_val
+ del normals_to_idx, no_get, no_key, no_val
+ else:
+ loops_to_normals = []
+
+ if not faceuv:
+ f_image = None
+
+ subprogress2.step()
+
+ # XXX
+ if EXPORT_POLYGROUPS:
+ # Retrieve the list of vertex groups
+ vertGroupNames = ob.vertex_groups.keys()
+ if vertGroupNames:
+ currentVGroup = ''
+ # Create a dictionary keyed by face id and listing, for each vertex, the vertex groups it belongs to
+ vgroupsMap = [[] for _i in range(len(me_verts))]
+ for v_idx, v_ls in enumerate(vgroupsMap):
+ v_ls[:] = [(vertGroupNames[g.group], g.weight) for g in me_verts[v_idx].groups]
+
+ for f, f_index in face_index_pairs:
+ f_smooth = f.use_smooth
+ if f_smooth and smooth_groups:
+ f_smooth = smooth_groups[f_index]
+ f_mat = min(f.material_index, len(materials) - 1)
+
+ if faceuv:
+ tface = uv_texture[f_index]
+ f_image = tface.image
+
+ # MAKE KEY
+ if faceuv and f_image: # Object is always true.
+ key = material_names[f_mat], f_image.name
+ else:
+ key = material_names[f_mat], None # No image, use None instead.
+
+ # Write the vertex group
+ if EXPORT_POLYGROUPS:
+ if vertGroupNames:
+ # find what vertext group the face belongs to
+ vgroup_of_face = findVertexGroupName(f, vgroupsMap)
+ if vgroup_of_face != currentVGroup:
+ currentVGroup = vgroup_of_face
+ fw('g %s\n' % vgroup_of_face)
+
+ # CHECK FOR CONTEXT SWITCH
+ if key == contextMat:
+ pass # Context already switched, dont do anything
+ else:
+ if key[0] is None and key[1] is None:
+ # Write a null material, since we know the context has changed.
+ if EXPORT_GROUP_BY_MAT:
+ # can be mat_image or (null)
+ fw("g %s_%s\n" % (name_compat(ob.name), name_compat(ob.data.name)))
+ if EXPORT_MTL:
+ fw("usemtl (null)\n") # mat, image
- print("OBJ Export time: %.2f" % (time.time() - time1))
+ else:
+ mat_data = mtl_dict.get(key)
+ if not mat_data:
+ # First add to global dict so we can export to mtl
+ # Then write mtl
+
+ # Make a new names from the mat and image name,
+ # converting any spaces to underscores with name_compat.
+
+ # If none image dont bother adding it to the name
+ # Try to avoid as much as possible adding texname (or other things)
+ # to the mtl name (see [#32102])...
+ mtl_name = "%s" % name_compat(key[0])
+ if mtl_rev_dict.get(mtl_name, None) not in {key, None}:
+ if key[1] is None:
+ tmp_ext = "_NONE"
+ else:
+ tmp_ext = "_%s" % name_compat(key[1])
+ i = 0
+ while mtl_rev_dict.get(mtl_name + tmp_ext, None) not in {key, None}:
+ i += 1
+ tmp_ext = "_%3d" % i
+ mtl_name += tmp_ext
+ mat_data = mtl_dict[key] = mtl_name, materials[f_mat], f_image
+ mtl_rev_dict[mtl_name] = key
+
+ if EXPORT_GROUP_BY_MAT:
+ # can be mat_image or (null)
+ fw("g %s_%s_%s\n" % (name_compat(ob.name), name_compat(ob.data.name), mat_data[0]))
+ if EXPORT_MTL:
+ fw("usemtl %s\n" % mat_data[0]) # can be mat_image or (null)
+
+ contextMat = key
+ if f_smooth != contextSmooth:
+ if f_smooth: # on now off
+ if smooth_groups:
+ f_smooth = smooth_groups[f_index]
+ fw('s %d\n' % f_smooth)
+ else:
+ fw('s 1\n')
+ else: # was off now on
+ fw('s off\n')
+ contextSmooth = f_smooth
+
+ f_v = [(vi, me_verts[v_idx], l_idx)
+ for vi, (v_idx, l_idx) in enumerate(zip(f.vertices, f.loop_indices))]
+
+ fw('f')
+ if faceuv:
+ if EXPORT_NORMALS:
+ for vi, v, li in f_v:
+ fw(" %d/%d/%d" % (totverts + v.index,
+ totuvco + uv_face_mapping[f_index][vi],
+ totno + loops_to_normals[li],
+ )) # vert, uv, normal
+ else: # No Normals
+ for vi, v, li in f_v:
+ fw(" %d/%d" % (totverts + v.index,
+ totuvco + uv_face_mapping[f_index][vi],
+ )) # vert, uv
+
+ face_vert_index += len(f_v)
+
+ else: # No UV's
+ if EXPORT_NORMALS:
+ for vi, v, li in f_v:
+ fw(" %d//%d" % (totverts + v.index, totno + loops_to_normals[li]))
+ else: # No Normals
+ for vi, v, li in f_v:
+ fw(" %d" % (totverts + v.index))
+
+ fw('\n')
+
+ subprogress2.step()
+
+ # Write edges.
+ if EXPORT_EDGES:
+ for ed in edges:
+ if ed.is_loose:
+ fw('l %d %d\n' % (totverts + ed.vertices[0], totverts + ed.vertices[1]))
+
+ # Make the indices global rather then per mesh
+ totverts += len(me_verts)
+ totuvco += uv_unique_count
+ totno += no_unique_count
+
+ # clean up
+ bpy.data.meshes.remove(me)
+
+ if ob_main.dupli_type != 'NONE':
+ ob_main.dupli_list_clear()
+
+ subprogress1.leave_substeps("Finished writing geometry of '%s'." % ob_main.name)
+ subprogress1.leave_substeps()
+
+ subprogress1.step("Finished exporting geometry, now exporting materials")
+
+ # Now we have all our materials, save them
+ if EXPORT_MTL:
+ write_mtl(scene, mtlfilepath, EXPORT_PATH_MODE, copy_set, mtl_dict)
+
+ # copy all collected files.
+ bpy_extras.io_utils.path_reference_copy(copy_set)
def _write(context, filepath,
@@ -708,62 +715,64 @@ def _write(context, filepath,
EXPORT_PATH_MODE, # Not used
):
- base_name, ext = os.path.splitext(filepath)
- context_name = [base_name, '', '', ext] # Base name, scene name, frame number, extension
+ with ProgressReport(context.window_manager) as progress:
+ base_name, ext = os.path.splitext(filepath)
+ context_name = [base_name, '', '', ext] # Base name, scene name, frame number, extension
- scene = context.scene
+ scene = context.scene
- # Exit edit mode before exporting, so current object states are exported properly.
- if bpy.ops.object.mode_set.poll():
- bpy.ops.object.mode_set(mode='OBJECT')
+ # Exit edit mode before exporting, so current object states are exported properly.
+ if bpy.ops.object.mode_set.poll():
+ bpy.ops.object.mode_set(mode='OBJECT')
- orig_frame = scene.frame_current
+ orig_frame = scene.frame_current
- # Export an animation?
- if EXPORT_ANIMATION:
- scene_frames = range(scene.frame_start, scene.frame_end + 1) # Up to and including the end frame.
- else:
- scene_frames = [orig_frame] # Dont export an animation.
+ # Export an animation?
+ if EXPORT_ANIMATION:
+ scene_frames = range(scene.frame_start, scene.frame_end + 1) # Up to and including the end frame.
+ else:
+ scene_frames = [orig_frame] # Dont export an animation.
- # Loop through all frames in the scene and export.
- for frame in scene_frames:
- if EXPORT_ANIMATION: # Add frame to the filepath.
- context_name[2] = '_%.6d' % frame
+ # Loop through all frames in the scene and export.
+ progress.enter_substeps(len(scene_frames))
+ for frame in scene_frames:
+ if EXPORT_ANIMATION: # Add frame to the filepath.
+ context_name[2] = '_%.6d' % frame
- scene.frame_set(frame, 0.0)
- if EXPORT_SEL_ONLY:
- objects = context.selected_objects
- else:
- objects = scene.objects
-
- full_path = ''.join(context_name)
-
- # erm... bit of a problem here, this can overwrite files when exporting frames. not too bad.
- # EXPORT THE FILE.
- write_file(full_path, objects, scene,
- EXPORT_TRI,
- EXPORT_EDGES,
- EXPORT_SMOOTH_GROUPS,
- EXPORT_SMOOTH_GROUPS_BITFLAGS,
- EXPORT_NORMALS,
- EXPORT_UV,
- EXPORT_MTL,
- EXPORT_APPLY_MODIFIERS,
- EXPORT_BLEN_OBS,
- EXPORT_GROUP_BY_OB,
- EXPORT_GROUP_BY_MAT,
- EXPORT_KEEP_VERT_ORDER,
- EXPORT_POLYGROUPS,
- EXPORT_CURVE_AS_NURBS,
- EXPORT_GLOBAL_MATRIX,
- EXPORT_PATH_MODE,
- )
-
- scene.frame_set(orig_frame, 0.0)
-
- # Restore old active scene.
-# orig_scene.makeCurrent()
-# Window.WaitCursor(0)
+ scene.frame_set(frame, 0.0)
+ if EXPORT_SEL_ONLY:
+ objects = context.selected_objects
+ else:
+ objects = scene.objects
+
+ full_path = ''.join(context_name)
+
+ # erm... bit of a problem here, this can overwrite files when exporting frames. not too bad.
+ # EXPORT THE FILE.
+ progress.enter_substeps(1)
+ write_file(full_path, objects, scene,
+ EXPORT_TRI,
+ EXPORT_EDGES,
+ EXPORT_SMOOTH_GROUPS,
+ EXPORT_SMOOTH_GROUPS_BITFLAGS,
+ EXPORT_NORMALS,
+ EXPORT_UV,
+ EXPORT_MTL,
+ EXPORT_APPLY_MODIFIERS,
+ EXPORT_BLEN_OBS,
+ EXPORT_GROUP_BY_OB,
+ EXPORT_GROUP_BY_MAT,
+ EXPORT_KEEP_VERT_ORDER,
+ EXPORT_POLYGROUPS,
+ EXPORT_CURVE_AS_NURBS,
+ EXPORT_GLOBAL_MATRIX,
+ EXPORT_PATH_MODE,
+ progress,
+ )
+ progress.leave_substeps()
+
+ scene.frame_set(orig_frame, 0.0)
+ progress.leave_substeps()
"""
diff --git a/io_scene_obj/import_obj.py b/io_scene_obj/import_obj.py
index 77c2cd4a..99bb65bf 100644
--- a/io_scene_obj/import_obj.py
+++ b/io_scene_obj/import_obj.py
@@ -39,6 +39,8 @@ import mathutils
from bpy_extras.io_utils import unpack_list
from bpy_extras.image_utils import load_image
+from progress_report import ProgressReport, ProgressReportSubstep
+
def line_value(line_split):
"""
@@ -838,344 +840,334 @@ def load(operator, context, filepath,
[], # If non-empty, that face is a Blender-invalid ngon (holes...), need a mutable object for that...
)
- print('\nimporting obj %r' % filepath)
-
- filepath = os.fsencode(filepath)
+ with ProgressReport(context.window_manager) as progress:
+ progress.enter_substeps(1, "Importing OBJ %r..." % filepath)
+
+ if global_matrix is None:
+ global_matrix = mathutils.Matrix()
+
+ if use_split_objects or use_split_groups:
+ use_groups_as_vgroups = False
+
+ time_main = time.time()
+
+ verts_loc = []
+ verts_nor = []
+ verts_tex = []
+ faces = [] # tuples of the faces
+ material_libs = [] # filanems to material libs this uses
+ vertex_groups = {} # when use_groups_as_vgroups is true
+
+ # Get the string to float conversion func for this file- is 'float' for almost all files.
+ float_func = get_float_func(filepath)
+
+ # Context variables
+ context_material = None
+ context_smooth_group = None
+ context_object = None
+ context_vgroup = None
+
+ # Nurbs
+ context_nurbs = {}
+ nurbs = []
+ context_parm = b'' # used by nurbs too but could be used elsewhere
+
+ # Until we can use sets
+ unique_materials = {}
+ unique_material_images = {}
+ unique_smooth_groups = {}
+ # unique_obects= {} - no use for this variable since the objects are stored in the face.
+
+ # when there are faces that end with \
+ # it means they are multiline-
+ # since we use xreadline we cant skip to the next line
+ # so we need to know whether
+ context_multi_line = b''
+
+ # Per-face handling data.
+ face_vert_loc_indices = None
+ face_vert_nor_indices = None
+ face_vert_tex_indices = None
+ face_vert_nor_valid = face_vert_tex_valid = False
+ face_items_usage = set()
+ face_invalid_blenpoly = None
+ prev_vidx = None
+ face = None
+ vec = []
+
+ progress.enter_substeps(3, "Parsing OBJ file...")
+ with open(filepath, 'rb') as f:
+ for line in f: # .readlines():
+ line_split = line.split()
- if global_matrix is None:
- global_matrix = mathutils.Matrix()
+ if not line_split:
+ continue
- if use_split_objects or use_split_groups:
- use_groups_as_vgroups = False
+ line_start = line_split[0] # we compare with this a _lot_
+
+ if line_start == b'v' or context_multi_line == b'v':
+ context_multi_line = handle_vec(line_start, context_multi_line, line_split, b'v', verts_loc, vec, 3)
+
+ elif line_start == b'vn' or context_multi_line == b'vn':
+ context_multi_line = handle_vec(line_start, context_multi_line, line_split, b'vn', verts_nor, vec, 3)
+
+ elif line_start == b'vt' or context_multi_line == b'vt':
+ context_multi_line = handle_vec(line_start, context_multi_line, line_split, b'vt', verts_tex, vec, 2)
+
+ # Handle faces lines (as faces) and the second+ lines of fa multiline face here
+ # use 'f' not 'f ' because some objs (very rare have 'fo ' for faces)
+ elif line_start == b'f' or context_multi_line == b'f':
+ if not context_multi_line:
+ line_split = line_split[1:]
+ # Instantiate a face
+ face = create_face(context_material, context_smooth_group, context_object)
+ (face_vert_loc_indices, face_vert_nor_indices, face_vert_tex_indices,
+ _1, _2, _3, face_invalid_blenpoly) = face
+ faces.append(face)
+ face_items_usage.clear()
+ # Else, use face_vert_loc_indices and face_vert_tex_indices previously defined and used the obj_face
+
+ context_multi_line = b'f' if strip_slash(line_split) else b''
+
+ for v in line_split:
+ obj_vert = v.split(b'/')
+ idx = int(obj_vert[0]) - 1
+ vert_loc_index = (idx + len(verts_loc) + 1) if (idx < 0) else idx
+ # Add the vertex to the current group
+ # *warning*, this wont work for files that have groups defined around verts
+ if use_groups_as_vgroups and context_vgroup:
+ vertex_groups[context_vgroup].append(vert_loc_index)
+ # This a first round to quick-detect ngons that *may* use a same edge more than once.
+ # Potential candidate will be re-checked once we have done parsing the whole face.
+ if not face_invalid_blenpoly:
+ # If we use more than once a same vertex, invalid ngon is suspected.
+ if vert_loc_index in face_items_usage:
+ face_invalid_blenpoly.append(True)
+ else:
+ face_items_usage.add(vert_loc_index)
+ face_vert_loc_indices.append(vert_loc_index)
+
+ # formatting for faces with normals and textures is
+ # loc_index/tex_index/nor_index
+ if len(obj_vert) > 1 and obj_vert[1]:
+ idx = int(obj_vert[1]) - 1
+ face_vert_tex_indices.append((idx + len(verts_tex) + 1) if (idx < 0) else idx)
+ face_vert_tex_valid = True
+ else:
+ # dummy
+ face_vert_tex_indices.append(0)
- time_main = time.time()
+ if len(obj_vert) > 2 and obj_vert[2]:
+ idx = int(obj_vert[2]) - 1
+ face_vert_nor_indices.append((idx + len(verts_nor) + 1) if (idx < 0) else idx)
+ face_vert_nor_valid = True
+ else:
+ # dummy
+ face_vert_nor_indices.append(0)
+
+ if not context_multi_line:
+ # Clear nor/tex indices in case we had none defined for this face.
+ if not face_vert_nor_valid:
+ face_vert_nor_indices.clear()
+ if not face_vert_tex_valid:
+ face_vert_tex_indices.clear()
+ face_vert_nor_valid = face_vert_tex_valid = False
+
+ # Means we have finished a face, we have to do final check if ngon is suspected to be blender-invalid...
+ if face_invalid_blenpoly:
+ face_invalid_blenpoly.clear()
+ face_items_usage.clear()
+ prev_vidx = face_vert_loc_indices[-1]
+ for vidx in face_vert_loc_indices:
+ edge_key = (prev_vidx, vidx) if (prev_vidx < vidx) else (vidx, prev_vidx)
+ if edge_key in face_items_usage:
+ face_invalid_blenpoly.append(True)
+ break
+ face_items_usage.add(edge_key)
+ prev_vidx = vidx
+
+ elif use_edges and (line_start == b'l' or context_multi_line == b'l'):
+ # very similar to the face load function above with some parts removed
+ if not context_multi_line:
+ line_split = line_split[1:]
+ # Instantiate a face
+ face = create_face(context_material, context_smooth_group, context_object)
+ face_vert_loc_indices = face[0]
+ # XXX A bit hackish, we use special 'value' of face_vert_nor_indices (a single True item) to tag this
+ # as a polyline, and not a regular face...
+ face[1][:] = [True]
+ faces.append(face)
+ # Else, use face_vert_loc_indices previously defined and used the obj_face
+
+ context_multi_line = b'l' if strip_slash(line_split) else b''
+
+ for v in line_split:
+ obj_vert = v.split(b'/')
+ idx = int(obj_vert[0]) - 1
+ face_vert_loc_indices.append((idx + len(verts_loc) + 1) if (idx < 0) else idx)
+
+ elif line_start == b's':
+ if use_smooth_groups:
+ context_smooth_group = line_value(line_split)
+ if context_smooth_group == b'off':
+ context_smooth_group = None
+ elif context_smooth_group: # is not None
+ unique_smooth_groups[context_smooth_group] = None
+
+ elif line_start == b'o':
+ if use_split_objects:
+ context_object = line_value(line_split)
+ # unique_obects[context_object]= None
+
+ elif line_start == b'g':
+ if use_split_groups:
+ context_object = line_value(line.split())
+ # print 'context_object', context_object
+ # unique_obects[context_object]= None
+ elif use_groups_as_vgroups:
+ context_vgroup = line_value(line.split())
+ if context_vgroup and context_vgroup != b'(null)':
+ vertex_groups.setdefault(context_vgroup, [])
+ else:
+ context_vgroup = None # dont assign a vgroup
+
+ elif line_start == b'usemtl':
+ context_material = line_value(line.split())
+ unique_materials[context_material] = None
+ elif line_start == b'mtllib': # usemap or usemat
+ # can have multiple mtllib filenames per line, mtllib can appear more than once,
+ # so make sure only occurrence of material exists
+ material_libs = list(set(material_libs) | set(line.split()[1:]))
+
+ # Nurbs support
+ elif line_start == b'cstype':
+ context_nurbs[b'cstype'] = line_value(line.split()) # 'rat bspline' / 'bspline'
+ elif line_start == b'curv' or context_multi_line == b'curv':
+ curv_idx = context_nurbs[b'curv_idx'] = context_nurbs.get(b'curv_idx', []) # in case were multiline
+
+ if not context_multi_line:
+ context_nurbs[b'curv_range'] = float_func(line_split[1]), float_func(line_split[2])
+ line_split[0:3] = [] # remove first 3 items
+
+ if strip_slash(line_split):
+ context_multi_line = b'curv'
+ else:
+ context_multi_line = b''
- verts_loc = []
- verts_nor = []
- verts_tex = []
- faces = [] # tuples of the faces
- material_libs = [] # filanems to material libs this uses
- vertex_groups = {} # when use_groups_as_vgroups is true
+ for i in line_split:
+ vert_loc_index = int(i) - 1
- # Get the string to float conversion func for this file- is 'float' for almost all files.
- float_func = get_float_func(filepath)
+ if vert_loc_index < 0:
+ vert_loc_index = len(verts_loc) + vert_loc_index + 1
- # Context variables
- context_material = None
- context_smooth_group = None
- context_object = None
- context_vgroup = None
-
- # Nurbs
- context_nurbs = {}
- nurbs = []
- context_parm = b'' # used by nurbs too but could be used elsewhere
-
- # Until we can use sets
- unique_materials = {}
- unique_material_images = {}
- unique_smooth_groups = {}
- # unique_obects= {} - no use for this variable since the objects are stored in the face.
-
- # when there are faces that end with \
- # it means they are multiline-
- # since we use xreadline we cant skip to the next line
- # so we need to know whether
- context_multi_line = b''
-
- # Per-face handling data.
- face_vert_loc_indices = None
- face_vert_nor_indices = None
- face_vert_tex_indices = None
- face_vert_nor_valid = face_vert_tex_valid = False
- face_items_usage = set()
- face_invalid_blenpoly = None
- prev_vidx = None
- face = None
- vec = []
-
- print("\tparsing obj file...")
- time_sub = time.time()
+ curv_idx.append(vert_loc_index)
- file = open(filepath, 'rb')
- for line in file: # .readlines():
- line_split = line.split()
-
- if not line_split:
- continue
-
- line_start = line_split[0] # we compare with this a _lot_
-
- if line_start == b'v' or context_multi_line == b'v':
- context_multi_line = handle_vec(line_start, context_multi_line, line_split, b'v', verts_loc, vec, 3)
-
- elif line_start == b'vn' or context_multi_line == b'vn':
- context_multi_line = handle_vec(line_start, context_multi_line, line_split, b'vn', verts_nor, vec, 3)
-
- elif line_start == b'vt' or context_multi_line == b'vt':
- context_multi_line = handle_vec(line_start, context_multi_line, line_split, b'vt', verts_tex, vec, 2)
-
- # Handle faces lines (as faces) and the second+ lines of fa multiline face here
- # use 'f' not 'f ' because some objs (very rare have 'fo ' for faces)
- elif line_start == b'f' or context_multi_line == b'f':
- if not context_multi_line:
- line_split = line_split[1:]
- # Instantiate a face
- face = create_face(context_material, context_smooth_group, context_object)
- (face_vert_loc_indices, face_vert_nor_indices, face_vert_tex_indices,
- _1, _2, _3, face_invalid_blenpoly) = face
- faces.append(face)
- face_items_usage.clear()
- # Else, use face_vert_loc_indices and face_vert_tex_indices previously defined and used the obj_face
-
- context_multi_line = b'f' if strip_slash(line_split) else b''
-
- for v in line_split:
- obj_vert = v.split(b'/')
- idx = int(obj_vert[0]) - 1
- vert_loc_index = (idx + len(verts_loc) + 1) if (idx < 0) else idx
- # Add the vertex to the current group
- # *warning*, this wont work for files that have groups defined around verts
- if use_groups_as_vgroups and context_vgroup:
- vertex_groups[context_vgroup].append(vert_loc_index)
- # This a first round to quick-detect ngons that *may* use a same edge more than once.
- # Potential candidate will be re-checked once we have done parsing the whole face.
- if not face_invalid_blenpoly:
- # If we use more than once a same vertex, invalid ngon is suspected.
- if vert_loc_index in face_items_usage:
- face_invalid_blenpoly.append(True)
+ elif line_start == b'parm' or context_multi_line == b'parm':
+ if context_multi_line:
+ context_multi_line = b''
else:
- face_items_usage.add(vert_loc_index)
- face_vert_loc_indices.append(vert_loc_index)
-
- # formatting for faces with normals and textures is
- # loc_index/tex_index/nor_index
- if len(obj_vert) > 1 and obj_vert[1]:
- idx = int(obj_vert[1]) - 1
- face_vert_tex_indices.append((idx + len(verts_tex) + 1) if (idx < 0) else idx)
- face_vert_tex_valid = True
- else:
- # dummy
- face_vert_tex_indices.append(0)
-
- if len(obj_vert) > 2 and obj_vert[2]:
- idx = int(obj_vert[2]) - 1
- face_vert_nor_indices.append((idx + len(verts_nor) + 1) if (idx < 0) else idx)
- face_vert_nor_valid = True
- else:
- # dummy
- face_vert_nor_indices.append(0)
-
- if not context_multi_line:
- # Clear nor/tex indices in case we had none defined for this face.
- if not face_vert_nor_valid:
- face_vert_nor_indices.clear()
- if not face_vert_tex_valid:
- face_vert_tex_indices.clear()
- face_vert_nor_valid = face_vert_tex_valid = False
-
- # Means we have finished a face, we have to do final check if ngon is suspected to be blender-invalid...
- if face_invalid_blenpoly:
- face_invalid_blenpoly.clear()
- face_items_usage.clear()
- prev_vidx = face_vert_loc_indices[-1]
- for vidx in face_vert_loc_indices:
- edge_key = (prev_vidx, vidx) if (prev_vidx < vidx) else (vidx, prev_vidx)
- if edge_key in face_items_usage:
- face_invalid_blenpoly.append(True)
- break
- face_items_usage.add(edge_key)
- prev_vidx = vidx
+ context_parm = line_split[1]
+ line_split[0:2] = [] # remove first 2
- elif use_edges and (line_start == b'l' or context_multi_line == b'l'):
- # very similar to the face load function above with some parts removed
- if not context_multi_line:
- line_split = line_split[1:]
- # Instantiate a face
- face = create_face(context_material, context_smooth_group, context_object)
- face_vert_loc_indices = face[0]
- # XXX A bit hackish, we use special 'value' of face_vert_nor_indices (a single True item) to tag this
- # as a polyline, and not a regular face...
- face[1][:] = [True]
- faces.append(face)
- # Else, use face_vert_loc_indices previously defined and used the obj_face
-
- context_multi_line = b'l' if strip_slash(line_split) else b''
-
- for v in line_split:
- obj_vert = v.split(b'/')
- idx = int(obj_vert[0]) - 1
- face_vert_loc_indices.append((idx + len(verts_loc) + 1) if (idx < 0) else idx)
-
- elif line_start == b's':
- if use_smooth_groups:
- context_smooth_group = line_value(line_split)
- if context_smooth_group == b'off':
- context_smooth_group = None
- elif context_smooth_group: # is not None
- unique_smooth_groups[context_smooth_group] = None
-
- elif line_start == b'o':
- if use_split_objects:
- context_object = line_value(line_split)
- # unique_obects[context_object]= None
-
- elif line_start == b'g':
- if use_split_groups:
- context_object = line_value(line.split())
- # print 'context_object', context_object
- # unique_obects[context_object]= None
- elif use_groups_as_vgroups:
- context_vgroup = line_value(line.split())
- if context_vgroup and context_vgroup != b'(null)':
- vertex_groups.setdefault(context_vgroup, [])
- else:
- context_vgroup = None # dont assign a vgroup
-
- elif line_start == b'usemtl':
- context_material = line_value(line.split())
- unique_materials[context_material] = None
- elif line_start == b'mtllib': # usemap or usemat
- # can have multiple mtllib filenames per line, mtllib can appear more than once,
- # so make sure only occurrence of material exists
- material_libs = list(set(material_libs) | set(line.split()[1:]))
-
- # Nurbs support
- elif line_start == b'cstype':
- context_nurbs[b'cstype'] = line_value(line.split()) # 'rat bspline' / 'bspline'
- elif line_start == b'curv' or context_multi_line == b'curv':
- curv_idx = context_nurbs[b'curv_idx'] = context_nurbs.get(b'curv_idx', []) # in case were multiline
-
- if not context_multi_line:
- context_nurbs[b'curv_range'] = float_func(line_split[1]), float_func(line_split[2])
- line_split[0:3] = [] # remove first 3 items
-
- if strip_slash(line_split):
- context_multi_line = b'curv'
- else:
- context_multi_line = b''
+ if strip_slash(line_split):
+ context_multi_line = b'parm'
+ else:
+ context_multi_line = b''
+
+ if context_parm.lower() == b'u':
+ context_nurbs.setdefault(b'parm_u', []).extend([float_func(f) for f in line_split])
+ elif context_parm.lower() == b'v': # surfaces not supported yet
+ context_nurbs.setdefault(b'parm_v', []).extend([float_func(f) for f in line_split])
+ # else: # may want to support other parm's ?
+
+ elif line_start == b'deg':
+ context_nurbs[b'deg'] = [int(i) for i in line.split()[1:]]
+ elif line_start == b'end':
+ # Add the nurbs curve
+ if context_object:
+ context_nurbs[b'name'] = context_object
+ nurbs.append(context_nurbs)
+ context_nurbs = {}
+ context_parm = b''
+
+ ''' # How to use usemap? depricated?
+ elif line_start == b'usema': # usemap or usemat
+ context_image= line_value(line_split)
+ '''
+
+ progress.step("Done, loading materials and images...")
+
+ create_materials(filepath.encode(), relpath, material_libs, unique_materials,
+ unique_material_images, use_image_search, float_func)
+
+ progress.step("Done, building geometries (verts:%i faces:%i materials: %i smoothgroups:%i) ..." %
+ (len(verts_loc), len(faces), len(unique_materials), len(unique_smooth_groups)))
+
+ # deselect all
+ if bpy.ops.object.select_all.poll():
+ bpy.ops.object.select_all(action='DESELECT')
+
+ scene = context.scene
+ new_objects = [] # put new objects here
+
+ # Split the mesh by objects/materials, may
+ SPLIT_OB_OR_GROUP = bool(use_split_objects or use_split_groups)
+
+ for data in split_mesh(verts_loc, faces, unique_materials, filepath, SPLIT_OB_OR_GROUP):
+ verts_loc_split, faces_split, unique_materials_split, dataname, use_vnor, use_vtex = data
+ # Create meshes from the data, warning 'vertex_groups' wont support splitting
+ #~ print(dataname, use_vnor, use_vtex)
+ create_mesh(new_objects,
+ use_edges,
+ verts_loc_split,
+ verts_nor if use_vnor else [],
+ verts_tex if use_vtex else [],
+ faces_split,
+ unique_materials_split,
+ unique_material_images,
+ unique_smooth_groups,
+ vertex_groups,
+ dataname,
+ )
+
+ # nurbs support
+ for context_nurbs in nurbs:
+ create_nurbs(context_nurbs, verts_loc, new_objects)
+
+ # Create new obj
+ for obj in new_objects:
+ base = scene.objects.link(obj)
+ base.select = True
- for i in line_split:
- vert_loc_index = int(i) - 1
+ # we could apply this anywhere before scaling.
+ obj.matrix_world = global_matrix
- if vert_loc_index < 0:
- vert_loc_index = len(verts_loc) + vert_loc_index + 1
+ scene.update()
- curv_idx.append(vert_loc_index)
+ axis_min = [1000000000] * 3
+ axis_max = [-1000000000] * 3
- elif line_start == b'parm' or context_multi_line == b'parm':
- if context_multi_line:
- context_multi_line = b''
- else:
- context_parm = line_split[1]
- line_split[0:2] = [] # remove first 2
+ if global_clamp_size:
+ # Get all object bounds
+ for ob in new_objects:
+ for v in ob.bound_box:
+ for axis, value in enumerate(v):
+ if axis_min[axis] > value:
+ axis_min[axis] = value
+ if axis_max[axis] < value:
+ axis_max[axis] = value
- if strip_slash(line_split):
- context_multi_line = b'parm'
- else:
- context_multi_line = b''
-
- if context_parm.lower() == b'u':
- context_nurbs.setdefault(b'parm_u', []).extend([float_func(f) for f in line_split])
- elif context_parm.lower() == b'v': # surfaces not supported yet
- context_nurbs.setdefault(b'parm_v', []).extend([float_func(f) for f in line_split])
- # else: # may want to support other parm's ?
-
- elif line_start == b'deg':
- context_nurbs[b'deg'] = [int(i) for i in line.split()[1:]]
- elif line_start == b'end':
- # Add the nurbs curve
- if context_object:
- context_nurbs[b'name'] = context_object
- nurbs.append(context_nurbs)
- context_nurbs = {}
- context_parm = b''
-
- ''' # How to use usemap? depricated?
- elif line_start == b'usema': # usemap or usemat
- context_image= line_value(line_split)
- '''
+ # Scale objects
+ max_axis = max(axis_max[0] - axis_min[0], axis_max[1] - axis_min[1], axis_max[2] - axis_min[2])
+ scale = 1.0
- file.close()
- time_new = time.time()
- print("%.4f sec" % (time_new - time_sub))
- time_sub = time_new
-
- print('\tloading materials and images...')
- create_materials(filepath, relpath, material_libs, unique_materials,
- unique_material_images, use_image_search, float_func)
-
- time_new = time.time()
- print("%.4f sec" % (time_new - time_sub))
- time_sub = time_new
-
- # deselect all
- if bpy.ops.object.select_all.poll():
- bpy.ops.object.select_all(action='DESELECT')
-
- scene = context.scene
- new_objects = [] # put new objects here
-
- print('\tbuilding geometry...\n\tverts:%i faces:%i materials: %i smoothgroups:%i ...' %
- (len(verts_loc), len(faces), len(unique_materials), len(unique_smooth_groups)))
- # Split the mesh by objects/materials, may
- SPLIT_OB_OR_GROUP = bool(use_split_objects or use_split_groups)
-
- for data in split_mesh(verts_loc, faces, unique_materials, filepath, SPLIT_OB_OR_GROUP):
- verts_loc_split, faces_split, unique_materials_split, dataname, use_vnor, use_vtex = data
- # Create meshes from the data, warning 'vertex_groups' wont support splitting
- #~ print(dataname, use_vnor, use_vtex)
- create_mesh(new_objects,
- use_edges,
- verts_loc_split,
- verts_nor if use_vnor else [],
- verts_tex if use_vtex else [],
- faces_split,
- unique_materials_split,
- unique_material_images,
- unique_smooth_groups,
- vertex_groups,
- dataname,
- )
-
- # nurbs support
- for context_nurbs in nurbs:
- create_nurbs(context_nurbs, verts_loc, new_objects)
-
- # Create new obj
- for obj in new_objects:
- base = scene.objects.link(obj)
- base.select = True
-
- # we could apply this anywhere before scaling.
- obj.matrix_world = global_matrix
-
- scene.update()
-
- axis_min = [1000000000] * 3
- axis_max = [-1000000000] * 3
-
- if global_clamp_size:
- # Get all object bounds
- for ob in new_objects:
- for v in ob.bound_box:
- for axis, value in enumerate(v):
- if axis_min[axis] > value:
- axis_min[axis] = value
- if axis_max[axis] < value:
- axis_max[axis] = value
-
- # Scale objects
- max_axis = max(axis_max[0] - axis_min[0], axis_max[1] - axis_min[1], axis_max[2] - axis_min[2])
- scale = 1.0
-
- while global_clamp_size < max_axis * scale:
- scale = scale / 10.0
+ while global_clamp_size < max_axis * scale:
+ scale = scale / 10.0
- for obj in new_objects:
- obj.scale = scale, scale, scale
+ for obj in new_objects:
+ obj.scale = scale, scale, scale
- time_new = time.time()
+ progress.leave_substeps("Done.")
+ progress.leave_substeps("Finished importing: %r" % filepath)
- print("finished importing: %r in %.4f sec." % (filepath, (time_new - time_main)))
return {'FINISHED'}
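
On the import side the same module is used flat rather than nested: one outer substep spanning the whole import, and an inner three-step span covering parsing, material loading, and geometry building. A condensed sketch of the sequence visible in the diff above, with the three phase bodies as hypothetical stand-ins:

    from progress_report import ProgressReport

    def load(operator, context, filepath):
        with ProgressReport(context.window_manager) as progress:
            progress.enter_substeps(1, "Importing OBJ %r..." % filepath)

            progress.enter_substeps(3, "Parsing OBJ file...")
            parse_file()       # stand-in for the line-parsing loop
            progress.step("Done, loading materials and images...")
            load_materials()   # stand-in for create_materials()
            progress.step("Done, building geometries...")
            build_meshes()     # stand-in for split_mesh()/create_mesh()
            progress.leave_substeps("Done.")

            progress.leave_substeps("Finished importing: %r" % filepath)
        return {'FINISHED'}
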