git.blender.org/blender-addons.git
Diffstat (limited to 'io_scene_obj/import_obj.py')
-rw-r--r-- io_scene_obj/import_obj.py | 523
1 file changed, 209 insertions(+), 314 deletions(-)
diff --git a/io_scene_obj/import_obj.py b/io_scene_obj/import_obj.py
index 6e26a4fc..3185e5c2 100644
--- a/io_scene_obj/import_obj.py
+++ b/io_scene_obj/import_obj.py
@@ -87,21 +87,24 @@ def obj_image_load(context_imagepath_map, line, DIR, recursive, relpath):
def create_materials(filepath, relpath,
- material_libs, unique_materials, unique_material_images,
- use_image_search, use_cycles, float_func):
+ material_libs, unique_materials,
+ use_image_search, float_func):
"""
Create all the used materials in this obj,
assign colors and images to the materials from all referenced material libs
"""
+ from math import sqrt
+ from bpy_extras import node_shader_utils
+
DIR = os.path.dirname(filepath)
context_material_vars = set()
# Don't load the same image multiple times
context_imagepath_map = {}
- cycles_material_wrap_map = {}
+ nodal_material_wrap_map = {}
- def load_material_image(blender_material, mat_wrap, use_cycles, context_material_name, img_data, line, type):
+ def load_material_image(blender_material, mat_wrap, context_material_name, img_data, line, type):
"""
Set textures defined in .mtl file.
"""
@@ -120,105 +123,47 @@ def create_materials(filepath, relpath,
# Absolute path - c:\.. etc would work here
image = obj_image_load(context_imagepath_map, line, DIR, use_image_search, relpath)
- texture = bpy.data.textures.new(name=type, type='IMAGE')
- if image is not None:
- texture.image = image
-
map_offset = map_options.get(b'-o')
map_scale = map_options.get(b'-s')
+ def _generic_tex_set(nodetex, image, texcoords, translation, scale):
+ nodetex.image = image
+ nodetex.texcoords = texcoords
+ if translation is not None:
+ nodetex.translation = translation
+ if scale is not None:
+ nodetex.scale = scale
+
# Adds textures for materials (rendering)
if type == 'Kd':
- if use_cycles:
- mat_wrap.diffuse_image_set(image)
- mat_wrap.diffuse_mapping_set(coords='UV', translation=map_offset, scale=map_scale)
-
- mtex = blender_material.texture_slots.add()
- mtex.texture = texture
- mtex.texture_coords = 'UV'
- mtex.use_map_color_diffuse = True
-
- # adds textures to faces (Textured/Alt-Z mode)
- # Only apply the diffuse texture to the face if the image has not been set with the inline usemat func.
- unique_material_images[context_material_name] = image # set the texface image
+ _generic_tex_set(mat_wrap.base_color_texture, image, 'UV', map_offset, map_scale)
elif type == 'Ka':
- if use_cycles:
- # XXX Not supported?
- print("WARNING, currently unsupported ambient texture, skipped.")
-
- mtex = blender_material.texture_slots.add()
- mtex.use_map_color_diffuse = False
- mtex.texture = texture
- mtex.texture_coords = 'UV'
- mtex.use_map_ambient = True
+ # XXX Not supported?
+ print("WARNING, currently unsupported ambient texture, skipped.")
elif type == 'Ks':
- if use_cycles:
- mat_wrap.specular_image_set(image)
- mat_wrap.specular_mapping_set(coords='UV', translation=map_offset, scale=map_scale)
-
- mtex = blender_material.texture_slots.add()
- mtex.use_map_color_diffuse = False
- mtex.texture = texture
- mtex.texture_coords = 'UV'
- mtex.use_map_color_spec = True
+ _generic_tex_set(mat_wrap.specular_texture, image, 'UV', map_offset, map_scale)
elif type == 'Ke':
- if use_cycles:
- # XXX Not supported?
- print("WARNING, currently unsupported emit texture, skipped.")
-
- mtex = blender_material.texture_slots.add()
- mtex.use_map_color_diffuse = False
- mtex.texture = texture
- mtex.texture_coords = 'UV'
- mtex.use_map_emit = True
+ # XXX Not supported?
+ print("WARNING, currently unsupported emit texture, skipped.")
elif type == 'Bump':
bump_mult = map_options.get(b'-bm')
bump_mult = float(bump_mult[0]) if (bump_mult and len(bump_mult[0]) > 1) else 1.0
+ mat_wrap.normalmap_strength_set(bump_mult)
- if use_cycles:
- mat_wrap.normal_image_set(image)
- mat_wrap.normal_mapping_set(coords='UV', translation=map_offset, scale=map_scale)
- if bump_mult:
- mat_wrap.normal_factor_set(bump_mult)
-
- mtex = blender_material.texture_slots.add()
- mtex.use_map_color_diffuse = False
- mtex.texture = texture
- mtex.texture_coords = 'UV'
- mtex.use_map_normal = True
- if bump_mult:
- mtex.normal_factor = bump_mult
+ _generic_tex_set(mat_wrap.normalmap_texture, image, 'UV', map_offset, map_scale)
elif type == 'D':
- if use_cycles:
- mat_wrap.alpha_image_set(image)
- mat_wrap.alpha_mapping_set(coords='UV', translation=map_offset, scale=map_scale)
-
- mtex = blender_material.texture_slots.add()
- mtex.use_map_color_diffuse = False
- mtex.texture = texture
- mtex.texture_coords = 'UV'
- mtex.use_map_alpha = True
- blender_material.use_transparency = True
- blender_material.transparency_method = 'Z_TRANSPARENCY'
- if "alpha" not in context_material_vars:
- blender_material.alpha = 0.0
- # Todo, unset diffuse material alpha if it has an alpha channel
+ _generic_tex_set(mat_wrap.transmission_texture, image, 'UV', map_offset, map_scale)
elif type == 'disp':
- if use_cycles:
- mat_wrap.bump_image_set(image)
- mat_wrap.bump_mapping_set(coords='UV', translation=map_offset, scale=map_scale)
-
- mtex = blender_material.texture_slots.add()
- mtex.use_map_color_diffuse = False
- mtex.texture = texture
- mtex.texture_coords = 'UV'
- mtex.use_map_displacement = True
+ # XXX Not supported?
+ print("WARNING, currently unsupported displacement texture, skipped.")
+ # ~ mat_wrap.bump_image_set(image)
+ # ~ mat_wrap.bump_mapping_set(coords='UV', translation=map_offset, scale=map_scale)
elif type == 'refl':
map_type = map_options.get(b'-type')
@@ -226,54 +171,25 @@ def create_materials(filepath, relpath,
print("WARNING, unsupported reflection type '%s', defaulting to 'sphere'"
"" % ' '.join(i.decode() for i in map_type))
- if use_cycles:
- mat_wrap.diffuse_image_set(image, projection='SPHERE')
- mat_wrap.diffuse_mapping_set(coords='Reflection', translation=map_offset, scale=map_scale)
+ _generic_tex_set(mat_wrap.base_color_texture, image, 'Reflection', map_offset, map_scale)
+ mat_wrap.base_color_texture.projection = 'SPHERE'
- mtex = blender_material.texture_slots.add()
- mtex.use_map_color_diffuse = False
- mtex.texture = texture
- mtex.texture_coords = 'REFLECTION'
- mtex.use_map_color_diffuse = True
- mtex.mapping = 'SPHERE'
else:
raise Exception("invalid type %r" % type)
- if map_offset:
- mtex.offset.x = float(map_offset[0])
- if len(map_offset) >= 2:
- mtex.offset.y = float(map_offset[1])
- if len(map_offset) >= 3:
- mtex.offset.z = float(map_offset[2])
- if map_scale:
- mtex.scale.x = float(map_scale[0])
- if len(map_scale) >= 2:
- mtex.scale.y = float(map_scale[1])
- if len(map_scale) >= 3:
- mtex.scale.z = float(map_scale[2])
-
- # Add an MTL with the same name as the obj if no MTLs are spesified.
+ # Try to find a MTL with the same name as the OBJ if no MTLs are specified.
temp_mtl = os.path.splitext((os.path.basename(filepath)))[0] + ".mtl"
-
if os.path.exists(os.path.join(DIR, temp_mtl)):
material_libs.add(temp_mtl)
del temp_mtl
# Create new materials
for name in unique_materials: # .keys()
- if name is not None:
- ma = unique_materials[name] = bpy.data.materials.new(name.decode('utf-8', "replace"))
- unique_material_images[name] = None # assign None to all material images to start with, add to later.
- if use_cycles:
- from modules import cycles_shader_compat
- ma_wrap = cycles_shader_compat.CyclesShaderWrapper(ma)
- cycles_material_wrap_map[ma] = ma_wrap
-
-
- # XXX Why was this needed? Cannot find any good reason, and adds stupid empty matslot in case we do not separate
- # mesh (see T44947).
- #~ unique_materials[None] = None
- #~ unique_material_images[None] = None
+ ma_name = "Default OBJ" if name is None else name.decode('utf-8', "replace")
+ ma = unique_materials[name] = bpy.data.materials.new(ma_name)
+ ma_wrap = node_shader_utils.PrincipledBSDFWrapper(ma, is_readonly=False)
+ nodal_material_wrap_map[ma] = ma_wrap
+ ma_wrap.use_nodes = True
for libname in sorted(material_libs):
# print(libname)
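
For reference, the patch replaces the old CyclesShaderWrapper with bpy_extras.node_shader_utils.PrincipledBSDFWrapper. A minimal sketch of how that wrapper is typically driven, runnable inside Blender 2.80+ (material name and image path are hypothetical):

import bpy
from bpy_extras import node_shader_utils

# Hypothetical material, mirroring what create_materials() does per OBJ material.
ma = bpy.data.materials.new("OBJ_example")
ma_wrap = node_shader_utils.PrincipledBSDFWrapper(ma, is_readonly=False)
ma_wrap.use_nodes = True  # builds the Principled BSDF node tree

# Scalar/color sockets map directly to wrapper attributes.
ma_wrap.base_color = (0.8, 0.2, 0.2)
ma_wrap.roughness = 0.5
ma_wrap.metallic = 0.0

# Texture sub-wrappers replace the old texture_slots: assigning .image creates
# an Image Texture node and links it to the corresponding socket.
img = bpy.data.images.load("//textures/diffuse.png")  # hypothetical path
ma_wrap.base_color_texture.image = img
ma_wrap.base_color_texture.texcoords = 'UV'
ma_wrap.base_color_texture.translation = (0.1, 0.0, 0.0)  # like the MTL '-o' option
ma_wrap.base_color_texture.scale = (2.0, 2.0, 1.0)        # like the MTL '-s' option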
@@ -281,13 +197,13 @@ def create_materials(filepath, relpath,
if not os.path.exists(mtlpath):
print("\tMaterial not found MTL: %r" % mtlpath)
else:
- do_ambient = True
+ # Note: with modern Principled BSDF shader, things like ambient, raytrace or fresnel are always 'ON'
+ # (i.e. automatically controlled by other parameters).
do_highlight = False
do_reflection = False
do_transparency = False
do_glass = False
- do_fresnel = False
- do_raytrace = False
+ spec_colors = [0.0, 0.0, 0.0]
emit_colors = [0.0, 0.0, 0.0]
# print('\t\tloading mtl: %e' % mtlpath)
@@ -305,133 +221,113 @@ def create_materials(filepath, relpath,
if line_id == b'newmtl':
# Finalize previous mat, if any.
if context_material:
+ if "specular" in context_material_vars:
+ # XXX This is highly approximated, not sure whether we can do better...
+ # TODO: Find a way to guesstimate best value from diffuse color...
+ # IDEA: Use standard deviation of both spec and diff colors (i.e. how far away they are
+ # from some grey), and apply the proportion between those two as tint factor?
+ spec = sum(spec_colors) / 3.0
+ # ~ spec_var = math.sqrt(sum((c - spec) ** 2 for c in spec_colors) / 3.0)
+ # ~ diff = sum(context_mat_wrap.base_color) / 3.0
+ # ~ diff_var = math.sqrt(sum((c - diff) ** 2 for c in context_mat_wrap.base_color) / 3.0)
+ # ~ tint = min(1.0, spec_var / diff_var)
+ context_mat_wrap.specular = spec
+ context_mat_wrap.specular_tint = 0.0
+ if "roughness" not in context_material_vars:
+ context_mat_wrap.roughness = 0.0
+
+
emit_value = sum(emit_colors) / 3.0
if emit_value > 1e-6:
- if use_cycles:
- print("WARNING, currently unsupported emit value, skipped.")
+ print("WARNING, emit value unsupported by Principled BSDF shader, skipped.")
# We have to adapt it to diffuse color too...
emit_value /= sum(context_material.diffuse_color) / 3.0
- context_material.emit = emit_value
-
- if not do_ambient:
- context_material.ambient = 0.0
+ # ~ context_material.emit = emit_value
+ # FIXME, how else to use this?
if do_highlight:
- if use_cycles:
- context_mat_wrap.hardness_value_set(1.0)
- # FIXME, how else to use this?
- context_material.specular_intensity = 1.0
+ if "specular" not in context_material_vars:
+ context_mat_wrap.specular = 1.0
+ if "roughness" not in context_material_vars:
+ context_mat_wrap.roughness = 0.0
else:
- if use_cycles:
- context_mat_wrap.hardness_value_set(0.0)
+ if "specular" not in context_material_vars:
+ context_mat_wrap.specular = 0.0
+ if "roughness" not in context_material_vars:
+ context_mat_wrap.roughness = 1.0
if do_reflection:
- if use_cycles:
- context_mat_wrap.reflect_factor_set(1.0)
- context_material.raytrace_mirror.use = True
- context_material.raytrace_mirror.reflect_factor = 1.0
+ if "metallic" not in context_material_vars:
+ context_mat_wrap.metallic = 1.0
if do_transparency:
- context_material.use_transparency = True
- context_material.transparency_method = 'RAYTRACE' if do_raytrace else 'Z_TRANSPARENCY'
- if "alpha" not in context_material_vars:
- if use_cycles:
- context_mat_wrap.alpha_value_set(0.0)
- context_material.alpha = 0.0
+ if "ior" not in context_material_vars:
+ context_mat_wrap.ior = 1.0
+ if "transmission" not in context_material_vars:
+ context_mat_wrap.transmission = 1.0
+ # EEVEE only
+ context_material.blend_method = 'BLEND'
if do_glass:
- if use_cycles:
- print("WARNING, currently unsupported glass material, skipped.")
if "ior" not in context_material_vars:
- context_material.raytrace_transparency.ior = 1.5
-
- if do_fresnel:
- if use_cycles:
- print("WARNING, currently unsupported fresnel option, skipped.")
- context_material.raytrace_mirror.fresnel = 1.0 # could be any value for 'ON'
-
- """
- if do_raytrace:
- context_material.use_raytrace = True
- else:
- context_material.use_raytrace = False
- """
- # XXX, this is not following the OBJ spec, but this was
- # written when raytracing wasnt default, annoying to disable for blender users.
- context_material.use_raytrace = True
+ context_mat_wrap.ior = 1.5
context_material_name = line_value(line_split)
context_material = unique_materials.get(context_material_name)
- if use_cycles and context_material is not None:
- context_mat_wrap = cycles_material_wrap_map[context_material]
+ if context_material is not None:
+ context_mat_wrap = nodal_material_wrap_map[context_material]
context_material_vars.clear()
+ spec_colors = [0.0, 0.0, 0.0]
emit_colors[:] = [0.0, 0.0, 0.0]
- do_ambient = True
do_highlight = False
do_reflection = False
do_transparency = False
do_glass = False
- do_fresnel = False
- do_raytrace = False
elif context_material:
# we need to make a material to assign properties to it.
if line_id == b'ka':
- col = (float_func(line_split[1]), float_func(line_split[2]), float_func(line_split[3]))
- if use_cycles:
- context_mat_wrap.reflect_color_set(col)
- context_material.mirror_color = col
- # This is highly approximated, but let's try to stick as close from exporter as possible... :/
- context_material.ambient = sum(context_material.mirror_color) / 3
+ refl = (float_func(line_split[1]) + float_func(line_split[2]) + float_func(line_split[3])) / 3.0
+ context_mat_wrap.metallic = refl
+ context_material_vars.add("metallic")
elif line_id == b'kd':
col = (float_func(line_split[1]), float_func(line_split[2]), float_func(line_split[3]))
- if use_cycles:
- context_mat_wrap.diffuse_color_set(col)
- context_material.diffuse_color = col
- context_material.diffuse_intensity = 1.0
+ context_mat_wrap.base_color = col
elif line_id == b'ks':
- col = (float_func(line_split[1]), float_func(line_split[2]), float_func(line_split[3]))
- if use_cycles:
- context_mat_wrap.specular_color_set(col)
- context_mat_wrap.hardness_value_set(1.0)
- context_material.specular_color = col
- context_material.specular_intensity = 1.0
+ spec_colors[:] = [
+ float_func(line_split[1]), float_func(line_split[2]), float_func(line_split[3])]
+ context_material_vars.add("specular")
elif line_id == b'ke':
# We cannot set context_material.emit right now, we need final diffuse color as well for this.
+ # XXX Unsupported currently
emit_colors[:] = [
float_func(line_split[1]), float_func(line_split[2]), float_func(line_split[3])]
elif line_id == b'ns':
- if use_cycles:
- context_mat_wrap.hardness_value_set(((float_func(line_split[1]) + 3.0) / 50.0) - 0.65)
- context_material.specular_hardness = int((float_func(line_split[1]) * 0.51) + 1)
- elif line_id == b'ni': # Refraction index (between 1 and 3).
- if use_cycles:
- print("WARNING, currently unsupported glass material, skipped.")
- context_material.raytrace_transparency.ior = max(1, min(float_func(line_split[1]), 3))
+ # XXX Totally empirical conversion, trying to adapt it
+ # (from 0.0 - 900.0 OBJ specular exponent range to 1.0 - 0.0 Principled BSDF range)...
+ context_mat_wrap.roughness = 1.0 - (sqrt(float_func(line_split[1])) / 30)
+ context_material_vars.add("roughness")
+ elif line_id == b'ni': # Refraction index (between 0.001 and 10).
+ context_mat_wrap.ior = float_func(line_split[1])
context_material_vars.add("ior")
elif line_id == b'd': # dissolve (transparency)
- if use_cycles:
- context_mat_wrap.alpha_value_set(float_func(line_split[1]))
- context_material.alpha = float_func(line_split[1])
- context_material.use_transparency = True
- context_material.transparency_method = 'Z_TRANSPARENCY'
- context_material_vars.add("alpha")
+ context_mat_wrap.transmission = 1.0 - float_func(line_split[1])
+ context_material_vars.add("transmission")
elif line_id == b'tr': # translucency
- if use_cycles:
- print("WARNING, currently unsupported translucency option, skipped.")
- context_material.translucency = float_func(line_split[1])
+ print("WARNING, currently unsupported 'tr' translucency option, skipped.")
elif line_id == b'tf':
# rgb, filter color, blender has no support for this.
- pass
+ print("WARNING, currently unsupported 'tf' filter color option, skipped.")
elif line_id == b'illum':
illum = get_int(line_split[1])
# inline comments are from the spec, v4.2
if illum == 0:
# Color on and Ambient off
- do_ambient = False
+ print("WARNING, Principled BSDF shader does not support illumination 0 mode "
+ "(colors with no ambient), skipped.")
elif illum == 1:
# Color on and Ambient on
pass
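
The 'Ns' branch above is, as its comment says, a purely empirical mapping from the OBJ specular exponent (0-900) to Principled BSDF roughness (1-0). A standalone sketch of the same formula with a few illustrative exponents:

from math import sqrt

def ns_to_roughness(ns):
    # Same formula as the importer; exponents above 900 would go negative
    # (the importer does not clamp).
    return 1.0 - (sqrt(ns) / 30)

for ns in (0.0, 100.0, 400.0, 900.0):
    print(ns, round(ns_to_roughness(ns), 3))
# 0.0 -> 1.0, 100.0 -> 0.667, 400.0 -> 0.333, 900.0 -> 0.0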
@@ -441,32 +337,25 @@ def create_materials(filepath, relpath,
elif illum == 3:
# Reflection on and Ray trace on
do_reflection = True
- do_raytrace = True
elif illum == 4:
# Transparency: Glass on
# Reflection: Ray trace on
do_transparency = True
do_reflection = True
do_glass = True
- do_raytrace = True
elif illum == 5:
# Reflection: Fresnel on and Ray trace on
do_reflection = True
- do_fresnel = True
- do_raytrace = True
elif illum == 6:
# Transparency: Refraction on
# Reflection: Fresnel off and Ray trace on
do_transparency = True
do_reflection = True
- do_raytrace = True
elif illum == 7:
# Transparency: Refraction on
# Reflection: Fresnel on and Ray trace on
do_transparency = True
do_reflection = True
- do_fresnel = True
- do_raytrace = True
elif illum == 8:
# Reflection on and Ray trace off
do_reflection = True
@@ -474,58 +363,58 @@ def create_materials(filepath, relpath,
# Transparency: Glass on
# Reflection: Ray trace off
do_transparency = True
- do_reflection = True
+ do_reflection = False
do_glass = True
elif illum == 10:
# Casts shadows onto invisible surfaces
-
- # blender can't do this
+ print("WARNING, Principled BSDF shader does not support illumination 10 mode "
+ "(cast shadows on invisible surfaces), skipped.")
pass
elif line_id == b'map_ka':
img_data = line.split()[1:]
if img_data:
- load_material_image(context_material, context_mat_wrap, use_cycles,
+ load_material_image(context_material, context_mat_wrap,
context_material_name, img_data, line, 'Ka')
elif line_id == b'map_ks':
img_data = line.split()[1:]
if img_data:
- load_material_image(context_material, context_mat_wrap, use_cycles,
+ load_material_image(context_material, context_mat_wrap,
context_material_name, img_data, line, 'Ks')
elif line_id == b'map_kd':
img_data = line.split()[1:]
if img_data:
- load_material_image(context_material, context_mat_wrap, use_cycles,
+ load_material_image(context_material, context_mat_wrap,
context_material_name, img_data, line, 'Kd')
elif line_id == b'map_ke':
img_data = line.split()[1:]
if img_data:
- load_material_image(context_material, context_mat_wrap, use_cycles,
+ load_material_image(context_material, context_mat_wrap,
context_material_name, img_data, line, 'Ke')
elif line_id in {b'map_bump', b'bump'}: # 'bump' is incorrect but some files use it.
img_data = line.split()[1:]
if img_data:
- load_material_image(context_material, context_mat_wrap, use_cycles,
+ load_material_image(context_material, context_mat_wrap,
context_material_name, img_data, line, 'Bump')
elif line_id in {b'map_d', b'map_tr'}: # Alpha map - Dissolve
img_data = line.split()[1:]
if img_data:
- load_material_image(context_material, context_mat_wrap, use_cycles,
+ load_material_image(context_material, context_mat_wrap,
context_material_name, img_data, line, 'D')
elif line_id in {b'map_disp', b'disp'}: # displacementmap
img_data = line.split()[1:]
if img_data:
- load_material_image(context_material, context_mat_wrap, use_cycles,
+ load_material_image(context_material, context_mat_wrap,
context_material_name, img_data, line, 'disp')
elif line_id in {b'map_refl', b'refl'}: # reflectionmap
img_data = line.split()[1:]
if img_data:
- load_material_image(context_material, context_mat_wrap, use_cycles,
+ load_material_image(context_material, context_mat_wrap,
context_material_name, img_data, line, 'refl')
else:
- print("\t%r:%r (ignored)" % (filepath, line))
+ print("WARNING: %r:%r (ignored)" % (filepath, line))
mtl.close()
@@ -538,8 +427,8 @@ def split_mesh(verts_loc, faces, unique_materials, filepath, SPLIT_OB_OR_GROUP):
filename = os.path.splitext((os.path.basename(filepath)))[0]
if not SPLIT_OB_OR_GROUP or not faces:
- use_verts_nor = any((False if f[1] is ... else True) for f in faces)
- use_verts_tex = any((False if f[2] is ... else True) for f in faces)
+ use_verts_nor = any(f[1] for f in faces)
+ use_verts_tex = any(f[2] for f in faces)
# use the filename for the object name since we aren't chopping up the mesh.
return [(verts_loc, faces, unique_materials, filename, use_verts_nor, use_verts_tex)]
@@ -556,7 +445,15 @@ def split_mesh(verts_loc, faces, unique_materials, filepath, SPLIT_OB_OR_GROUP):
oldkey = -1 # initialize to a value that will never match the key
for face in faces:
- key = face[5]
+ (face_vert_loc_indices,
+ face_vert_nor_indices,
+ face_vert_tex_indices,
+ context_material,
+ context_smooth_group,
+ context_object,
+ face_invalid_blenpoly,
+ ) = face
+ key = context_object
if oldkey != key:
# Check the key has changed.
@@ -564,27 +461,25 @@ def split_mesh(verts_loc, faces, unique_materials, filepath, SPLIT_OB_OR_GROUP):
use_verts_nor, use_verts_tex) = face_split_dict.setdefault(key, ([], [], {}, {}, [], []))
oldkey = key
- face_vert_loc_indices = face[0]
- if not use_verts_nor and face[1] is not ...:
+ if not use_verts_nor and face_vert_nor_indices:
use_verts_nor.append(True)
- if not use_verts_tex and face[2] is not ...:
+ if not use_verts_tex and face_vert_tex_indices:
use_verts_tex.append(True)
# Remap verts to new vert list and add where needed
- for enum, i in enumerate(face_vert_loc_indices):
- map_index = vert_remap.get(i)
+ for loop_idx, vert_idx in enumerate(face_vert_loc_indices):
+ map_index = vert_remap.get(vert_idx)
if map_index is None:
map_index = len(verts_split)
- vert_remap[i] = map_index # set the new remapped index so we only add once and can reference next time.
- verts_split.append(verts_loc[i]) # add the vert to the local verts
+ vert_remap[vert_idx] = map_index # set the new remapped index so we only add once and can reference next time.
+ verts_split.append(verts_loc[vert_idx]) # add the vert to the local verts
- face_vert_loc_indices[enum] = map_index # remap to the local index
+ face_vert_loc_indices[loop_idx] = map_index # remap to the local index
- matname = face[3]
- if matname and matname not in unique_materials_split:
- unique_materials_split[matname] = unique_materials[matname]
+ if context_material not in unique_materials_split:
+ unique_materials_split[context_material] = unique_materials[context_material]
faces_split.append(face)
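
The loop above lazily remaps global vertex indices to a compact per-object list; the same pattern in isolation, with made-up coordinates and faces:

verts_loc = [(0.0, 0.0, 0.0), (1.0, 0.0, 0.0), (0.0, 1.0, 0.0), (5.0, 5.0, 5.0)]
faces_for_object = [[0, 2, 1], [1, 2, 3]]  # indices into the global vertex list

vert_remap = {}   # global index -> local index, filled lazily
verts_split = []  # only the vertices this object actually uses

for face in faces_for_object:
    for loop_idx, vert_idx in enumerate(face):
        map_index = vert_remap.get(vert_idx)
        if map_index is None:
            map_index = len(verts_split)
            vert_remap[vert_idx] = map_index        # remember so each vertex is added only once
            verts_split.append(verts_loc[vert_idx])
        face[loop_idx] = map_index                  # remap the face to the local index

# verts_split now has 4 entries and faces_for_object references them as 0..3.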
@@ -601,7 +496,6 @@ def create_mesh(new_objects,
verts_tex,
faces,
unique_materials,
- unique_material_images,
unique_smooth_groups,
vertex_groups,
dataname,
@@ -664,7 +558,7 @@ def create_mesh(new_objects,
# ignore triangles with invalid indices
if len(face_vert_loc_indices) > 3:
from bpy_extras.mesh_utils import ngon_tessellate
- ngon_face_indices = ngon_tessellate(verts_loc, face_vert_loc_indices)
+ ngon_face_indices = ngon_tessellate(verts_loc, face_vert_loc_indices, debug_print=bpy.app.debug)
faces.extend([([face_vert_loc_indices[ngon[0]],
face_vert_loc_indices[ngon[1]],
face_vert_loc_indices[ngon[2]],
@@ -734,68 +628,37 @@ def create_mesh(new_objects,
# verts_loc is a list of (x, y, z) tuples
me.vertices.foreach_set("co", unpack_list(verts_loc))
- loops_vert_idx = []
+ loops_vert_idx = tuple(vidx for (face_vert_loc_indices, _, _, _, _, _, _) in faces for vidx in face_vert_loc_indices)
faces_loop_start = []
- faces_loop_total = []
lidx = 0
for f in faces:
- vidx = f[0]
- nbr_vidx = len(vidx)
- loops_vert_idx.extend(vidx)
+ face_vert_loc_indices = f[0]
+ nbr_vidx = len(face_vert_loc_indices)
faces_loop_start.append(lidx)
- faces_loop_total.append(nbr_vidx)
lidx += nbr_vidx
+ faces_loop_total = tuple(len(face_vert_loc_indices) for (face_vert_loc_indices, _, _, _, _, _, _) in faces)
me.loops.foreach_set("vertex_index", loops_vert_idx)
me.polygons.foreach_set("loop_start", faces_loop_start)
me.polygons.foreach_set("loop_total", faces_loop_total)
+ faces_ma_index = tuple(material_mapping[context_material] for (_, _, _, context_material, _, _, _) in faces)
+ me.polygons.foreach_set("material_index", faces_ma_index)
+
+ faces_use_smooth = tuple(bool(context_smooth_group) for (_, _, _, _, context_smooth_group, _, _) in faces)
+ me.polygons.foreach_set("use_smooth", faces_use_smooth)
+
if verts_nor and me.loops:
# Note: we store 'temp' normals in loops, since validate() may alter final mesh,
# we can only set custom lnors *after* calling it.
me.create_normals_split()
+ loops_nor = tuple(no for (_, face_vert_nor_indices, _, _, _, _, _) in faces for face_noidx in face_vert_nor_indices for no in verts_nor[face_noidx])
+ me.loops.foreach_set("normal", loops_nor)
if verts_tex and me.polygons:
- me.uv_textures.new()
-
- context_material_old = -1 # avoid a dict lookup
- mat = 0 # rare case it may be un-initialized.
-
- for i, (face, blen_poly) in enumerate(zip(faces, me.polygons)):
- if len(face[0]) < 3:
- raise Exception("bad face") # Shall not happen, we got rid of those earlier!
-
- (face_vert_loc_indices,
- face_vert_nor_indices,
- face_vert_tex_indices,
- context_material,
- context_smooth_group,
- context_object,
- face_invalid_blenpoly,
- ) = face
-
- if context_smooth_group:
- blen_poly.use_smooth = True
-
- if context_material:
- if context_material_old is not context_material:
- mat = material_mapping[context_material]
- context_material_old = context_material
- blen_poly.material_index = mat
-
- if verts_nor and face_vert_nor_indices:
- for face_noidx, lidx in zip(face_vert_nor_indices, blen_poly.loop_indices):
- me.loops[lidx].normal[:] = verts_nor[0 if (face_noidx is ...) else face_noidx]
-
- if verts_tex and face_vert_tex_indices:
- if context_material:
- image = unique_material_images[context_material]
- if image: # Can be none if the material dosnt have an image.
- me.uv_textures[0].data[i].image = image
-
- blen_uvs = me.uv_layers[0]
- for face_uvidx, lidx in zip(face_vert_tex_indices, blen_poly.loop_indices):
- blen_uvs.data[lidx].uv = verts_tex[0 if (face_uvidx is ...) else face_uvidx]
+ me.uv_layers.new()
+ loops_uv = tuple(uv for (_, _, face_vert_tex_indices, _, _, _, _) in faces for face_uvidx in face_vert_tex_indices for uv in verts_tex[face_uvidx])
+ me.uv_layers[0].data.foreach_set("uv", loops_uv)
use_edges = use_edges and bool(edges)
if use_edges:
@@ -829,7 +692,6 @@ def create_mesh(new_objects,
for e in me.edges:
if e.key in sharp_edges:
e.use_edge_sharp = True
- me.show_edge_sharp = True
if verts_nor:
clnors = array.array('f', [0.0] * (len(me.loops) * 3))
@@ -840,7 +702,6 @@ def create_mesh(new_objects,
me.normals_split_custom_set(tuple(zip(*(iter(clnors),) * 3)))
me.use_auto_smooth = True
- me.show_edge_sharp = True
ob = bpy.data.objects.new(me.name, me)
new_objects.append(ob)
@@ -849,7 +710,7 @@ def create_mesh(new_objects,
# content of the vertex_groups. If the user selects to NOT have vertex groups saved then
# the following test will never run
for group_name, group_indices in vertex_groups.items():
- group = ob.vertex_groups.new(group_name.decode('utf-8', "replace"))
+ group = ob.vertex_groups.new(name=group_name.decode('utf-8', "replace"))
group.add(group_indices, 1.0, 'REPLACE')
@@ -966,14 +827,13 @@ def get_int(svalue):
def load(context,
filepath,
*,
- global_clamp_size=0.0,
+ global_clight_size=0.0,
use_smooth_groups=True,
use_edges=True,
use_split_objects=True,
use_split_groups=True,
use_image_search=True,
use_groups_as_vgroups=False,
- use_cycles=True,
relpath=None,
global_matrix=None
):
@@ -1041,8 +901,8 @@ def load(context,
context_parm = b'' # used by nurbs too but could be used elsewhere
# Until we can use sets
+ use_default_material = False
unique_materials = {}
- unique_material_images = {}
unique_smooth_groups = {}
# unique_obects= {} - no use for this variable since the objects are stored in the face.
@@ -1057,15 +917,19 @@ def load(context,
face_vert_nor_indices = None
face_vert_tex_indices = None
face_vert_nor_valid = face_vert_tex_valid = False
+ verts_loc_len = verts_nor_len = verts_tex_len = 0
face_items_usage = set()
face_invalid_blenpoly = None
prev_vidx = None
face = None
vec = []
+ quick_vert_failures = 0
+ skip_quick_vert = False
+
progress.enter_substeps(3, "Parsing OBJ file...")
with open(filepath, 'rb') as f:
- for line in f: # .readlines():
+ for line in f:
line_split = line.split()
if not line_split:
@@ -1073,17 +937,39 @@ def load(context,
line_start = line_split[0] # we compare with this a _lot_
- if line_start == b'v' or context_multi_line == b'v':
- context_multi_line = handle_vec(line_start, context_multi_line, line_split, b'v', verts_loc, vec, 3)
-
- elif line_start == b'vn' or context_multi_line == b'vn':
- context_multi_line = handle_vec(line_start, context_multi_line, line_split, b'vn', verts_nor, vec, 3)
+ # Handling of the different vertex data types is pretty similar, so factorize it.
+ # Also, most OBJ files store all of those on a single line, so try fast parsing for that first,
+ # and only fall back to full multi-line parsing when needed; this gives a significant speed-up
+ # (~40% on the affected code).
+ if line_start == b'v':
+ vdata, vdata_len, do_quick_vert = (verts_loc, 3, not skip_quick_vert)
+ elif line_start == b'vn':
+ vdata, vdata_len, do_quick_vert = (verts_nor, 3, not skip_quick_vert)
+ elif line_start == b'vt':
+ vdata, vdata_len, do_quick_vert = verts_tex, 2, not skip_quick_vert
+ elif context_multi_line == b'v':
+ vdata, vdata_len, do_quick_vert = verts_loc, 3, False
+ elif context_multi_line == b'vn':
+ vdata, vdata_len, do_quick_vert = verts_nor, 3, False
+ elif context_multi_line == b'vt':
+ vdata, vdata_len, do_quick_vert = verts_tex, 2, False
+ else:
+ vdata_len = 0
+
+ if vdata_len:
+ if do_quick_vert:
+ try:
+ vdata.append(tuple(map(float_func, line_split[1:vdata_len + 1])))
+ except:
+ do_quick_vert = False
+ # In case we get too many failures on quick parsing, force fallback to full multi-line one.
+ # Exception handling can become costly...
+ quick_vert_failures += 1
+ if quick_vert_failures > 10000:
+ skip_quick_vert = True
+ if not do_quick_vert:
+ context_multi_line = handle_vec(line_start, context_multi_line, line_split, b'v', vdata, vec, vdata_len)
- elif line_start == b'vt' or context_multi_line == b'vt':
- context_multi_line = handle_vec(line_start, context_multi_line, line_split, b'vt', verts_tex, vec, 2)
-
- # Handle faces lines (as faces) and the second+ lines of fa multiline face here
- # use 'f' not 'f ' because some objs (very rare have 'fo ' for faces)
elif line_start == b'f' or context_multi_line == b'f':
if not context_multi_line:
line_split = line_split[1:]
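
The vertex-parsing rewrite above tries a cheap single-line parse first and counts failures before giving up on the fast path. Stripped of the importer specifics, the pattern looks roughly like this (line contents are invented):

def parse_floats_fast(fields, expected, failures, max_failures=10000):
    # Cheap single-line parse; report failure instead of raising so the caller
    # can fall back to the slower, multi-line aware parser.
    if failures[0] > max_failures:
        return None  # too many misses already, skip straight to the slow path
    try:
        return tuple(map(float, fields[:expected]))
    except ValueError:
        failures[0] += 1  # exception handling is costly, so keep count
        return None

failures = [0]
for raw in (b"v 1.0 2.0 3.0", b"v 1.0 2.0 \\"):  # second line continues on the next one
    vec = parse_floats_fast(raw.split()[1:], 3, failures)
    if vec is None:
        pass  # would hand the line to the full parser (handle_vec in the importer)
    else:
        print(vec)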
@@ -1093,14 +979,19 @@ def load(context,
_1, _2, _3, face_invalid_blenpoly) = face
faces.append(face)
face_items_usage.clear()
+ verts_loc_len = len(verts_loc)
+ verts_nor_len = len(verts_nor)
+ verts_tex_len = len(verts_tex)
+ if context_material is None:
+ use_default_material = True
# Else, use face_vert_loc_indices and face_vert_tex_indices previously defined and used the obj_face
context_multi_line = b'f' if strip_slash(line_split) else b''
for v in line_split:
obj_vert = v.split(b'/')
- idx = int(obj_vert[0]) - 1
- vert_loc_index = (idx + len(verts_loc) + 1) if (idx < 0) else idx
+ idx = int(obj_vert[0]) # Note that we assume here we cannot get OBJ invalid 0 index...
+ vert_loc_index = (idx + verts_loc_len) if (idx < 1) else idx - 1
# Add the vertex to the current group
# *warning*, this wont work for files that have groups defined around verts
if use_groups_as_vgroups and context_vgroup:
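
The index arithmetic above resolves OBJ's 1-based and negative (relative) indices against the number of elements parsed so far. A tiny standalone sketch of the same conversion (vertex count is made up):

def obj_index_to_list_index(idx, current_len):
    # OBJ indices are 1-based; negative values count back from the most
    # recently declared element. Index 0 is invalid per the spec and, as in
    # the importer, not treated specially here.
    return (idx + current_len) if idx < 1 else idx - 1

current_len = 8  # hypothetical number of vertices parsed so far
assert obj_index_to_list_index(1, current_len) == 0    # first vertex
assert obj_index_to_list_index(8, current_len) == 7    # last vertex
assert obj_index_to_list_index(-1, current_len) == 7   # also the last vertex
assert obj_index_to_list_index(-8, current_len) == 0   # back to the first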
@@ -1118,18 +1009,18 @@ def load(context,
# formatting for faces with normals and textures is
# loc_index/tex_index/nor_index
if len(obj_vert) > 1 and obj_vert[1] and obj_vert[1] != b'0':
- idx = int(obj_vert[1]) - 1
- face_vert_tex_indices.append((idx + len(verts_tex) + 1) if (idx < 0) else idx)
+ idx = int(obj_vert[1])
+ face_vert_tex_indices.append((idx + verts_tex_len) if (idx < 1) else idx - 1)
face_vert_tex_valid = True
else:
- face_vert_tex_indices.append(...)
+ face_vert_tex_indices.append(0)
if len(obj_vert) > 2 and obj_vert[2] and obj_vert[2] != b'0':
- idx = int(obj_vert[2]) - 1
- face_vert_nor_indices.append((idx + len(verts_nor) + 1) if (idx < 0) else idx)
+ idx = int(obj_vert[2])
+ face_vert_nor_indices.append((idx + verts_nor_len) if (idx < 1) else idx - 1)
face_vert_nor_valid = True
else:
- face_vert_nor_indices.append(...)
+ face_vert_nor_indices.append(0)
if not context_multi_line:
# Clear nor/tex indices in case we had none defined for this face.
@@ -1263,8 +1154,10 @@ def load(context,
progress.step("Done, loading materials and images...")
+ if use_default_material:
+ unique_materials[None] = None
create_materials(filepath, relpath, material_libs, unique_materials,
- unique_material_images, use_image_search, use_cycles, float_func)
+ use_image_search, float_func)
progress.step("Done, building geometries (verts:%i faces:%i materials: %i smoothgroups:%i) ..." %
(len(verts_loc), len(faces), len(unique_materials), len(unique_smooth_groups)))
@@ -1290,7 +1183,6 @@ def load(context,
verts_tex if use_vtex else [],
faces_split,
unique_materials_split,
- unique_material_images,
unique_smooth_groups,
vertex_groups,
dataname,
@@ -1300,10 +1192,13 @@ def load(context,
for context_nurbs in nurbs:
create_nurbs(context_nurbs, verts_loc, new_objects)
+ view_layer = context.view_layer
+ collection = view_layer.active_layer_collection.collection
+
# Create new obj
for obj in new_objects:
- base = scene.objects.link(obj)
- base.select = True
+ collection.objects.link(obj)
+ obj.select_set(True)
# we could apply this anywhere before scaling.
obj.matrix_world = global_matrix
@@ -1313,7 +1208,7 @@ def load(context,
axis_min = [1000000000] * 3
axis_max = [-1000000000] * 3
- if global_clamp_size:
+ if global_clight_size:
# Get all object bounds
for ob in new_objects:
for v in ob.bound_box:
@@ -1327,7 +1222,7 @@ def load(context,
max_axis = max(axis_max[0] - axis_min[0], axis_max[1] - axis_min[1], axis_max[2] - axis_min[2])
scale = 1.0
- while global_clamp_size < max_axis * scale:
+ while global_clight_size < max_axis * scale:
scale = scale / 10.0
for obj in new_objects: