Welcome to mirror list, hosted at ThFree Co, Russian Federation.

git.blender.org/blender-addons.git - Unnamed repository; edit this file 'description' to name the repository.
summaryrefslogtreecommitdiff
diff options
context:
space:
mode:
authorPeter Kim <pk15950@gmail.com>2022-03-10 13:59:31 +0300
committerPeter Kim <pk15950@gmail.com>2022-03-10 13:59:31 +0300
commit96209d6e836f2504390fdcc8410b0a7475668a74 (patch)
tree62f270f010c0ce7cae18a1408ded79e38e202990
parent604ae8d2bd658188148e67a667c67a738e22b34d (diff)
parentc34f0b90d49938eccf9c1a9ae59b055155e00f9b (diff)
Merge branch 'master' into xr-dev
-rw-r--r--amaranth/__init__.py2
-rw-r--r--development_iskeyfree.py8
-rw-r--r--io_anim_bvh/__init__.py2
-rw-r--r--io_anim_bvh/import_bvh.py4
-rw-r--r--io_anim_camera.py39
-rw-r--r--io_scene_fbx/__init__.py2
-rw-r--r--io_scene_fbx/import_fbx.py8
-rwxr-xr-xio_scene_gltf2/__init__.py4
-rwxr-xr-xio_scene_gltf2/blender/com/gltf2_blender_math.py18
-rwxr-xr-xio_scene_gltf2/blender/exp/gltf2_blender_export_keys.py1
-rwxr-xr-xio_scene_gltf2/blender/exp/gltf2_blender_extract.py8
-rwxr-xr-xio_scene_gltf2/blender/exp/gltf2_blender_gather.py52
-rwxr-xr-xio_scene_gltf2/blender/exp/gltf2_blender_gather_animation_channel_target.py35
-rwxr-xr-xio_scene_gltf2/blender/exp/gltf2_blender_gather_animation_channels.py131
-rwxr-xr-xio_scene_gltf2/blender/exp/gltf2_blender_gather_animation_sampler_keyframes.py196
-rwxr-xr-xio_scene_gltf2/blender/exp/gltf2_blender_gather_animation_samplers.py101
-rwxr-xr-xio_scene_gltf2/blender/exp/gltf2_blender_gather_animations.py84
-rwxr-xr-xio_scene_gltf2/blender/exp/gltf2_blender_gather_cache.py184
-rw-r--r--io_scene_gltf2/blender/exp/gltf2_blender_gather_drivers.py18
-rwxr-xr-xio_scene_gltf2/blender/exp/gltf2_blender_gather_joints.py78
-rwxr-xr-xio_scene_gltf2/blender/exp/gltf2_blender_gather_materials.py16
-rwxr-xr-xio_scene_gltf2/blender/exp/gltf2_blender_gather_materials_pbr_metallic_roughness.py11
-rwxr-xr-xio_scene_gltf2/blender/exp/gltf2_blender_gather_mesh.py71
-rwxr-xr-xio_scene_gltf2/blender/exp/gltf2_blender_gather_nodes.py329
-rwxr-xr-xio_scene_gltf2/blender/exp/gltf2_blender_gather_primitives.py49
-rwxr-xr-xio_scene_gltf2/blender/exp/gltf2_blender_gather_skins.py143
-rw-r--r--io_scene_gltf2/blender/exp/gltf2_blender_gather_tree.py374
-rwxr-xr-xio_scene_gltf2/blender/exp/gltf2_blender_get.py32
-rw-r--r--magic_uv/__init__.py2
-rw-r--r--measureit/__init__.py3
-rw-r--r--mesh_snap_utilities_line/__init__.py2
-rw-r--r--mesh_tiny_cad/__init__.py2
-rw-r--r--mesh_tissue/README.md47
-rw-r--r--mesh_tissue/__init__.py135
-rw-r--r--mesh_tissue/colors_groups_exchanger.py2468
-rw-r--r--mesh_tissue/config.py63
-rw-r--r--mesh_tissue/curves_tools.py803
-rw-r--r--mesh_tissue/dual_mesh.py92
-rw-r--r--mesh_tissue/lattice.py57
-rw-r--r--mesh_tissue/material_tools.py231
-rw-r--r--mesh_tissue/numba_functions.py380
-rw-r--r--mesh_tissue/polyhedra.py557
-rw-r--r--mesh_tissue/tessellate_numpy.py5394
-rw-r--r--mesh_tissue/tissue_properties.py1060
-rw-r--r--mesh_tissue/utils.py1514
-rw-r--r--mesh_tissue/utils_pip.py154
-rw-r--r--mesh_tissue/uv_to_mesh.py64
-rw-r--r--mesh_tissue/weight_tools.py4681
-rw-r--r--node_wrangler.py45
-rw-r--r--precision_drawing_tools/__init__.py2
-rw-r--r--real_snow.py2
-rw-r--r--viewport_vr_preview/action_map_io.py2
-rw-r--r--viewport_vr_preview/defaults.py2
53 files changed, 13980 insertions, 5782 deletions
diff --git a/amaranth/__init__.py b/amaranth/__init__.py
index 6622c1e4..41530937 100644
--- a/amaranth/__init__.py
+++ b/amaranth/__init__.py
@@ -79,7 +79,7 @@ bl_info = {
"location": "Everywhere!",
"description": "A collection of tools and settings to improve productivity",
"warning": "",
- "doc_url": "https://pablovazquez.art/amaranth",
+ "doc_url": "{BLENDER_MANUAL_URL}/addons/interface/amaranth.html",
"tracker_url": "https://developer.blender.org/maniphest/task/edit/form/2/",
"category": "Interface",
}
diff --git a/development_iskeyfree.py b/development_iskeyfree.py
index a9201ab6..8c8290d0 100644
--- a/development_iskeyfree.py
+++ b/development_iskeyfree.py
@@ -130,9 +130,8 @@ class MyChecker():
"LEFTMOUSE", "MIDDLEMOUSE", "RIGHTMOUSE", "BUTTON4MOUSE", "BUTTON5MOUSE", "BUTTON6MOUSE",
"BUTTON7MOUSE",
"MOUSEMOVE", "INBETWEEN_MOUSEMOVE", "TRACKPADPAN", "TRACKPADZOOM",
- "MOUSEROTATE", "WHEELUPMOUSE", "WHEELDOWNMOUSE", "WHEELINMOUSE", "WHEELOUTMOUSE", "EVT_TWEAK_L",
- "EVT_TWEAK_M", "EVT_TWEAK_R", "A", "B", "C", "D", "E", "F", "G", "H",
- "I", "J",
+ "MOUSEROTATE", "WHEELUPMOUSE", "WHEELDOWNMOUSE", "WHEELINMOUSE", "WHEELOUTMOUSE",
+ "A", "B", "C", "D", "E", "F", "G", "H", "I", "J",
"K", "L", "M", "N", "O", "P", "Q", "R", "S", "T", "U", "V", "W", "X", "Y", "Z", "ZERO", "ONE", "TWO",
"THREE", "FOUR", "FIVE", "SIX", "SEVEN", "EIGHT", "NINE", "LEFT_CTRL", "LEFT_ALT", "LEFT_SHIFT",
"RIGHT_ALT",
@@ -303,9 +302,6 @@ class IskeyFreeProperties(PropertyGroup):
("WHEELDOWNMOUSE", "WHEELDOWNMOUSE", ""),
("WHEELINMOUSE", "WHEELINMOUSE", ""),
("WHEELOUTMOUSE", "WHEELOUTMOUSE", ""),
- ("EVT_TWEAK_L", "EVT_TWEAK_L", ""),
- ("EVT_TWEAK_M", "EVT_TWEAK_M", ""),
- ("EVT_TWEAK_R", "EVT_TWEAK_R", ""),
("A", "A", ""),
("B", "B", ""),
("C", "C", ""),
diff --git a/io_anim_bvh/__init__.py b/io_anim_bvh/__init__.py
index 43e582d5..8f712e3d 100644
--- a/io_anim_bvh/__init__.py
+++ b/io_anim_bvh/__init__.py
@@ -5,7 +5,7 @@
bl_info = {
"name": "BioVision Motion Capture (BVH) format",
"author": "Campbell Barton",
- "version": (1, 0, 0),
+ "version": (1, 0, 1),
"blender": (2, 81, 6),
"location": "File > Import-Export",
"description": "Import-Export BVH from armature objects",
diff --git a/io_anim_bvh/import_bvh.py b/io_anim_bvh/import_bvh.py
index cd30ad11..2f335513 100644
--- a/io_anim_bvh/import_bvh.py
+++ b/io_anim_bvh/import_bvh.py
@@ -569,7 +569,7 @@ def bvh_node_dict2armature(
# For each location x, y, z.
for axis_i in range(3):
- curve = action.fcurves.new(data_path=data_path, index=axis_i)
+ curve = action.fcurves.new(data_path=data_path, index=axis_i, action_group=bvh_node.name)
keyframe_points = curve.keyframe_points
keyframe_points.add(num_frame)
@@ -615,7 +615,7 @@ def bvh_node_dict2armature(
# For each euler angle x, y, z (or quaternion w, x, y, z).
for axis_i in range(len(rotate[0])):
- curve = action.fcurves.new(data_path=data_path, index=axis_i)
+ curve = action.fcurves.new(data_path=data_path, index=axis_i, action_group=bvh_node.name)
keyframe_points = curve.keyframe_points
keyframe_points.add(num_frame)
diff --git a/io_anim_camera.py b/io_anim_camera.py
index 7973bec2..fbc7eaab 100644
--- a/io_anim_camera.py
+++ b/io_anim_camera.py
@@ -19,23 +19,23 @@ bl_info = {
import bpy
-def write_cameras(context, filepath, frame_start, frame_end, only_selected=False):
+def write_cameras(context, fh, frame_start, frame_end, only_selected=False):
data_attrs = (
- 'lens',
- 'shift_x',
- 'shift_y',
- 'dof_distance',
- 'clip_start',
- 'clip_end',
- 'display_size',
- )
+ "lens",
+ "shift_x",
+ "shift_y",
+ "dof.focus_distance",
+ "clip_start",
+ "clip_end",
+ "display_size",
+ )
obj_attrs = (
- 'hide_render',
- )
+ "hide_render",
+ )
- fw = open(filepath, 'w').write
+ fw = fh.write
scene = bpy.context.scene
@@ -60,7 +60,7 @@ def write_cameras(context, filepath, frame_start, frame_end, only_selected=False
for obj, obj_data in cameras:
fw("data = bpy.data.cameras.new(%r)\n" % obj.name)
for attr in data_attrs:
- fw("data.%s = %s\n" % (attr, repr(getattr(obj_data, attr))))
+ fw("data.%s = %s\n" % (attr, repr(obj_data.path_resolve(attr))))
fw("obj = bpy.data.objects.new(%r, data)\n" % obj.name)
@@ -77,21 +77,21 @@ def write_cameras(context, filepath, frame_start, frame_end, only_selected=False
fw("scene.frame_set(%d + frame)\n" % f)
for obj, obj_data in cameras:
- fw("obj = cameras['%s']\n" % obj.name)
+ fw("obj = cameras[%r]\n" % obj.name)
matrix = obj.matrix_world.copy()
fw("obj.location = %r, %r, %r\n" % matrix.to_translation()[:])
fw("obj.scale = %r, %r, %r\n" % matrix.to_scale()[:])
fw("obj.rotation_euler = %r, %r, %r\n" % matrix.to_euler()[:])
- fw("obj.keyframe_insert('location')\n")
- fw("obj.keyframe_insert('scale')\n")
- fw("obj.keyframe_insert('rotation_euler')\n")
+ fw("obj.keyframe_insert(\"location\")\n")
+ fw("obj.keyframe_insert(\"scale\")\n")
+ fw("obj.keyframe_insert(\"rotation_euler\")\n")
# only key the angle
fw("data = obj.data\n")
fw("data.lens = %s\n" % obj_data.lens)
- fw("data.keyframe_insert('lens')\n")
+ fw("data.keyframe_insert(\"lens\")\n")
fw("\n")
@@ -129,7 +129,8 @@ class CameraExporter(bpy.types.Operator, ExportHelper):
default=True)
def execute(self, context):
- write_cameras(context, self.filepath, self.frame_start, self.frame_end, self.only_selected)
+ with open(self.filepath, 'w', encoding='utf-8') as fh:
+ write_cameras(context, fh, self.frame_start, self.frame_end, self.only_selected)
return {'FINISHED'}
def invoke(self, context, event):
diff --git a/io_scene_fbx/__init__.py b/io_scene_fbx/__init__.py
index cf7b75a8..0abafc33 100644
--- a/io_scene_fbx/__init__.py
+++ b/io_scene_fbx/__init__.py
@@ -5,7 +5,7 @@
bl_info = {
"name": "FBX format",
"author": "Campbell Barton, Bastien Montagne, Jens Restemeier",
- "version": (4, 33, 1),
+ "version": (4, 34, 2),
"blender": (3, 2, 0),
"location": "File > Import-Export",
"description": "FBX IO meshes, UV's, vertex colors, materials, textures, cameras, lamps and actions",
diff --git a/io_scene_fbx/import_fbx.py b/io_scene_fbx/import_fbx.py
index 7efd3ab1..f2726047 100644
--- a/io_scene_fbx/import_fbx.py
+++ b/io_scene_fbx/import_fbx.py
@@ -591,7 +591,7 @@ def blen_read_animations_action_item(action, item, cnodes, fps, anim_offset, glo
bl_obj = item.bl_obj
# We want to create actions for objects, but for bones we 'reuse' armatures' actions!
- grpname = item.bl_obj.name
+ grpname = bl_obj.name
# Since we might get other channels animated in the end, due to all FBX transform magic,
# we need to add curves for whole loc/rot/scale in any case.
@@ -1431,9 +1431,9 @@ def blen_read_material(fbx_tmpl, fbx_obj, settings):
# No specular color in Principled BSDF shader, assumed to be either white or take some tint from diffuse one...
# TODO: add way to handle tint option (guesstimate from spec color + intensity...)?
ma_wrap.specular = elem_props_get_number(fbx_props, b'SpecularFactor', 0.25) * 2.0
- # XXX Totally empirical conversion, trying to adapt it
- # (from 1.0 - 0.0 Principled BSDF range to 0.0 - 100.0 FBX shininess range)...
- fbx_shininess = elem_props_get_number(fbx_props, b'Shininess', 20.0)
+ # XXX Totally empirical conversion, trying to adapt it (and protect against invalid negative values, see T96076):
+ # From [1.0 - 0.0] Principled BSDF range to [0.0 - 100.0] FBX shininess range)...
+ fbx_shininess = max(elem_props_get_number(fbx_props, b'Shininess', 20.0), 0.0)
ma_wrap.roughness = 1.0 - (sqrt(fbx_shininess) / 10.0)
# Sweetness... Looks like we are not the only ones to not know exactly how FBX is supposed to work (see T59850).
# According to one of its developers, Unity uses that formula to extract alpha value:
diff --git a/io_scene_gltf2/__init__.py b/io_scene_gltf2/__init__.py
index a672be22..b290f946 100755
--- a/io_scene_gltf2/__init__.py
+++ b/io_scene_gltf2/__init__.py
@@ -4,7 +4,7 @@
bl_info = {
'name': 'glTF 2.0 format',
'author': 'Julien Duroure, Scurest, Norbert Nopper, Urs Hanselmann, Moritz Becher, Benjamin Schmithüsen, Jim Eckerlein, and many external contributors',
- "version": (3, 2, 7),
+ "version": (3, 2, 9),
'blender': (3, 1, 0),
'location': 'File > Import-Export',
'description': 'Import-Export as glTF 2.0',
@@ -879,6 +879,8 @@ class GLTF_PT_export_animation_export(bpy.types.Panel):
row = layout.row()
row.active = operator.export_force_sampling
row.prop(operator, 'export_def_bones')
+ if operator.export_force_sampling is False and operator.export_def_bones is True:
+ layout.label(text="Export only deformation bones is not possible when not sampling animation")
class GLTF_PT_export_animation_shapekeys(bpy.types.Panel):
diff --git a/io_scene_gltf2/blender/com/gltf2_blender_math.py b/io_scene_gltf2/blender/com/gltf2_blender_math.py
index 0498e8f8..d2a018a8 100755
--- a/io_scene_gltf2/blender/com/gltf2_blender_math.py
+++ b/io_scene_gltf2/blender/com/gltf2_blender_math.py
@@ -98,7 +98,7 @@ def swizzle_yup_value(value: typing.Any) -> typing.Any:
return value
-def transform(v: typing.Union[Vector, Quaternion], data_path: str, transform: Matrix = Matrix.Identity(4)) -> typing \
+def transform(v: typing.Union[Vector, Quaternion], data_path: str, transform: Matrix = Matrix.Identity(4), need_rotation_correction: bool = False) -> typing \
.Union[Vector, Quaternion]:
"""Manage transformations."""
target = get_target_property_name(data_path)
@@ -116,25 +116,31 @@ def transform(v: typing.Union[Vector, Quaternion], data_path: str, transform: Ma
if transform_func is None:
raise RuntimeError("Cannot transform values at {}".format(data_path))
- return transform_func(v, transform)
+ return transform_func(v, transform, need_rotation_correction)
-def transform_location(location: Vector, transform: Matrix = Matrix.Identity(4)) -> Vector:
+def transform_location(location: Vector, transform: Matrix = Matrix.Identity(4), need_rotation_correction:bool = False) -> Vector:
"""Transform location."""
+ correction = Quaternion((2**0.5/2, -2**0.5/2, 0.0, 0.0))
m = Matrix.Translation(location)
+ if need_rotation_correction:
+ m @= correction.to_matrix().to_4x4()
m = transform @ m
return m.to_translation()
-def transform_rotation(rotation: Quaternion, transform: Matrix = Matrix.Identity(4)) -> Quaternion:
+def transform_rotation(rotation: Quaternion, transform: Matrix = Matrix.Identity(4), need_rotation_correction: bool = False) -> Quaternion:
"""Transform rotation."""
rotation.normalize()
+ correction = Quaternion((2**0.5/2, -2**0.5/2, 0.0, 0.0))
m = rotation.to_matrix().to_4x4()
+ if need_rotation_correction:
+ m @= correction.to_matrix().to_4x4()
m = transform @ m
return m.to_quaternion()
-def transform_scale(scale: Vector, transform: Matrix = Matrix.Identity(4)) -> Vector:
+def transform_scale(scale: Vector, transform: Matrix = Matrix.Identity(4), need_rotation_correction: bool = False) -> Vector:
"""Transform scale."""
m = Matrix.Identity(4)
m[0][0] = scale.x
@@ -145,7 +151,7 @@ def transform_scale(scale: Vector, transform: Matrix = Matrix.Identity(4)) -> Ve
return m.to_scale()
-def transform_value(value: Vector, _: Matrix = Matrix.Identity(4)) -> Vector:
+def transform_value(value: Vector, _: Matrix = Matrix.Identity(4), need_rotation_correction: bool = False) -> Vector:
"""Transform value."""
return value
diff --git a/io_scene_gltf2/blender/exp/gltf2_blender_export_keys.py b/io_scene_gltf2/blender/exp/gltf2_blender_export_keys.py
index 61a9f5bf..812db3f9 100755
--- a/io_scene_gltf2/blender/exp/gltf2_blender_export_keys.py
+++ b/io_scene_gltf2/blender/exp/gltf2_blender_export_keys.py
@@ -19,6 +19,7 @@ VISIBLE = 'gltf_visible'
RENDERABLE = 'gltf_renderable'
ACTIVE_COLLECTION = 'gltf_active_collection'
SKINS = 'gltf_skins'
+DEF_BONES_ONLY = 'gltf_def_bones'
DISPLACEMENT = 'gltf_displacement'
FORCE_SAMPLING = 'gltf_force_sampling'
FRAME_RANGE = 'gltf_frame_range'
diff --git a/io_scene_gltf2/blender/exp/gltf2_blender_extract.py b/io_scene_gltf2/blender/exp/gltf2_blender_extract.py
index f5b69f13..d81bd706 100755
--- a/io_scene_gltf2/blender/exp/gltf2_blender_extract.py
+++ b/io_scene_gltf2/blender/exp/gltf2_blender_extract.py
@@ -9,10 +9,14 @@ from ...io.com.gltf2_io_debug import print_console
from io_scene_gltf2.blender.exp import gltf2_blender_gather_skins
-def extract_primitives(glTF, blender_mesh, library, blender_object, blender_vertex_groups, modifiers, export_settings):
+def extract_primitives(blender_mesh, uuid_for_skined_data, blender_vertex_groups, modifiers, export_settings):
"""Extract primitives from a mesh."""
print_console('INFO', 'Extracting primitive: ' + blender_mesh.name)
+ blender_object = None
+ if uuid_for_skined_data:
+ blender_object = export_settings['vtree'].nodes[uuid_for_skined_data].blender_object
+
use_normals = export_settings[gltf2_blender_export_keys.NORMALS]
if use_normals:
blender_mesh.calc_normals_split()
@@ -57,7 +61,7 @@ def extract_primitives(glTF, blender_mesh, library, blender_object, blender_vert
armature = None
if armature:
- skin = gltf2_blender_gather_skins.gather_skin(armature, export_settings)
+ skin = gltf2_blender_gather_skins.gather_skin(export_settings['vtree'].nodes[uuid_for_skined_data].armature, export_settings)
if not skin:
armature = None
diff --git a/io_scene_gltf2/blender/exp/gltf2_blender_gather.py b/io_scene_gltf2/blender/exp/gltf2_blender_gather.py
index 31c0fa62..f515da8c 100755
--- a/io_scene_gltf2/blender/exp/gltf2_blender_gather.py
+++ b/io_scene_gltf2/blender/exp/gltf2_blender_gather.py
@@ -7,10 +7,12 @@ from io_scene_gltf2.io.com import gltf2_io
from io_scene_gltf2.io.com.gltf2_io_debug import print_console
from io_scene_gltf2.blender.exp import gltf2_blender_gather_nodes
from io_scene_gltf2.blender.exp import gltf2_blender_gather_animations
+from io_scene_gltf2.blender.exp import gltf2_blender_gather_animation_sampler_keyframes
from io_scene_gltf2.blender.exp.gltf2_blender_gather_cache import cached
from ..com.gltf2_blender_extras import generate_extras
from io_scene_gltf2.blender.exp import gltf2_blender_export_keys
from io_scene_gltf2.io.exp.gltf2_io_user_extensions import export_user_extensions
+from io_scene_gltf2.blender.exp import gltf2_blender_gather_tree
def gather_gltf2(export_settings):
@@ -22,12 +24,18 @@ def gather_gltf2(export_settings):
scenes = []
animations = [] # unfortunately animations in gltf2 are just as 'root' as scenes.
active_scene = None
+ store_user_scene = bpy.context.scene
for blender_scene in bpy.data.scenes:
scenes.append(__gather_scene(blender_scene, export_settings))
if export_settings[gltf2_blender_export_keys.ANIMATIONS]:
+ # resetting object cache
+ gltf2_blender_gather_animation_sampler_keyframes.get_object_matrix.reset_cache()
animations += __gather_animations(blender_scene, export_settings)
if bpy.context.scene.name == blender_scene.name:
active_scene = len(scenes) -1
+
+ # restore user scene
+ bpy.context.window.scene = store_user_scene
return active_scene, scenes, animations
@@ -40,14 +48,25 @@ def __gather_scene(blender_scene, export_settings):
nodes=[]
)
- for blender_object in blender_scene.objects:
- if blender_object.parent is None:
- node = gltf2_blender_gather_nodes.gather_node(
- blender_object,
- blender_object.library.name if blender_object.library else None,
- blender_scene, None, export_settings)
- if node is not None:
- scene.nodes.append(node)
+
+ vtree = gltf2_blender_gather_tree.VExportTree(export_settings)
+ vtree.construct(blender_scene)
+ vtree.search_missing_armature() # In case armature are no parented correctly
+
+ export_user_extensions('vtree_before_filter_hook', export_settings, vtree)
+
+ # Now, we can filter tree if needed
+ vtree.filter()
+
+ export_user_extensions('vtree_after_filter_hook', export_settings, vtree)
+
+ export_settings['vtree'] = vtree
+
+ for r in [vtree.nodes[r] for r in vtree.roots]:
+ node = gltf2_blender_gather_nodes.gather_node(
+ r, export_settings)
+ if node is not None:
+ scene.nodes.append(node)
export_user_extensions('gather_scene_hook', export_settings, scene, blender_scene)
@@ -58,15 +77,16 @@ def __gather_animations(blender_scene, export_settings):
animations = []
merged_tracks = {}
- for blender_object in blender_scene.objects:
+ vtree = export_settings['vtree']
+ for obj_uuid in vtree.get_all_objects():
+ blender_object = vtree.nodes[obj_uuid].blender_object
+
+ # Do not manage not exported objects
+ if vtree.nodes[obj_uuid].node is None:
+ continue
- # First check if this object is exported or not. Do not export animation of not exported object
- obj_node = gltf2_blender_gather_nodes.gather_node(blender_object,
- blender_object.library.name if blender_object.library else None,
- blender_scene, None, export_settings)
- if obj_node is not None:
- animations_, merged_tracks = gltf2_blender_gather_animations.gather_animations(blender_object, merged_tracks, len(animations), export_settings)
- animations += animations_
+ animations_, merged_tracks = gltf2_blender_gather_animations.gather_animations(obj_uuid, merged_tracks, len(animations), export_settings)
+ animations += animations_
if export_settings['gltf_nla_strips'] is False:
# Fake an animation with all animations of the scene
diff --git a/io_scene_gltf2/blender/exp/gltf2_blender_gather_animation_channel_target.py b/io_scene_gltf2/blender/exp/gltf2_blender_gather_animation_channel_target.py
index 928fa14a..0e542de8 100755
--- a/io_scene_gltf2/blender/exp/gltf2_blender_gather_animation_channel_target.py
+++ b/io_scene_gltf2/blender/exp/gltf2_blender_gather_animation_channel_target.py
@@ -12,18 +12,20 @@ from io_scene_gltf2.blender.exp import gltf2_blender_gather_skins
from io_scene_gltf2.io.exp.gltf2_io_user_extensions import export_user_extensions
@cached
-def gather_animation_channel_target(channels: typing.Tuple[bpy.types.FCurve],
- blender_object: bpy.types.Object,
+def gather_animation_channel_target(obj_uuid: int,
+ channels: typing.Tuple[bpy.types.FCurve],
bake_bone: typing.Union[str, None],
bake_channel: typing.Union[str, None],
- driver_obj,
+ driver_obj_uuid,
export_settings
) -> gltf2_io.AnimationChannelTarget:
+ blender_object = export_settings['vtree'].nodes[obj_uuid].blender_object
+
animation_channel_target = gltf2_io.AnimationChannelTarget(
extensions=__gather_extensions(channels, blender_object, export_settings, bake_bone),
extras=__gather_extras(channels, blender_object, export_settings, bake_bone),
- node=__gather_node(channels, blender_object, export_settings, bake_bone, driver_obj),
+ node=__gather_node(channels, obj_uuid, export_settings, bake_bone, driver_obj_uuid),
path=__gather_path(channels, blender_object, export_settings, bake_bone, bake_channel)
)
@@ -54,16 +56,16 @@ def __gather_extras(channels: typing.Tuple[bpy.types.FCurve],
def __gather_node(channels: typing.Tuple[bpy.types.FCurve],
- blender_object: bpy.types.Object,
+ obj_uuid: str,
export_settings,
bake_bone: typing.Union[str, None],
- driver_obj
+ driver_obj_uuid
) -> gltf2_io.Node:
- if driver_obj is not None:
- return gltf2_blender_gather_nodes.gather_node(driver_obj,
- driver_obj.library.name if driver_obj.library else None,
- None, None, export_settings)
+ blender_object = export_settings['vtree'].nodes[obj_uuid].blender_object
+
+ if driver_obj_uuid is not None:
+ return export_settings['vtree'].nodes[driver_obj_uuid].node
if blender_object.type == "ARMATURE":
# TODO: get joint from fcurve data_path and gather_joint
@@ -74,16 +76,9 @@ def __gather_node(channels: typing.Tuple[bpy.types.FCurve],
blender_bone = blender_object.path_resolve(channels[0].data_path.rsplit('.', 1)[0])
if isinstance(blender_bone, bpy.types.PoseBone):
- if export_settings["gltf_def_bones"] is False:
- return gltf2_blender_gather_joints.gather_joint(blender_object, blender_bone, export_settings)
- else:
- bones, _, _ = gltf2_blender_gather_skins.get_bone_tree(None, blender_object)
- if blender_bone.name in [b.name for b in bones]:
- return gltf2_blender_gather_joints.gather_joint(blender_object, blender_bone, export_settings)
-
- return gltf2_blender_gather_nodes.gather_node(blender_object,
- blender_object.library.name if blender_object.library else None,
- None, None, export_settings)
+ return gltf2_blender_gather_joints.gather_joint_vnode(export_settings['vtree'].nodes[obj_uuid].bones[blender_bone.name], export_settings)
+
+ return export_settings['vtree'].nodes[obj_uuid].node
def __gather_path(channels: typing.Tuple[bpy.types.FCurve],
diff --git a/io_scene_gltf2/blender/exp/gltf2_blender_gather_animation_channels.py b/io_scene_gltf2/blender/exp/gltf2_blender_gather_animation_channels.py
index 4c79092c..98ae8b82 100755
--- a/io_scene_gltf2/blender/exp/gltf2_blender_gather_animation_channels.py
+++ b/io_scene_gltf2/blender/exp/gltf2_blender_gather_animation_channels.py
@@ -15,15 +15,18 @@ from io_scene_gltf2.blender.exp import gltf2_blender_get
from io_scene_gltf2.blender.exp import gltf2_blender_gather_skins
from io_scene_gltf2.blender.exp import gltf2_blender_gather_drivers
from io_scene_gltf2.io.exp.gltf2_io_user_extensions import export_user_extensions
+from io_scene_gltf2.blender.exp.gltf2_blender_gather_tree import VExportNode
+from . import gltf2_blender_export_keys
@cached
-def gather_animation_channels(blender_action: bpy.types.Action,
- blender_object: bpy.types.Object,
+def gather_animation_channels(obj_uuid: int,
+ blender_action: bpy.types.Action,
export_settings
) -> typing.List[gltf2_io.AnimationChannel]:
channels = []
+ blender_object = export_settings['vtree'].nodes[obj_uuid].blender_object
# First calculate range of animation for baking
# This is need if user set 'Force sampling' and in case we need to bake
@@ -59,11 +62,8 @@ def gather_animation_channels(blender_action: bpy.types.Action,
# Then bake all bones
bones_to_be_animated = []
- if export_settings["gltf_def_bones"] is False:
- bones_to_be_animated = blender_object.data.bones
- else:
- bones_to_be_animated, _, _ = gltf2_blender_gather_skins.get_bone_tree(None, blender_object)
- bones_to_be_animated = [blender_object.pose.bones[b.name] for b in bones_to_be_animated]
+ bones_uuid = export_settings["vtree"].get_all_bones(obj_uuid)
+ bones_to_be_animated = [blender_object.pose.bones[export_settings["vtree"].nodes[b].blender_bone.name] for b in bones_uuid]
list_of_animated_bone_channels = []
for channel_group in __get_channel_groups(blender_action, blender_object, export_settings):
@@ -72,9 +72,9 @@ def gather_animation_channels(blender_action: bpy.types.Action,
for bone in bones_to_be_animated:
for p in ["location", "rotation_quaternion", "scale"]:
- channel = __gather_animation_channel(
+ channel = gather_animation_channel(
+ obj_uuid,
(),
- blender_object,
export_settings,
bone.name,
p,
@@ -95,17 +95,17 @@ def gather_animation_channels(blender_action: bpy.types.Action,
if len(channel_group) == 0:
# Only errors on channels, ignoring
continue
- channel = __gather_animation_channel(channel_group, blender_object, export_settings, None, None, bake_range_start, bake_range_end, force_range, blender_action.name, None, True)
+ channel = gather_animation_channel(obj_uuid, channel_group, export_settings, None, None, bake_range_start, bake_range_end, force_range, blender_action.name, None, True)
if channel is not None:
channels.append(channel)
# Retrieve channels for drivers, if needed
- drivers_to_manage = gltf2_blender_gather_drivers.get_sk_drivers(blender_object)
- for obj, fcurves in drivers_to_manage:
- channel = __gather_animation_channel(
+ drivers_to_manage = gltf2_blender_gather_drivers.get_sk_drivers(obj_uuid, export_settings)
+ for obj_driver_uuid, fcurves in drivers_to_manage:
+ channel = gather_animation_channel(
+ obj_uuid,
fcurves,
- blender_object,
export_settings,
None,
None,
@@ -113,31 +113,77 @@ def gather_animation_channels(blender_action: bpy.types.Action,
bake_range_end,
force_range,
blender_action.name,
- obj,
- False)
+ obj_driver_uuid,
+ True)
if channel is not None:
channels.append(channel)
else:
+ done_paths = []
for channel_group in __get_channel_groups(blender_action, blender_object, export_settings):
channel_group_sorted = __get_channel_group_sorted(channel_group, blender_object)
if len(channel_group_sorted) == 0:
# Only errors on channels, ignoring
continue
- channel = __gather_animation_channel(
- channel_group_sorted,
- blender_object,
+ channel = gather_animation_channel(
+ obj_uuid,
+ channel_group_sorted,
+ export_settings,
+ None,
+ None,
+ bake_range_start,
+ bake_range_end,
+ force_range,
+ blender_action.name,
+ None,
+ True
+ )
+ if channel is not None:
+ channels.append(channel)
+
+ # Store already done channel path
+ target = [c for c in channel_group_sorted if c is not None][0].data_path.split('.')[-1]
+ path = {
+ "delta_location": "location",
+ "delta_rotation_euler": "rotation_quaternion",
+ "location": "location",
+ "rotation_axis_angle": "rotation_quaternion",
+ "rotation_euler": "rotation_quaternion",
+ "rotation_quaternion": "rotation_quaternion",
+ "scale": "scale",
+ "value": "weights"
+ }.get(target)
+ if path is not None:
+ done_paths.append(path)
+ done_paths = list(set(done_paths))
+
+ if export_settings['gltf_selected'] is True and export_settings['vtree'].tree_troncated is True:
+ start_frame = min([v[0] for v in [a.frame_range for a in bpy.data.actions]])
+ end_frame = max([v[1] for v in [a.frame_range for a in bpy.data.actions]])
+ to_be_done = ['location', 'rotation_quaternion', 'scale']
+ to_be_done = [c for c in to_be_done if c not in done_paths]
+
+ # In case of weight action, do nothing.
+ # If there is only weight --> TRS is already managed at first
+ if not (len(done_paths) == 1 and 'weights' in done_paths):
+ for p in to_be_done:
+ channel = gather_animation_channel(
+ obj_uuid,
+ (),
export_settings,
None,
- None,
- bake_range_start,
- bake_range_end,
+ p,
+ start_frame,
+ end_frame,
force_range,
blender_action.name,
None,
- False)
- if channel is not None:
- channels.append(channel)
+ False #If Object is not animated, don't keep animation for this channel
+ )
+
+ if channel is not None:
+ channels.append(channel)
+
# resetting driver caches
@@ -198,8 +244,9 @@ def __get_channel_group_sorted(channels: typing.Tuple[bpy.types.FCurve], blender
# if not shapekeys, stay in same order, because order doesn't matter
return channels
-def __gather_animation_channel(channels: typing.Tuple[bpy.types.FCurve],
- blender_object: bpy.types.Object,
+# This function can be called directly from gather_animation in case of bake animation (non animated selected object)
+def gather_animation_channel(obj_uuid: str,
+ channels: typing.Tuple[bpy.types.FCurve],
export_settings,
bake_bone: typing.Union[str, None],
bake_channel: typing.Union[str, None],
@@ -207,15 +254,18 @@ def __gather_animation_channel(channels: typing.Tuple[bpy.types.FCurve],
bake_range_end,
force_range: bool,
action_name: str,
- driver_obj,
+ driver_obj_uuid,
node_channel_is_animated: bool
) -> typing.Union[gltf2_io.AnimationChannel, None]:
+
+ blender_object = export_settings['vtree'].nodes[obj_uuid].blender_object
+
if not __filter_animation_channel(channels, blender_object, export_settings):
return None
- __target= __gather_target(channels, blender_object, export_settings, bake_bone, bake_channel, driver_obj)
+ __target= __gather_target(obj_uuid, channels, export_settings, bake_bone, bake_channel, driver_obj_uuid)
if __target.path is not None:
- sampler = __gather_sampler(channels, blender_object, export_settings, bake_bone, bake_channel, bake_range_start, bake_range_end, force_range, action_name, driver_obj, node_channel_is_animated)
+ sampler = __gather_sampler(channels, obj_uuid, export_settings, bake_bone, bake_channel, bake_range_start, bake_range_end, force_range, action_name, driver_obj_uuid, node_channel_is_animated)
if sampler is None:
# After check, no need to animate this node for this channel
@@ -268,7 +318,7 @@ def __gather_extras(channels: typing.Tuple[bpy.types.FCurve],
def __gather_sampler(channels: typing.Tuple[bpy.types.FCurve],
- blender_object: bpy.types.Object,
+ obj_uuid: str,
export_settings,
bake_bone: typing.Union[str, None],
bake_channel: typing.Union[str, None],
@@ -276,33 +326,38 @@ def __gather_sampler(channels: typing.Tuple[bpy.types.FCurve],
bake_range_end,
force_range: bool,
action_name,
- driver_obj,
+ driver_obj_uuid,
node_channel_is_animated: bool
) -> gltf2_io.AnimationSampler:
+
+ need_rotation_correction = (export_settings[gltf2_blender_export_keys.CAMERAS] and export_settings['vtree'].nodes[obj_uuid].blender_type == VExportNode.CAMERA) or \
+ (export_settings[gltf2_blender_export_keys.LIGHTS] and export_settings['vtree'].nodes[obj_uuid].blender_type == VExportNode.LIGHT)
+
return gltf2_blender_gather_animation_samplers.gather_animation_sampler(
channels,
- blender_object,
+ obj_uuid,
bake_bone,
bake_channel,
bake_range_start,
bake_range_end,
force_range,
action_name,
- driver_obj,
+ driver_obj_uuid,
node_channel_is_animated,
+ need_rotation_correction,
export_settings
)
-def __gather_target(channels: typing.Tuple[bpy.types.FCurve],
- blender_object: bpy.types.Object,
+def __gather_target(obj_uuid: str,
+ channels: typing.Tuple[bpy.types.FCurve],
export_settings,
bake_bone: typing.Union[str, None],
bake_channel: typing.Union[str, None],
- driver_obj
+ driver_obj_uuid
) -> gltf2_io.AnimationChannelTarget:
return gltf2_blender_gather_animation_channel_target.gather_animation_channel_target(
- channels, blender_object, bake_bone, bake_channel, driver_obj, export_settings)
+ obj_uuid, channels, bake_bone, bake_channel, driver_obj_uuid, export_settings)
def __get_channel_groups(blender_action: bpy.types.Action, blender_object: bpy.types.Object, export_settings):
diff --git a/io_scene_gltf2/blender/exp/gltf2_blender_gather_animation_sampler_keyframes.py b/io_scene_gltf2/blender/exp/gltf2_blender_gather_animation_sampler_keyframes.py
index d24db395..66ce11c7 100755
--- a/io_scene_gltf2/blender/exp/gltf2_blender_gather_animation_sampler_keyframes.py
+++ b/io_scene_gltf2/blender/exp/gltf2_blender_gather_animation_sampler_keyframes.py
@@ -5,12 +5,13 @@ import bpy
import mathutils
import typing
-from io_scene_gltf2.blender.exp.gltf2_blender_gather_cache import cached, bonecache
+from io_scene_gltf2.blender.exp.gltf2_blender_gather_cache import cached, bonecache, objectcache
from io_scene_gltf2.blender.com import gltf2_blender_math
from io_scene_gltf2.blender.exp import gltf2_blender_get
from io_scene_gltf2.blender.exp.gltf2_blender_gather_drivers import get_sk_drivers, get_sk_driver_values
from . import gltf2_blender_export_keys
from io_scene_gltf2.io.com import gltf2_io_debug
+from io_scene_gltf2.blender.exp.gltf2_blender_gather_tree import VExportNode
import numpy as np
@@ -95,6 +96,10 @@ class Keyframe:
def value(self, value: typing.List[float]):
self.__value = self.__set_indexed(value)
+ @value.setter
+ def value_total(self, value: typing.List[float]):
+ self.__value = value
+
@property
def in_tangent(self) -> typing.Union[mathutils.Vector, mathutils.Euler, mathutils.Quaternion, typing.List[float]]:
if self.__in_tangent is None:
@@ -120,9 +125,75 @@ class Keyframe:
self.__out_tangent = self.__set_indexed(value)
+@objectcache
+def get_object_matrix(blender_obj_uuid: str,
+ action_name: str,
+ bake_range_start: int,
+ bake_range_end: int,
+ current_frame: int,
+ step: int,
+ export_settings
+ ):
+
+ data = {}
+
+ # TODO : bake_range_start & bake_range_end are no longer needed here
+ # Because we bake, we don't know exactly the frame range,
+ # So using min / max of all actions
+
+ start_frame = min([v[0] for v in [a.frame_range for a in bpy.data.actions]])
+ end_frame = max([v[1] for v in [a.frame_range for a in bpy.data.actions]])
+
+ frame = start_frame
+ while frame <= end_frame:
+ bpy.context.scene.frame_set(int(frame))
+
+ for obj_uuid in [uid for (uid, n) in export_settings['vtree'].nodes.items() if n.blender_type not in [VExportNode.BONE]]:
+ blender_obj = export_settings['vtree'].nodes[obj_uuid].blender_object
+
+ # if this object is not animated, do not skip it:
+ # We need this object too in case of bake
+
+ # calculate local matrix
+ if export_settings['vtree'].nodes[obj_uuid].parent_uuid is None:
+ parent_mat = mathutils.Matrix.Identity(4).freeze()
+ else:
+ if export_settings['vtree'].nodes[export_settings['vtree'].nodes[obj_uuid].parent_uuid].blender_type not in [VExportNode.BONE]:
+ parent_mat = export_settings['vtree'].nodes[export_settings['vtree'].nodes[obj_uuid].parent_uuid].blender_object.matrix_world
+ else:
+ # Object animated is parented to a bone
+ blender_bone = export_settings['vtree'].nodes[export_settings['vtree'].nodes[obj_uuid].parent_bone_uuid].blender_bone
+ armature_object = export_settings['vtree'].nodes[export_settings['vtree'].nodes[export_settings['vtree'].nodes[obj_uuid].parent_bone_uuid].armature].blender_object
+ axis_basis_change = mathutils.Matrix(
+ ((1.0, 0.0, 0.0, 0.0), (0.0, 0.0, 1.0, 0.0), (0.0, -1.0, 0.0, 0.0), (0.0, 0.0, 0.0, 1.0)))
+
+ parent_mat = armature_object.matrix_world @ blender_bone.matrix @ axis_basis_change
+
+ #For object inside collection (at root), matrix world is already expressed regarding collection parent
+ if export_settings['vtree'].nodes[obj_uuid].parent_uuid is not None and export_settings['vtree'].nodes[export_settings['vtree'].nodes[obj_uuid].parent_uuid].blender_type == VExportNode.COLLECTION:
+ parent_mat = mathutils.Matrix.Identity(4).freeze()
+
+ mat = parent_mat.inverted_safe() @ blender_obj.matrix_world
+
+ if obj_uuid not in data.keys():
+ data[obj_uuid] = {}
+
+ if blender_obj.animation_data and blender_obj.animation_data.action:
+ if blender_obj.animation_data.action.name not in data[obj_uuid].keys():
+ data[obj_uuid][blender_obj.animation_data.action.name] = {}
+ data[obj_uuid][blender_obj.animation_data.action.name][frame] = mat
+ else:
+ # case of baking selected object.
+ # There is no animation, so use uuid of object as key
+ if obj_uuid not in data[obj_uuid].keys():
+ data[obj_uuid][obj_uuid] = {}
+ data[obj_uuid][obj_uuid][frame] = mat
+
+ frame += step
+ return data
@bonecache
-def get_bone_matrix(blender_object_if_armature: typing.Optional[bpy.types.Object],
+def get_bone_matrix(blender_obj_uuid_if_armature: typing.Optional[str],
channels: typing.Tuple[bpy.types.FCurve],
bake_bone: typing.Union[str, None],
bake_channel: typing.Union[str, None],
@@ -130,9 +201,11 @@ def get_bone_matrix(blender_object_if_armature: typing.Optional[bpy.types.Object
bake_range_end,
action_name: str,
current_frame: int,
- step: int
+ step: int,
+ export_settings
):
+ blender_object_if_armature = export_settings['vtree'].nodes[blender_obj_uuid_if_armature].blender_object if blender_obj_uuid_if_armature is not None else None
data = {}
# Always using bake_range, because some bones may need to be baked,
@@ -145,35 +218,40 @@ def get_bone_matrix(blender_object_if_armature: typing.Optional[bpy.types.Object
frame = start_frame
while frame <= end_frame:
data[frame] = {}
- # we need to bake in the constraints
bpy.context.scene.frame_set(int(frame))
- for pbone in blender_object_if_armature.pose.bones:
- if bake_bone is None:
- matrix = pbone.matrix_basis.copy()
+ bones = export_settings['vtree'].get_all_bones(blender_obj_uuid_if_armature)
+
+ for bone_uuid in bones:
+ blender_bone = export_settings['vtree'].nodes[bone_uuid].blender_bone
+
+ if export_settings['vtree'].nodes[bone_uuid].parent_uuid is not None and export_settings['vtree'].nodes[export_settings['vtree'].nodes[bone_uuid].parent_uuid].blender_type == VExportNode.BONE:
+ blender_bone_parent = export_settings['vtree'].nodes[export_settings['vtree'].nodes[bone_uuid].parent_uuid].blender_bone
+ rest_mat = blender_bone_parent.bone.matrix_local.inverted_safe() @ blender_bone.bone.matrix_local
+ matrix = rest_mat.inverted_safe() @ blender_bone_parent.matrix.inverted_safe() @ blender_bone.matrix
else:
- if (pbone.bone.use_inherit_rotation == False or pbone.bone.inherit_scale != "FULL") and pbone.parent != None:
- rest_mat = (pbone.parent.bone.matrix_local.inverted_safe() @ pbone.bone.matrix_local)
- matrix = (rest_mat.inverted_safe() @ pbone.parent.matrix.inverted_safe() @ pbone.matrix)
+ if blender_bone.parent is None:
+ matrix = blender_bone.bone.matrix_local.inverted_safe() @ blender_bone.matrix
else:
- matrix = pbone.matrix
- matrix = blender_object_if_armature.convert_space(pose_bone=pbone, matrix=matrix, from_space='POSE', to_space='LOCAL')
+ # Bone has a parent, but in export, after filter, is at root of armature
+ matrix = blender_bone.matrix.copy()
-
- data[frame][pbone.name] = matrix
+ data[frame][blender_bone.name] = matrix
# If some drivers must be evaluated, do it here, to avoid to have to change frame by frame later
- drivers_to_manage = get_sk_drivers(blender_object_if_armature)
- for dr_obj, dr_fcurves in drivers_to_manage:
- vals = get_sk_driver_values(dr_obj, frame, dr_fcurves)
+ drivers_to_manage = get_sk_drivers(blender_obj_uuid_if_armature, export_settings)
+ for dr_obj_uuid, dr_fcurves in drivers_to_manage:
+ vals = get_sk_driver_values(dr_obj_uuid, frame, dr_fcurves, export_settings)
frame += step
return data
# cache for performance reasons
+# This function is called 2 times, for input (timing) and output (key values)
@cached
-def gather_keyframes(blender_object_if_armature: typing.Optional[bpy.types.Object],
+def gather_keyframes(blender_obj_uuid: str,
+ is_armature: bool,
channels: typing.Tuple[bpy.types.FCurve],
non_keyed_values: typing.Tuple[typing.Optional[float]],
bake_bone: typing.Union[str, None],
@@ -182,32 +260,40 @@ def gather_keyframes(blender_object_if_armature: typing.Optional[bpy.types.Objec
bake_range_end,
force_range: bool,
action_name: str,
- driver_obj,
+ driver_obj_uuid,
node_channel_is_animated: bool,
export_settings
- ) -> typing.List[Keyframe]:
+ ) -> typing.Tuple[typing.List[Keyframe], bool]:
"""Convert the blender action groups' fcurves to keyframes for use in glTF."""
+
+ blender_object_if_armature = export_settings['vtree'].nodes[blender_obj_uuid].blender_object if is_armature is True is not None else None
+ blender_obj_uuid_if_armature = blender_obj_uuid if is_armature is True else None
+
if force_range is True:
start_frame = bake_range_start
end_frame = bake_range_end
else:
- if bake_bone is None and driver_obj is None:
+ if bake_bone is None and driver_obj_uuid is None:
# Find the start and end of the whole action group
# Note: channels has some None items only for SK if some SK are not animated
ranges = [channel.range() for channel in channels if channel is not None]
- start_frame = min([channel.range()[0] for channel in channels if channel is not None])
- end_frame = max([channel.range()[1] for channel in channels if channel is not None])
+ if len(channels) != 0:
+ start_frame = min([channel.range()[0] for channel in channels if channel is not None])
+ end_frame = max([channel.range()[1] for channel in channels if channel is not None])
+ else:
+ start_frame = bake_range_start
+ end_frame = bake_range_end
else:
start_frame = bake_range_start
end_frame = bake_range_end
keyframes = []
- if needs_baking(blender_object_if_armature, channels, export_settings):
+ baking_is_needed = needs_baking(blender_object_if_armature, channels, export_settings)
+ if baking_is_needed:
# Bake the animation, by evaluating the animation for all frames
- # TODO: maybe baking can also be done with FCurve.convert_to_samples
- if blender_object_if_armature is not None and driver_obj is None:
+ if blender_object_if_armature is not None and driver_obj_uuid is None:
if bake_bone is None:
pose_bone_if_armature = gltf2_blender_get.get_object_from_datapath(blender_object_if_armature,
channels[0].data_path)
@@ -224,7 +310,7 @@ def gather_keyframes(blender_object_if_armature: typing.Optional[bpy.types.Objec
if isinstance(pose_bone_if_armature, bpy.types.PoseBone):
mat = get_bone_matrix(
- blender_object_if_armature,
+ blender_obj_uuid_if_armature,
channels,
bake_bone,
bake_channel,
@@ -232,7 +318,8 @@ def gather_keyframes(blender_object_if_armature: typing.Optional[bpy.types.Objec
bake_range_end,
action_name,
frame,
- step
+ step,
+ export_settings
)
trans, rot, scale = mat.decompose()
@@ -248,12 +335,36 @@ def gather_keyframes(blender_object_if_armature: typing.Optional[bpy.types.Objec
"scale": scale
}[target_property]
else:
- if driver_obj is None:
- # Note: channels has some None items only for SK if some SK are not animated
- key.value = [c.evaluate(frame) for c in channels if c is not None]
- complete_key(key, non_keyed_values)
+ if driver_obj_uuid is None:
+ # If channel is TRS, we bake from world matrix, else this is SK
+ if len(channels) != 0:
+ target = [c for c in channels if c is not None][0].data_path.split('.')[-1]
+ else:
+ target = bake_channel
+ if target == "value": #SK
+ # Note: channels has some None items only for SK if some SK are not animated
+ key.value = [c.evaluate(frame) for c in channels if c is not None]
+ complete_key(key, non_keyed_values)
+ else:
+
+ mat = get_object_matrix(blender_obj_uuid,
+ action_name,
+ bake_range_start,
+ bake_range_end,
+ frame,
+ step,
+ export_settings)
+
+ trans, rot, sca = mat.decompose()
+ key.value_total = {
+ "location": trans,
+ "rotation_axis_angle": [rot.to_axis_angle()[1], rot.to_axis_angle()[0][0], rot.to_axis_angle()[0][1], rot.to_axis_angle()[0][2]],
+ "rotation_euler": rot.to_euler(),
+ "rotation_quaternion": rot,
+ "scale": sca
+ }[target]
else:
- key.value = get_sk_driver_values(driver_obj, frame, channels)
+ key.value = get_sk_driver_values(driver_obj_uuid, frame, channels, export_settings)
complete_key(key, non_keyed_values)
keyframes.append(key)
frame += step
@@ -307,7 +418,7 @@ def gather_keyframes(blender_object_if_armature: typing.Optional[bpy.types.Objec
keyframes.append(key)
if not export_settings[gltf2_blender_export_keys.OPTIMIZE_ANIMS]:
- return keyframes
+ return (keyframes, baking_is_needed)
# For armature only
# Check if all values are the same
@@ -319,17 +430,20 @@ def gather_keyframes(blender_object_if_armature: typing.Optional[bpy.types.Objec
if node_channel_is_animated is True: # fcurve on this bone for this property
# Keep animation, but keep only 2 keyframes if data are not changing
- return [keyframes[0], keyframes[-1]] if cst is True and len(keyframes) >= 2 else keyframes
+ return ([keyframes[0], keyframes[-1]], baking_is_needed) if cst is True and len(keyframes) >= 2 else (keyframes, baking_is_needed)
else: # bone is not animated (no fcurve)
# Not keeping if not changing property
- return None if cst is True else keyframes
+ return (None, baking_is_needed) if cst is True else (keyframes, baking_is_needed)
else:
# For objects, if all values are the same, we keep only first and last
cst = fcurve_is_constant(keyframes)
- return [keyframes[0], keyframes[-1]] if cst is True and len(keyframes) >= 2 else keyframes
-
+ if node_channel_is_animated is True:
+ return ([keyframes[0], keyframes[-1]], baking_is_needed) if cst is True and len(keyframes) >= 2 else (keyframes, baking_is_needed)
+ else:
+ # baked object (selected but not animated)
+ return (None, baking_is_needed) if cst is True else (keyframes, baking_is_needed)
- return keyframes
+ return (keyframes, baking_is_needed)
def fcurve_is_constant(keyframes):
@@ -374,6 +488,10 @@ def needs_baking(blender_object_if_armature: typing.Optional[bpy.types.Object],
if export_settings[gltf2_blender_export_keys.FORCE_SAMPLING]:
return True
+ # If tree is truncated, sampling is forced
+ if export_settings['vtree'].tree_troncated is True:
+ return True
+
# Sampling due to unsupported interpolation
interpolation = [c for c in channels if c is not None][0].keyframe_points[0].interpolation
if interpolation not in ["BEZIER", "LINEAR", "CONSTANT"]:
diff --git a/io_scene_gltf2/blender/exp/gltf2_blender_gather_animation_samplers.py b/io_scene_gltf2/blender/exp/gltf2_blender_gather_animation_samplers.py
index b3cc9d30..143fccea 100755
--- a/io_scene_gltf2/blender/exp/gltf2_blender_gather_animation_samplers.py
+++ b/io_scene_gltf2/blender/exp/gltf2_blender_gather_animation_samplers.py
@@ -3,6 +3,7 @@
import typing
+from io_scene_gltf2.blender.exp.gltf2_blender_gather_tree import VExportNode
import bpy
import mathutils
@@ -21,20 +22,23 @@ from io_scene_gltf2.io.exp.gltf2_io_user_extensions import export_user_extension
@cached
def gather_animation_sampler(channels: typing.Tuple[bpy.types.FCurve],
- blender_object: bpy.types.Object,
+ obj_uuid: str,
bake_bone: typing.Union[str, None],
bake_channel: typing.Union[str, None],
bake_range_start,
bake_range_end,
force_range: bool,
action_name: str,
- driver_obj,
+ driver_obj_uuid,
node_channel_is_animated: bool,
+ need_rotation_correction,
export_settings
) -> gltf2_io.AnimationSampler:
- blender_object_if_armature = blender_object if blender_object.type == "ARMATURE" else None
- if blender_object_if_armature is not None and driver_obj is None:
+ blender_object = export_settings['vtree'].nodes[obj_uuid].blender_object
+ is_armature = True if blender_object.type == "ARMATURE" else False
+ blender_object_if_armature = blender_object if is_armature is True else None
+ if is_armature is True and driver_obj_uuid is None:
if bake_bone is None:
pose_bone_if_armature = gltf2_blender_get.get_object_from_datapath(blender_object_if_armature,
channels[0].data_path)
@@ -45,15 +49,15 @@ def gather_animation_sampler(channels: typing.Tuple[bpy.types.FCurve],
non_keyed_values = __gather_non_keyed_values(channels, blender_object,
blender_object_if_armature, pose_bone_if_armature,
bake_channel,
- driver_obj,
+ driver_obj_uuid,
export_settings)
if blender_object.parent is not None:
matrix_parent_inverse = blender_object.matrix_parent_inverse.copy().freeze()
else:
matrix_parent_inverse = mathutils.Matrix.Identity(4).freeze()
- input = __gather_input(channels, blender_object_if_armature, non_keyed_values,
- bake_bone, bake_channel, bake_range_start, bake_range_end, force_range, action_name, driver_obj, node_channel_is_animated, export_settings)
+ input = __gather_input(channels, obj_uuid, is_armature, non_keyed_values,
+ bake_bone, bake_channel, bake_range_start, bake_range_end, force_range, action_name, driver_obj_uuid, node_channel_is_animated, export_settings)
if input is None:
# After check, no need to animate this node for this channel
@@ -66,7 +70,8 @@ def gather_animation_sampler(channels: typing.Tuple[bpy.types.FCurve],
interpolation=__gather_interpolation(channels, blender_object_if_armature, export_settings, bake_bone, bake_channel),
output=__gather_output(channels,
matrix_parent_inverse,
- blender_object_if_armature,
+ obj_uuid,
+ is_armature,
non_keyed_values,
bake_bone,
bake_channel,
@@ -74,8 +79,9 @@ def gather_animation_sampler(channels: typing.Tuple[bpy.types.FCurve],
bake_range_end,
force_range,
action_name,
- driver_obj,
+ driver_obj_uuid,
node_channel_is_animated,
+ need_rotation_correction,
export_settings)
)
@@ -97,12 +103,13 @@ def __gather_non_keyed_values(channels: typing.Tuple[bpy.types.FCurve],
blender_object_if_armature: typing.Optional[bpy.types.Object],
pose_bone_if_armature: typing.Optional[bpy.types.PoseBone],
bake_channel: typing.Union[str, None],
- driver_obj,
+ driver_obj_uuid,
export_settings
) -> typing.Tuple[typing.Optional[float]]:
non_keyed_values = []
+ driver_obj = export_settings['vtree'].nodes[driver_obj_uuid].blender_object if driver_obj_uuid is not None else None
obj = blender_object if driver_obj is None else driver_obj
# Note: channels has some None items only for SK if some SK are not animated
@@ -217,10 +224,10 @@ def __gather_extras(channels: typing.Tuple[bpy.types.FCurve],
) -> typing.Any:
return None
-
@cached
def __gather_input(channels: typing.Tuple[bpy.types.FCurve],
- blender_object_if_armature: typing.Optional[bpy.types.Object],
+ blender_obj_uuid: str,
+ is_armature: bool,
non_keyed_values: typing.Tuple[typing.Optional[float]],
bake_bone: typing.Union[str, None],
bake_channel: typing.Union[str, None],
@@ -228,12 +235,13 @@ def __gather_input(channels: typing.Tuple[bpy.types.FCurve],
bake_range_end,
force_range: bool,
action_name,
- driver_obj,
+ driver_obj_uuid,
node_channel_is_animated: bool,
export_settings
) -> gltf2_io.Accessor:
"""Gather the key time codes."""
- keyframes = gltf2_blender_gather_animation_sampler_keyframes.gather_keyframes(blender_object_if_armature,
+ keyframes, is_baked = gltf2_blender_gather_animation_sampler_keyframes.gather_keyframes(blender_obj_uuid,
+ is_armature,
channels,
non_keyed_values,
bake_bone,
@@ -242,7 +250,7 @@ def __gather_input(channels: typing.Tuple[bpy.types.FCurve],
bake_range_end,
force_range,
action_name,
- driver_obj,
+ driver_obj_uuid,
node_channel_is_animated,
export_settings)
if keyframes is None:
@@ -277,14 +285,15 @@ def __gather_interpolation(channels: typing.Tuple[bpy.types.FCurve],
# TODO: check if the bone was animated with CONSTANT
return 'LINEAR'
else:
- max_keyframes = max([len(ch.keyframe_points) for ch in channels if ch is not None])
- # If only single keyframe revert to STEP
- if max_keyframes < 2:
- return 'STEP'
+ if len(channels) != 0: # channels can be empty when baking object (non animated selected object)
+ max_keyframes = max([len(ch.keyframe_points) for ch in channels if ch is not None])
+ # If only single keyframe revert to STEP
+ if max_keyframes < 2:
+ return 'STEP'
- # If all keyframes are CONSTANT, we can use STEP.
- if all(all(k.interpolation == 'CONSTANT' for k in c.keyframe_points) for c in channels if c is not None):
- return 'STEP'
+ # If all keyframes are CONSTANT, we can use STEP.
+ if all(all(k.interpolation == 'CONSTANT' for k in c.keyframe_points) for c in channels if c is not None):
+ return 'STEP'
# Otherwise, sampled keyframes use LINEAR interpolation.
return 'LINEAR'
@@ -304,7 +313,8 @@ def __gather_interpolation(channels: typing.Tuple[bpy.types.FCurve],
@cached
def __gather_output(channels: typing.Tuple[bpy.types.FCurve],
parent_inverse,
- blender_object_if_armature: typing.Optional[bpy.types.Object],
+ blender_obj_uuid: str,
+ is_armature: bool,
non_keyed_values: typing.Tuple[typing.Optional[float]],
bake_bone: typing.Union[str, None],
bake_channel: typing.Union[str, None],
@@ -314,10 +324,12 @@ def __gather_output(channels: typing.Tuple[bpy.types.FCurve],
action_name,
driver_obj,
node_channel_is_animated: bool,
+ need_rotation_correction: bool,
export_settings
) -> gltf2_io.Accessor:
"""Gather the data of the keyframes."""
- keyframes = gltf2_blender_gather_animation_sampler_keyframes.gather_keyframes(blender_object_if_armature,
+ keyframes, is_baked = gltf2_blender_gather_animation_sampler_keyframes.gather_keyframes(blender_obj_uuid,
+ is_armature,
channels,
non_keyed_values,
bake_bone,
@@ -329,10 +341,19 @@ def __gather_output(channels: typing.Tuple[bpy.types.FCurve],
driver_obj,
node_channel_is_animated,
export_settings)
+
+ if is_baked is True:
+ parent_inverse = mathutils.Matrix.Identity(4).freeze()
+
+ blender_object_if_armature = export_settings['vtree'].nodes[blender_obj_uuid].blender_object if is_armature is True else None
+
if bake_bone is not None:
target_datapath = "pose.bones['" + bake_bone + "']." + bake_channel
else:
- target_datapath = [c for c in channels if c is not None][0].data_path
+ if len(channels) != 0: # channels can be empty when baking object (non animated selected object)
+ target_datapath = [c for c in channels if c is not None][0].data_path
+ else:
+ target_datapath = bake_channel
is_yup = export_settings[gltf2_blender_export_keys.YUP]
@@ -355,6 +376,7 @@ def __gather_output(channels: typing.Tuple[bpy.types.FCurve],
bone = blender_object_if_armature.pose.bones[bake_bone]
if isinstance(bone, bpy.types.PoseBone):
if bone.parent is None:
+ # bone at root of armature
axis_basis_change = mathutils.Matrix.Identity(4)
if export_settings[gltf2_blender_export_keys.YUP]:
axis_basis_change = mathutils.Matrix(
@@ -364,10 +386,25 @@ def __gather_output(channels: typing.Tuple[bpy.types.FCurve],
(0.0, 0.0, 0.0, 1.0)))
correction_matrix_local = axis_basis_change @ bone.bone.matrix_local
else:
- correction_matrix_local = (
- bone.parent.bone.matrix_local.inverted_safe() @
- bone.bone.matrix_local
- )
+ # Bone is not at root of armature
+ # There are 2 cases :
+ parent_uuid = export_settings['vtree'].nodes[export_settings['vtree'].nodes[blender_obj_uuid].bones[bone.name]].parent_uuid
+ if parent_uuid is not None and export_settings['vtree'].nodes[parent_uuid].blender_type == VExportNode.BONE:
+ # export bone is not at root of armature neither
+ correction_matrix_local = (
+ bone.parent.bone.matrix_local.inverted_safe() @
+ bone.bone.matrix_local
+ )
+ else:
+ # exported bone (after filter) is at root of armature
+ axis_basis_change = mathutils.Matrix.Identity(4)
+ if export_settings[gltf2_blender_export_keys.YUP]:
+ axis_basis_change = mathutils.Matrix(
+ ((1.0, 0.0, 0.0, 0.0),
+ (0.0, 0.0, 1.0, 0.0),
+ (0.0, -1.0, 0.0, 0.0),
+ (0.0, 0.0, 0.0, 1.0)))
+ correction_matrix_local = axis_basis_change
transform = correction_matrix_local
else:
@@ -378,14 +415,14 @@ def __gather_output(channels: typing.Tuple[bpy.types.FCurve],
values = []
for keyframe in keyframes:
# Transform the data and build gltf control points
- value = gltf2_blender_math.transform(keyframe.value, target_datapath, transform)
+ value = gltf2_blender_math.transform(keyframe.value, target_datapath, transform, need_rotation_correction)
if is_yup and not is_armature_animation:
value = gltf2_blender_math.swizzle_yup(value, target_datapath)
keyframe_value = gltf2_blender_math.mathutils_to_gltf(value)
if keyframe.in_tangent is not None:
# we can directly transform the tangent as it currently is represented by a control point
- in_tangent = gltf2_blender_math.transform(keyframe.in_tangent, target_datapath, transform)
+ in_tangent = gltf2_blender_math.transform(keyframe.in_tangent, target_datapath, transform, need_rotation_correction)
if is_yup and blender_object_if_armature is None:
in_tangent = gltf2_blender_math.swizzle_yup(in_tangent, target_datapath)
# the tangent in glTF is relative to the keyframe value
@@ -397,7 +434,7 @@ def __gather_output(channels: typing.Tuple[bpy.types.FCurve],
if keyframe.out_tangent is not None:
# we can directly transform the tangent as it currently is represented by a control point
- out_tangent = gltf2_blender_math.transform(keyframe.out_tangent, target_datapath, transform)
+ out_tangent = gltf2_blender_math.transform(keyframe.out_tangent, target_datapath, transform, need_rotation_correction)
if is_yup and blender_object_if_armature is None:
out_tangent = gltf2_blender_math.swizzle_yup(out_tangent, target_datapath)
# the tangent in glTF is relative to the keyframe value
diff --git a/io_scene_gltf2/blender/exp/gltf2_blender_gather_animations.py b/io_scene_gltf2/blender/exp/gltf2_blender_gather_animations.py
index 39f09d52..c56517fb 100755
--- a/io_scene_gltf2/blender/exp/gltf2_blender_gather_animations.py
+++ b/io_scene_gltf2/blender/exp/gltf2_blender_gather_animations.py
@@ -11,7 +11,36 @@ from ..com.gltf2_blender_extras import generate_extras
from io_scene_gltf2.io.exp.gltf2_io_user_extensions import export_user_extensions
-def gather_animations(blender_object: bpy.types.Object,
+def __gather_channels_baked(obj_uuid, export_settings):
+ channels = []
+
+ # If no animation in file, no need to bake
+ if len(bpy.data.actions) == 0:
+ return None
+
+ start_frame = min([v[0] for v in [a.frame_range for a in bpy.data.actions]])
+ end_frame = max([v[1] for v in [a.frame_range for a in bpy.data.actions]])
+
+ for p in ["location", "rotation_quaternion", "scale"]:
+ channel = gltf2_blender_gather_animation_channels.gather_animation_channel(
+ obj_uuid,
+ (),
+ export_settings,
+ None,
+ p,
+ start_frame,
+ end_frame,
+ False,
+ obj_uuid, # Use obj uuid as action name for caching
+ None,
+ False #If Object is not animated, don't keep animation for this channel
+ )
+ if channel is not None:
+ channels.append(channel)
+
+ return channels if len(channels) > 0 else None
+
+def gather_animations( obj_uuid: int,
tracks: typing.Dict[str, typing.List[int]],
offset: int,
export_settings) -> typing.Tuple[typing.List[gltf2_io.Animation], typing.Dict[str, typing.List[int]]]:
@@ -24,11 +53,35 @@ def gather_animations(blender_object: bpy.types.Object,
"""
animations = []
+ blender_object = export_settings['vtree'].nodes[obj_uuid].blender_object
+
# Collect all 'actions' affecting this object. There is a direct mapping between blender actions and glTF animations
blender_actions = __get_blender_actions(blender_object, export_settings)
- # save the current active action of the object, if any
- # We will restore it after export
+ if len([a for a in blender_actions if a[2] == "OBJECT"]) == 0:
+ # No TRS animation are found for this object.
+ # But we need to bake, in case we export selection
+ if export_settings['gltf_selected'] is True and blender_object.type != "ARMATURE":
+ channels = __gather_channels_baked(obj_uuid, export_settings)
+ if channels is not None:
+ animation = gltf2_io.Animation(
+ channels=channels,
+ extensions=None, # as other animations
+ extras=None, # Because there is no animation to get extras from
+ name=blender_object.name, # Use object name as animation name
+ samplers=[]
+ )
+
+ __link_samplers(animation, export_settings)
+ if animation is not None:
+ animations.append(animation)
+ elif export_settings['gltf_selected'] is True and blender_object.type == "ARMATURE":
+ # We need to bake all bones. Because some bone can have some constraints linking to
+ # some other armature bones, for example
+ #TODO
+ pass
+
+
current_action = None
if blender_object.animation_data and blender_object.animation_data.action:
current_action = blender_object.animation_data.action
@@ -63,7 +116,7 @@ def gather_animations(blender_object: bpy.types.Object,
# No need to set active shapekeys animations, this is needed for bone baking
- animation = __gather_animation(blender_action, blender_object, export_settings)
+ animation = __gather_animation(obj_uuid, blender_action, export_settings)
if animation is not None:
animations.append(animation)
@@ -91,21 +144,24 @@ def gather_animations(blender_object: bpy.types.Object,
return animations, tracks
-def __gather_animation(blender_action: bpy.types.Action,
- blender_object: bpy.types.Object,
- export_settings
+def __gather_animation( obj_uuid: int,
+ blender_action: bpy.types.Action,
+ export_settings
) -> typing.Optional[gltf2_io.Animation]:
+
+ blender_object = export_settings['vtree'].nodes[obj_uuid].blender_object
+
if not __filter_animation(blender_action, blender_object, export_settings):
return None
name = __gather_name(blender_action, blender_object, export_settings)
try:
animation = gltf2_io.Animation(
- channels=__gather_channels(blender_action, blender_object, export_settings),
+ channels=__gather_channels(obj_uuid, blender_action, export_settings),
extensions=__gather_extensions(blender_action, blender_object, export_settings),
extras=__gather_extras(blender_action, blender_object, export_settings),
name=name,
- samplers=__gather_samplers(blender_action, blender_object, export_settings)
+ samplers=__gather_samplers(obj_uuid, blender_action, export_settings)
)
except RuntimeError as error:
print_console("WARNING", "Animation '{}' could not be exported. Cause: {}".format(name, error))
@@ -134,12 +190,12 @@ def __filter_animation(blender_action: bpy.types.Action,
return True
-def __gather_channels(blender_action: bpy.types.Action,
- blender_object: bpy.types.Object,
+def __gather_channels(obj_uuid: int,
+ blender_action: bpy.types.Action,
export_settings
) -> typing.List[gltf2_io.AnimationChannel]:
return gltf2_blender_gather_animation_channels.gather_animation_channels(
- blender_action, blender_object, export_settings)
+ obj_uuid, blender_action, export_settings)
def __gather_extensions(blender_action: bpy.types.Action,
@@ -166,8 +222,8 @@ def __gather_name(blender_action: bpy.types.Action,
return blender_action.name
-def __gather_samplers(blender_action: bpy.types.Action,
- blender_object: bpy.types.Object,
+def __gather_samplers(obj_uuid: str,
+ blender_action: bpy.types.Action,
export_settings
) -> typing.List[gltf2_io.AnimationSampler]:
# We need to gather the samplers after gathering all channels --> populate this list in __link_samplers
diff --git a/io_scene_gltf2/blender/exp/gltf2_blender_gather_cache.py b/io_scene_gltf2/blender/exp/gltf2_blender_gather_cache.py
index 7e49ac02..4f95431c 100755
--- a/io_scene_gltf2/blender/exp/gltf2_blender_gather_cache.py
+++ b/io_scene_gltf2/blender/exp/gltf2_blender_gather_cache.py
@@ -6,83 +6,134 @@ import bpy
from io_scene_gltf2.blender.exp import gltf2_blender_get
-def cached(func):
+def cached_by_key(key):
+ """
+ Decorates functions whose result should be cached. Use it like:
+ @cached_by_key(key=...)
+ def func(..., export_settings):
+ ...
+ The decorated function, func, must always take an "export_settings" arg
+ (the cache is stored here).
+ The key argument to the decorator is a function that computes the key to
+ cache on. It is passed all the arguments to func.
"""
- Decorate the cache gather functions results.
+ def inner(func):
+ @functools.wraps(func)
+ def wrapper_cached(*args, **kwargs):
+ if kwargs.get("export_settings"):
+ export_settings = kwargs["export_settings"]
+ else:
+ export_settings = args[-1]
+
+ cache_key = key(*args, **kwargs)
+
+ # invalidate cache if export settings have changed
+ if not hasattr(func, "__export_settings") or export_settings != func.__export_settings:
+ func.__cache = {}
+ func.__export_settings = export_settings
+ # use or fill cache
+ if cache_key in func.__cache:
+ return func.__cache[cache_key]
+ else:
+ result = func(*args, **kwargs)
+ func.__cache[cache_key] = result
+ return result
+
+ return wrapper_cached
+
+ return inner
- The gather function is only executed if its result isn't in the cache yet
- :param func: the function to be decorated. It will have a static __cache member afterwards
- :return:
+
+def default_key(*args, **kwargs):
+ """
+ Default cache key for @cached functions.
+ Cache on all arguments (except export_settings).
"""
+ assert len(args) >= 2 and 0 <= len(kwargs) <= 1, "Wrong signature for cached function"
+ cache_key_args = args
+ # make a shallow copy of the keyword arguments so that 'export_settings' can be removed
+ cache_key_kwargs = dict(kwargs)
+ if kwargs.get("export_settings"):
+ del cache_key_kwargs["export_settings"]
+ else:
+ cache_key_args = args[:-1]
+
+ cache_key = ()
+ for i in cache_key_args:
+ cache_key += (i,)
+ for i in cache_key_kwargs.values():
+ cache_key += (i,)
+
+ return cache_key
+
+
+def cached(func):
+ return cached_by_key(key=default_key)(func)
+
+def objectcache(func):
+
+ def reset_cache_objectcache():
+ func.__objectcache = {}
+
+ func.reset_cache = reset_cache_objectcache
+
@functools.wraps(func)
- def wrapper_cached(*args, **kwargs):
- assert len(args) >= 2 and 0 <= len(kwargs) <= 1, "Wrong signature for cached function"
+ def wrapper_objectcache(*args, **kwargs):
cache_key_args = args
- # make a shallow copy of the keyword arguments so that 'export_settings' can be removed
- cache_key_kwargs = dict(kwargs)
- if kwargs.get("export_settings"):
- export_settings = kwargs["export_settings"]
- # 'export_settings' should not be cached
- del cache_key_kwargs["export_settings"]
- else:
- export_settings = args[-1]
- cache_key_args = args[:-1]
+ cache_key_args = args[:-1]
- __by_name = [bpy.types.Object, bpy.types.Scene, bpy.types.Material, bpy.types.Action, bpy.types.Mesh, bpy.types.PoseBone]
+ if not hasattr(func, "__objectcache"):
+ func.reset_cache()
- # we make a tuple from the function arguments so that they can be used as a key to the cache
- cache_key = ()
- for i in cache_key_args:
- if type(i) in __by_name:
- cache_key += (i.name,)
- else:
- cache_key += (i,)
- for i in cache_key_kwargs.values():
- if type(i) in __by_name:
- cache_key += (i.name,)
- else:
- cache_key += (i,)
-
- # invalidate cache if export settings have changed
- if not hasattr(func, "__export_settings") or export_settings != func.__export_settings:
- func.__cache = {}
- func.__export_settings = export_settings
- # use or fill cache
- if cache_key in func.__cache:
- return func.__cache[cache_key]
- else:
+ # object is not cached yet
+ if cache_key_args[0] not in func.__objectcache.keys():
result = func(*args)
- func.__cache[cache_key] = result
- return result
- return wrapper_cached
+ func.__objectcache = result
+ return result[cache_key_args[0]][cache_key_args[1]][cache_key_args[4]]
+ # object is in cache, but not this action
+ # We need to keep other actions
+ elif cache_key_args[1] not in func.__objectcache[cache_key_args[0]].keys():
+ result = func(*args)
+ func.__objectcache[cache_key_args[0]][cache_key_args[1]] = result[cache_key_args[0]][cache_key_args[1]]
+ return result[cache_key_args[0]][cache_key_args[1]][cache_key_args[4]]
+ # all is already cached
+ else:
+ return func.__objectcache[cache_key_args[0]][cache_key_args[1]][cache_key_args[4]]
+ return wrapper_objectcache
def bonecache(func):
def reset_cache_bonecache():
func.__current_action_name = None
- func.__current_armature_name = None
+ func.__current_armature_uuid = None
func.__bonecache = {}
func.reset_cache = reset_cache_bonecache
@functools.wraps(func)
def wrapper_bonecache(*args, **kwargs):
- if args[2] is None:
- pose_bone_if_armature = gltf2_blender_get.get_object_from_datapath(args[0],
- args[1][0].data_path)
+
+ armature = args[-1]['vtree'].nodes[args[0]].blender_object
+
+ cache_key_args = args
+ cache_key_args = args[:-1]
+
+ if cache_key_args[2] is None:
+ pose_bone_if_armature = gltf2_blender_get.get_object_from_datapath(armature,
+ cache_key_args[1][0].data_path)
else:
- pose_bone_if_armature = args[0].pose.bones[args[2]]
+ pose_bone_if_armature = armature.pose.bones[cache_key_args[2]]
if not hasattr(func, "__current_action_name"):
func.reset_cache()
- if args[6] != func.__current_action_name or args[0] != func.__current_armature_name:
+ if cache_key_args[6] != func.__current_action_name or cache_key_args[0] != func.__current_armature_uuid:
result = func(*args)
func.__bonecache = result
- func.__current_action_name = args[6]
- func.__current_armature_name = args[0]
- return result[args[7]][pose_bone_if_armature.name]
+ func.__current_action_name = cache_key_args[6]
+ func.__current_armature_uuid = cache_key_args[0]
+ return result[cache_key_args[7]][pose_bone_if_armature.name]
else:
- return func.__bonecache[args[7]][pose_bone_if_armature.name]
+ return func.__bonecache[cache_key_args[7]][pose_bone_if_armature.name]
return wrapper_bonecache
# TODO: replace "cached" with "unique" in all cases where the caching is functional and not only for performance reasons
@@ -92,23 +143,27 @@ unique = cached
def skdriverdiscovercache(func):
def reset_cache_skdriverdiscovercache():
- func.__current_armature_name = None
+ func.__current_armature_uuid = None
func.__skdriverdiscover = {}
func.reset_cache = reset_cache_skdriverdiscovercache
@functools.wraps(func)
def wrapper_skdriverdiscover(*args, **kwargs):
- if not hasattr(func, "__current_armature_name") or func.__current_armature_name is None:
+
+ cache_key_args = args
+ cache_key_args = args[:-1]
+
+ if not hasattr(func, "__current_armature_uuid") or func.__current_armature_uuid is None:
func.reset_cache()
- if args[0] != func.__current_armature_name:
+ if cache_key_args[0] != func.__current_armature_uuid:
result = func(*args)
- func.__skdriverdiscover[args[0]] = result
- func.__current_armature_name = args[0]
+ func.__skdriverdiscover[cache_key_args[0]] = result
+ func.__current_armature_uuid = cache_key_args[0]
return result
else:
- return func.__skdriverdiscover[args[0]]
+ return func.__skdriverdiscover[cache_key_args[0]]
return wrapper_skdriverdiscover
def skdrivervalues(func):
@@ -123,12 +178,17 @@ def skdrivervalues(func):
if not hasattr(func, "__skdrivervalues") or func.__skdrivervalues is None:
func.reset_cache()
- if args[0].name not in func.__skdrivervalues.keys():
- func.__skdrivervalues[args[0].name] = {}
- if args[1] not in func.__skdrivervalues[args[0].name]:
+ armature = args[-1]['vtree'].nodes[args[0]].blender_object
+
+ cache_key_args = args
+ cache_key_args = args[:-1]
+
+ if armature.name not in func.__skdrivervalues.keys():
+ func.__skdrivervalues[armature.name] = {}
+ if cache_key_args[1] not in func.__skdrivervalues[armature.name]:
vals = func(*args)
- func.__skdrivervalues[args[0].name][args[1]] = vals
+ func.__skdrivervalues[armature.name][cache_key_args[1]] = vals
return vals
else:
- return func.__skdrivervalues[args[0].name][args[1]]
+ return func.__skdrivervalues[armature.name][cache_key_args[1]]
return wrapper_skdrivervalues
diff --git a/io_scene_gltf2/blender/exp/gltf2_blender_gather_drivers.py b/io_scene_gltf2/blender/exp/gltf2_blender_gather_drivers.py
index 1f82c2b3..4e77f60e 100644
--- a/io_scene_gltf2/blender/exp/gltf2_blender_gather_drivers.py
+++ b/io_scene_gltf2/blender/exp/gltf2_blender_gather_drivers.py
@@ -5,13 +5,20 @@
from io_scene_gltf2.blender.exp.gltf2_blender_gather_cache import skdriverdiscovercache, skdrivervalues
from io_scene_gltf2.blender.com.gltf2_blender_data_path import get_target_object_path
-
@skdriverdiscovercache
-def get_sk_drivers(blender_armature):
+def get_sk_drivers(blender_armature_uuid, export_settings):
+
+ blender_armature = export_settings['vtree'].nodes[blender_armature_uuid].blender_object
drivers = []
- for child in blender_armature.children:
+ for child_uuid in export_settings['vtree'].nodes[blender_armature_uuid].children:
+
+ if export_settings['vtree'].nodes[child_uuid].blender_type == "BONE":
+ continue
+
+ child = export_settings['vtree'].nodes[child_uuid].blender_object
+
if not child.data:
continue
# child.data can be an armature - which has no shapekeys
@@ -63,13 +70,14 @@ def get_sk_drivers(blender_armature):
all_sorted_channels.append(existing_idx[i])
if len(all_sorted_channels) > 0:
- drivers.append((child, tuple(all_sorted_channels)))
+ drivers.append((child_uuid, tuple(all_sorted_channels)))
return tuple(drivers)
@skdrivervalues
-def get_sk_driver_values(blender_object, frame, fcurves):
+def get_sk_driver_values(blender_object_uuid, frame, fcurves, export_settings):
sk_values = []
+ blender_object = export_settings['vtree'].nodes[blender_object_uuid].blender_object
for f in [f for f in fcurves if f is not None]:
sk_values.append(blender_object.data.shape_keys.path_resolve(get_target_object_path(f.data_path)).value)
diff --git a/io_scene_gltf2/blender/exp/gltf2_blender_gather_joints.py b/io_scene_gltf2/blender/exp/gltf2_blender_gather_joints.py
index ffc1231b..f4fd6c51 100755
--- a/io_scene_gltf2/blender/exp/gltf2_blender_gather_joints.py
+++ b/io_scene_gltf2/blender/exp/gltf2_blender_gather_joints.py
@@ -1,7 +1,7 @@
# SPDX-License-Identifier: Apache-2.0
# Copyright 2018-2021 The glTF-Blender-IO authors.
-import mathutils
+from mathutils import Matrix, Quaternion, Vector
from . import gltf2_blender_export_keys
from io_scene_gltf2.blender.exp.gltf2_blender_gather_cache import cached
@@ -9,9 +9,40 @@ from io_scene_gltf2.io.com import gltf2_io
from io_scene_gltf2.blender.exp import gltf2_blender_gather_skins
from io_scene_gltf2.io.exp.gltf2_io_user_extensions import export_user_extensions
from ..com.gltf2_blender_extras import generate_extras
+from io_scene_gltf2.blender.exp import gltf2_blender_gather_tree
+
+
+
+# TODO: move these 3 functions to a shared file
+def __convert_swizzle_location(loc, export_settings):
+ """Convert a location from Blender coordinate system to glTF coordinate system."""
+ if export_settings[gltf2_blender_export_keys.YUP]:
+ return Vector((loc[0], loc[2], -loc[1]))
+ else:
+ return Vector((loc[0], loc[1], loc[2]))
+
+
+def __convert_swizzle_rotation(rot, export_settings):
+ """
+ Convert a quaternion rotation from Blender coordinate system to glTF coordinate system.
+
+ 'w' is still at first position.
+ """
+ if export_settings[gltf2_blender_export_keys.YUP]:
+ return Quaternion((rot[0], rot[1], rot[3], -rot[2]))
+ else:
+ return Quaternion((rot[0], rot[1], rot[2], rot[3]))
+
+
+def __convert_swizzle_scale(scale, export_settings):
+ """Convert a scale from Blender coordinate system to glTF coordinate system."""
+ if export_settings[gltf2_blender_export_keys.YUP]:
+ return Vector((scale[0], scale[2], scale[1]))
+ else:
+ return Vector((scale[0], scale[1], scale[2]))
@cached
-def gather_joint(blender_object, blender_bone, export_settings):
+def gather_joint_vnode(vnode, export_settings):
"""
Generate a glTF2 node from a blender bone, as joints in glTF2 are simply nodes.
@@ -19,28 +50,19 @@ def gather_joint(blender_object, blender_bone, export_settings):
:param export_settings: the settings for this export
:return: a glTF2 node (acting as a joint)
"""
- axis_basis_change = mathutils.Matrix.Identity(4)
- if export_settings[gltf2_blender_export_keys.YUP]:
- axis_basis_change = mathutils.Matrix(
- ((1.0, 0.0, 0.0, 0.0), (0.0, 0.0, 1.0, 0.0), (0.0, -1.0, 0.0, 0.0), (0.0, 0.0, 0.0, 1.0)))
+ vtree = export_settings['vtree']
+ blender_object = vtree.nodes[vnode].blender_object
+ blender_bone = vtree.nodes[vnode].blender_bone
- # extract bone transform
- if blender_bone.parent is None:
- correction_matrix_local = axis_basis_change @ blender_bone.bone.matrix_local
- else:
- correction_matrix_local = (
- blender_bone.parent.bone.matrix_local.inverted_safe() @
- blender_bone.bone.matrix_local
- )
-
- if (blender_bone.bone.use_inherit_rotation == False or blender_bone.bone.inherit_scale != "FULL") and blender_bone.parent != None:
- rest_mat = (blender_bone.parent.bone.matrix_local.inverted_safe() @ blender_bone.bone.matrix_local)
- matrix_basis = (rest_mat.inverted_safe() @ blender_bone.parent.matrix.inverted_safe() @ blender_bone.matrix)
- else:
- matrix_basis = blender_bone.matrix
- matrix_basis = blender_object.convert_space(pose_bone=blender_bone, matrix=matrix_basis, from_space='POSE', to_space='LOCAL')
- trans, rot, sca = (correction_matrix_local @ matrix_basis).decompose()
+ mat = vtree.nodes[vtree.nodes[vnode].parent_uuid].matrix_world.inverted_safe() @ vtree.nodes[vnode].matrix_world
+
+ trans, rot, sca = mat.decompose()
+
+ trans = __convert_swizzle_location(trans, export_settings)
+ rot = __convert_swizzle_rotation(rot, export_settings)
+ sca = __convert_swizzle_scale(sca, export_settings)
+
translation, rotation, scale = (None, None, None)
if trans[0] != 0.0 or trans[1] != 0.0 or trans[2] != 0.0:
translation = [trans[0], trans[1], trans[2]]
@@ -52,14 +74,8 @@ def gather_joint(blender_object, blender_bone, export_settings):
# traverse into children
children = []
- if export_settings["gltf_def_bones"] is False:
- for bone in blender_bone.children:
- children.append(gather_joint(blender_object, bone, export_settings))
- else:
- _, children_, _ = gltf2_blender_gather_skins.get_bone_tree(None, blender_bone.id_data)
- if blender_bone.name in children_.keys():
- for bone in children_[blender_bone.name]:
- children.append(gather_joint(blender_object, blender_bone.id_data.pose.bones[bone], export_settings))
+ for bone_uuid in [c for c in vtree.nodes[vnode].children if vtree.nodes[c].blender_type == gltf2_blender_gather_tree.VExportNode.BONE]:
+ children.append(gather_joint_vnode(bone_uuid, export_settings))
# finally add to the joints array containing all the joints in the hierarchy
node = gltf2_io.Node(
@@ -79,6 +95,8 @@ def gather_joint(blender_object, blender_bone, export_settings):
export_user_extensions('gather_joint_hook', export_settings, node, blender_bone)
+ vtree.nodes[vnode].node = node
+
return node
def __gather_extras(blender_bone, export_settings):
diff --git a/io_scene_gltf2/blender/exp/gltf2_blender_gather_materials.py b/io_scene_gltf2/blender/exp/gltf2_blender_gather_materials.py
index 2d83b0dd..04129996 100755
--- a/io_scene_gltf2/blender/exp/gltf2_blender_gather_materials.py
+++ b/io_scene_gltf2/blender/exp/gltf2_blender_gather_materials.py
@@ -3,7 +3,7 @@
import bpy
-from io_scene_gltf2.blender.exp.gltf2_blender_gather_cache import cached
+from io_scene_gltf2.blender.exp.gltf2_blender_gather_cache import cached, cached_by_key
from io_scene_gltf2.io.com import gltf2_io
from io_scene_gltf2.io.com.gltf2_io_extensions import Extension
from io_scene_gltf2.blender.exp import gltf2_blender_gather_texture_info, gltf2_blender_export_keys
@@ -16,8 +16,14 @@ from io_scene_gltf2.blender.exp import gltf2_blender_get
from io_scene_gltf2.io.exp.gltf2_io_user_extensions import export_user_extensions
from io_scene_gltf2.io.com.gltf2_io_debug import print_console
-
@cached
+def get_material_cache_key(blender_material, export_settings):
+ # Use id of material
+ # Do not use bpy.types, which can be unhashable
+ # Do not use the material name, which may not be unique (when linked)
+ return ((id(blender_material),))
+
+@cached_by_key(key=get_material_cache_key)
def gather_material(blender_material, export_settings):
"""
Gather the material used by the blender primitive.
@@ -49,6 +55,12 @@ def gather_material(blender_material, export_settings):
pbr_metallic_roughness=__gather_pbr_metallic_roughness(blender_material, orm_texture, export_settings)
)
+ # If emissive is set, from an emissive node (not PBR)
+ # We need to set manually default values for
+ # pbr_metallic_roughness.baseColor
+ if material.emissive_factor is not None and gltf2_blender_get.get_node_socket(blender_material, bpy.types.ShaderNodeBsdfPrincipled, "Base Color") is None:
+ material.pbr_metallic_roughness = gltf2_blender_gather_materials_pbr_metallic_roughness.get_default_pbr_for_emissive_node()
+
export_user_extensions('gather_material_hook', export_settings, material, blender_material)
return material
diff --git a/io_scene_gltf2/blender/exp/gltf2_blender_gather_materials_pbr_metallic_roughness.py b/io_scene_gltf2/blender/exp/gltf2_blender_gather_materials_pbr_metallic_roughness.py
index 3e18dbad..9395aa43 100755
--- a/io_scene_gltf2/blender/exp/gltf2_blender_gather_materials_pbr_metallic_roughness.py
+++ b/io_scene_gltf2/blender/exp/gltf2_blender_gather_materials_pbr_metallic_roughness.py
@@ -153,3 +153,14 @@ def __has_image_node_from_socket(socket):
if not result:
return False
return True
+
+def get_default_pbr_for_emissive_node():
+ return gltf2_io.MaterialPBRMetallicRoughness(
+ base_color_factor=[0.0,0.0,0.0,1.0],
+ base_color_texture=None,
+ extensions=None,
+ extras=None,
+ metallic_factor=None,
+ metallic_roughness_texture=None,
+ roughness_factor=None
+ )
diff --git a/io_scene_gltf2/blender/exp/gltf2_blender_gather_mesh.py b/io_scene_gltf2/blender/exp/gltf2_blender_gather_mesh.py
index ac90e4cd..c8987127 100755
--- a/io_scene_gltf2/blender/exp/gltf2_blender_gather_mesh.py
+++ b/io_scene_gltf2/blender/exp/gltf2_blender_gather_mesh.py
@@ -4,7 +4,7 @@
import bpy
from typing import Optional, Dict, List, Any, Tuple
from .gltf2_blender_export_keys import MORPH
-from io_scene_gltf2.blender.exp.gltf2_blender_gather_cache import cached
+from io_scene_gltf2.blender.exp.gltf2_blender_gather_cache import cached, cached_by_key
from io_scene_gltf2.io.com import gltf2_io
from io_scene_gltf2.blender.exp import gltf2_blender_gather_primitives
from ..com.gltf2_blender_extras import generate_extras
@@ -13,30 +13,64 @@ from io_scene_gltf2.io.exp.gltf2_io_user_extensions import export_user_extension
@cached
+def get_mesh_cache_key(blender_mesh,
+ blender_object,
+ vertex_groups,
+ modifiers,
+ skip_filter,
+ materials,
+ original_mesh,
+ export_settings):
+ # Use id of original mesh
+ # Do not use bpy.types, which can be unhashable
+ # Do not use the mesh name, which may not be unique (when linked)
+
+ # If materials are not exported, no need to cache by material
+ if export_settings['gltf_materials'] is None:
+ mats = None
+ else:
+ mats = tuple(id(m) if m is not None else None for m in materials)
+
+ # TODO: check which modifier data is really needed for the cache key
+
+ mesh_to_id_cache = blender_mesh if original_mesh is None else original_mesh
+ return (
+ (id(mesh_to_id_cache),),
+ (modifiers,),
+ (skip_filter,), # TODO: check if still needed
+ mats
+ )
+
+@cached_by_key(key=get_mesh_cache_key)
def gather_mesh(blender_mesh: bpy.types.Mesh,
- library: Optional[str],
- blender_object: Optional[bpy.types.Object],
+ uuid_for_skined_data,
vertex_groups: Optional[bpy.types.VertexGroups],
modifiers: Optional[bpy.types.ObjectModifiers],
skip_filter: bool,
- material_names: Tuple[str],
+ materials: Tuple[bpy.types.Material],
+ original_mesh: bpy.types.Mesh,
export_settings
) -> Optional[gltf2_io.Mesh]:
- if not skip_filter and not __filter_mesh(blender_mesh, library, vertex_groups, modifiers, export_settings):
+ if not skip_filter and not __filter_mesh(blender_mesh, vertex_groups, modifiers, export_settings):
return None
mesh = gltf2_io.Mesh(
- extensions=__gather_extensions(blender_mesh, library, vertex_groups, modifiers, export_settings),
- extras=__gather_extras(blender_mesh, library, vertex_groups, modifiers, export_settings),
- name=__gather_name(blender_mesh, library, vertex_groups, modifiers, export_settings),
- weights=__gather_weights(blender_mesh, library, vertex_groups, modifiers, export_settings),
- primitives=__gather_primitives(blender_mesh, library, blender_object, vertex_groups, modifiers, material_names, export_settings),
+ extensions=__gather_extensions(blender_mesh, vertex_groups, modifiers, export_settings),
+ extras=__gather_extras(blender_mesh, vertex_groups, modifiers, export_settings),
+ name=__gather_name(blender_mesh, vertex_groups, modifiers, export_settings),
+ weights=__gather_weights(blender_mesh, vertex_groups, modifiers, export_settings),
+ primitives=__gather_primitives(blender_mesh, uuid_for_skined_data, vertex_groups, modifiers, materials, export_settings),
)
if len(mesh.primitives) == 0:
print_console("WARNING", "Mesh '{}' has no primitives and will be omitted.".format(mesh.name))
return None
+ blender_object = None
+ if uuid_for_skined_data:
+ blender_object = export_settings['vtree'].nodes[uuid_for_skined_data].blender_object
+
+
export_user_extensions('gather_mesh_hook',
export_settings,
mesh,
@@ -45,13 +79,12 @@ def gather_mesh(blender_mesh: bpy.types.Mesh,
vertex_groups,
modifiers,
skip_filter,
- material_names)
+ materials)
return mesh
def __filter_mesh(blender_mesh: bpy.types.Mesh,
- library: Optional[str],
vertex_groups: Optional[bpy.types.VertexGroups],
modifiers: Optional[bpy.types.ObjectModifiers],
export_settings
@@ -63,7 +96,6 @@ def __filter_mesh(blender_mesh: bpy.types.Mesh,
def __gather_extensions(blender_mesh: bpy.types.Mesh,
- library: Optional[str],
vertex_groups: Optional[bpy.types.VertexGroups],
modifiers: Optional[bpy.types.ObjectModifiers],
export_settings
@@ -72,7 +104,6 @@ def __gather_extensions(blender_mesh: bpy.types.Mesh,
def __gather_extras(blender_mesh: bpy.types.Mesh,
- library: Optional[str],
vertex_groups: Optional[bpy.types.VertexGroups],
modifiers: Optional[bpy.types.ObjectModifiers],
export_settings
@@ -100,7 +131,6 @@ def __gather_extras(blender_mesh: bpy.types.Mesh,
def __gather_name(blender_mesh: bpy.types.Mesh,
- library: Optional[str],
vertex_groups: Optional[bpy.types.VertexGroups],
modifiers: Optional[bpy.types.ObjectModifiers],
export_settings
@@ -109,24 +139,21 @@ def __gather_name(blender_mesh: bpy.types.Mesh,
def __gather_primitives(blender_mesh: bpy.types.Mesh,
- library: Optional[str],
- blender_object: Optional[bpy.types.Object],
+ uuid_for_skined_data,
vertex_groups: Optional[bpy.types.VertexGroups],
modifiers: Optional[bpy.types.ObjectModifiers],
- material_names: Tuple[str],
+ materials: Tuple[bpy.types.Material],
export_settings
) -> List[gltf2_io.MeshPrimitive]:
return gltf2_blender_gather_primitives.gather_primitives(blender_mesh,
- library,
- blender_object,
+ uuid_for_skined_data,
vertex_groups,
modifiers,
- material_names,
+ materials,
export_settings)
def __gather_weights(blender_mesh: bpy.types.Mesh,
- library: Optional[str],
vertex_groups: Optional[bpy.types.VertexGroups],
modifiers: Optional[bpy.types.ObjectModifiers],
export_settings
diff --git a/io_scene_gltf2/blender/exp/gltf2_blender_gather_nodes.py b/io_scene_gltf2/blender/exp/gltf2_blender_gather_nodes.py
index a69f5d58..25784960 100755
--- a/io_scene_gltf2/blender/exp/gltf2_blender_gather_nodes.py
+++ b/io_scene_gltf2/blender/exp/gltf2_blender_gather_nodes.py
@@ -13,135 +13,47 @@ from io_scene_gltf2.blender.exp import gltf2_blender_gather_cameras
from io_scene_gltf2.blender.exp import gltf2_blender_gather_mesh
from io_scene_gltf2.blender.exp import gltf2_blender_gather_joints
from io_scene_gltf2.blender.exp import gltf2_blender_gather_lights
+from io_scene_gltf2.blender.exp.gltf2_blender_gather_tree import VExportNode
from ..com.gltf2_blender_extras import generate_extras
from io_scene_gltf2.io.com import gltf2_io
from io_scene_gltf2.io.com import gltf2_io_extensions
from io_scene_gltf2.io.exp.gltf2_io_user_extensions import export_user_extensions
from io_scene_gltf2.io.com.gltf2_io_debug import print_console
+from io_scene_gltf2.blender.exp import gltf2_blender_gather_tree
-def gather_node(blender_object, library, blender_scene, dupli_object_parent, export_settings):
- # custom cache to avoid cache miss when called from animation
- # with blender_scene=None
-
- # invalidate cache if export settings have changed
- if not hasattr(gather_node, "__export_settings") or export_settings != gather_node.__export_settings:
- gather_node.__cache = {}
- gather_node.__export_settings = export_settings
-
- if blender_scene is None and (blender_object.name, library) in gather_node.__cache:
- return gather_node.__cache[(blender_object.name, library)]
-
- node = __gather_node(blender_object, library, blender_scene, dupli_object_parent, export_settings)
- gather_node.__cache[(blender_object.name, library)] = node
- return node
-
-@cached
-def __gather_node(blender_object, library, blender_scene, dupli_object_parent, export_settings):
- children, only_bone_children = __gather_children(blender_object, blender_scene, export_settings)
-
- camera = None
- mesh = None
- skin = None
- weights = None
-
- # If blender_scene is None, we are coming from animation export
- # Check to know if object is exported is already done, so we don't check
- # again if object is instanced in scene : this check was already done when exporting object itself
- if not __filter_node(blender_object, blender_scene, export_settings):
- if children:
- # This node should be filtered out, but has un-filtered children present.
- # So, export this node, excluding its camera, mesh, skin, and weights.
- # The transformations and animations on this node will have visible effects on children.
-
- # Armature always have children node(s) (that are bone(s))
- # We have to check if children are only bones or not for armatures
- if blender_object.type == "ARMATURE" and only_bone_children is True:
- return None
-
- pass
- else:
- # This node is filtered out, and has no un-filtered children or descendants.
- return None
- else:
- # This node is being fully exported.
- camera = __gather_camera(blender_object, export_settings)
- mesh = __gather_mesh(blender_object, library, export_settings)
- skin = __gather_skin(blender_object, export_settings)
- weights = __gather_weights(blender_object, export_settings)
+def gather_node(vnode, export_settings):
+ blender_object = vnode.blender_object
+ skin = __gather_skin(vnode, blender_object, export_settings)
node = gltf2_io.Node(
- camera=camera,
- children=children,
+ camera=__gather_camera(blender_object, export_settings),
+ children=__gather_children(vnode, blender_object, export_settings),
extensions=__gather_extensions(blender_object, export_settings),
extras=__gather_extras(blender_object, export_settings),
matrix=__gather_matrix(blender_object, export_settings),
- mesh=mesh,
+ mesh=__gather_mesh(vnode, blender_object, export_settings),
name=__gather_name(blender_object, export_settings),
rotation=None,
scale=None,
skin=skin,
translation=None,
- weights=weights
+ weights=__gather_weights(blender_object, export_settings)
)
# If node mesh is skined, transforms should be ignored at import, so no need to set them here
if node.skin is None:
- node.translation, node.rotation, node.scale = __gather_trans_rot_scale(blender_object, export_settings)
+ node.translation, node.rotation, node.scale = __gather_trans_rot_scale(vnode, export_settings)
- if export_settings[gltf2_blender_export_keys.YUP]:
- # Checking node.extensions is making sure that the type of lamp is managed, and will be exported
- if blender_object.type == 'LIGHT' and export_settings[gltf2_blender_export_keys.LIGHTS] and node.extensions:
- correction_node = __get_correction_node(blender_object, export_settings)
- correction_node.extensions = {"KHR_lights_punctual": node.extensions["KHR_lights_punctual"]}
- del node.extensions["KHR_lights_punctual"]
- node.children.append(correction_node)
- if blender_object.type == 'CAMERA' and export_settings[gltf2_blender_export_keys.CAMERAS]:
- correction_node = __get_correction_node(blender_object, export_settings)
- correction_node.camera = node.camera
- node.children.append(correction_node)
- node.camera = None
export_user_extensions('gather_node_hook', export_settings, node, blender_object)
- return node
-
+ vnode.node = node
-def __filter_node(blender_object, blender_scene, export_settings):
- if blender_object.users == 0:
- return False
- if blender_scene is not None:
- instanced = any([blender_object.name in layer.objects for layer in blender_scene.view_layers])
- if instanced is False:
- # Check if object is from a linked collection
- if any([blender_object.name in coll.objects for coll in bpy.data.collections if coll.library is not None]):
- pass
- else:
- # Not instanced, not linked -> We don't keep this object
- return False
- if export_settings[gltf2_blender_export_keys.SELECTED] and blender_object.select_get() is False:
- return False
+ if node.skin is not None:
+ vnode.skin = skin
- if export_settings[gltf2_blender_export_keys.VISIBLE] and blender_object.visible_get() is False:
- return False
-
- # render_get() doesn't exist, so unfortunately this won't take into account the Collection settings
- if export_settings[gltf2_blender_export_keys.RENDERABLE] and blender_object.hide_render is True:
- return False
-
- if export_settings[gltf2_blender_export_keys.ACTIVE_COLLECTION]:
- found = any(x == blender_object for x in bpy.context.collection.all_objects)
-
- if not found:
- return False
-
- if blender_object.type == 'LIGHT':
- return export_settings[gltf2_blender_export_keys.LIGHTS]
-
- if blender_object.type == 'CAMERA':
- return export_settings[gltf2_blender_export_keys.CAMERAS]
-
- return True
+ return node
def __gather_camera(blender_object, export_settings):
@@ -151,54 +63,35 @@ def __gather_camera(blender_object, export_settings):
return gltf2_blender_gather_cameras.gather_camera(blender_object.data, export_settings)
-def __gather_children(blender_object, blender_scene, export_settings):
+def __gather_children(vnode, blender_object, export_settings):
children = []
- only_bone_children = True # True by default, will be set to False if needed
- # standard children
- for child_object in blender_object.children:
- if child_object.parent_bone:
- # this is handled further down,
- # as the object should be a child of the specific bone,
- # not the Armature object
- continue
-
- node = gather_node(child_object,
- child_object.library.name if child_object.library else None,
- blender_scene, None, export_settings)
+
+ vtree = export_settings['vtree']
+
+ # Standard Children / Collection
+ for c in [vtree.nodes[c] for c in vnode.children if vtree.nodes[c].blender_type != gltf2_blender_gather_tree.VExportNode.BONE]:
+ node = gather_node(c, export_settings)
if node is not None:
children.append(node)
- only_bone_children = False
- # blender dupli objects
- if blender_object.instance_type == 'COLLECTION' and blender_object.instance_collection:
- for dupli_object in blender_object.instance_collection.objects:
- if dupli_object.parent is not None:
- continue
- if dupli_object.type == "ARMATURE":
- continue # There is probably a proxy (no more existing)
- node = gather_node(dupli_object,
- dupli_object.library.name if dupli_object.library else None,
- blender_scene, blender_object.name, export_settings)
- if node is not None:
- children.append(node)
- only_bone_children = False
-
- # blender bones
- if blender_object.type == "ARMATURE":
+
+
+ # Armature --> Retrieve Blender bones
+ if vnode.blender_type == gltf2_blender_gather_tree.VExportNode.ARMATURE:
root_joints = []
- if export_settings["gltf_def_bones"] is False:
- bones = blender_object.pose.bones
- else:
- bones, _, _ = gltf2_blender_gather_skins.get_bone_tree(None, blender_object)
- bones = [blender_object.pose.bones[b.name] for b in bones]
- for blender_bone in bones:
- if not blender_bone.parent:
- joint = gltf2_blender_gather_joints.gather_joint(blender_object, blender_bone, export_settings)
- children.append(joint)
- root_joints.append(joint)
- # handle objects directly parented to bones
- direct_bone_children = [child for child in blender_object.children if child.parent_bone]
- if len(direct_bone_children) != 0:
- only_bone_children = False
+
+ all_armature_children = vnode.children
+ root_bones_uuid = [c for c in all_armature_children if export_settings['vtree'].nodes[c].blender_type == VExportNode.BONE]
+ for bone_uuid in root_bones_uuid:
+ joint = gltf2_blender_gather_joints.gather_joint_vnode(bone_uuid, export_settings)
+ children.append(joint)
+ root_joints.append(joint)
+
+ # Object parented to bones
+ direct_bone_children = []
+ for n in [vtree.nodes[i] for i in vtree.get_all_bones(vnode.uuid)]:
+ direct_bone_children.extend([c for c in n.children if vtree.nodes[c].blender_type != gltf2_blender_gather_tree.VExportNode.BONE])
+
+
def find_parent_joint(joints, name):
for joint in joints:
if joint.name == name:
@@ -207,44 +100,40 @@ def __gather_children(blender_object, blender_scene, export_settings):
if parent_joint:
return parent_joint
return None
- for child in direct_bone_children:
+
+ for child in direct_bone_children: # List of object that are parented to bones
# find parent joint
- parent_joint = find_parent_joint(root_joints, child.parent_bone)
+ parent_joint = find_parent_joint(root_joints, vtree.nodes[child].blender_object.parent_bone)
if not parent_joint:
continue
- child_node = gather_node(child, None, blender_scene, None, export_settings)
+ child_node = gather_node(vtree.nodes[child], export_settings)
if child_node is None:
continue
blender_bone = blender_object.pose.bones[parent_joint.name]
- # fix rotation
- if export_settings[gltf2_blender_export_keys.YUP]:
- rot = child_node.rotation
- if rot is None:
- rot = [0, 0, 0, 1]
-
- rot_quat = Quaternion(rot)
- axis_basis_change = Matrix(
- ((1.0, 0.0, 0.0, 0.0), (0.0, 0.0, -1.0, 0.0), (0.0, 1.0, 0.0, 0.0), (0.0, 0.0, 0.0, 1.0)))
- mat = child.matrix_parent_inverse @ child.matrix_basis
- mat = mat @ axis_basis_change
-
- _, rot_quat, _ = mat.decompose()
- child_node.rotation = [rot_quat[1], rot_quat[2], rot_quat[3], rot_quat[0]]
-
- # fix translation (in blender bone's tail is the origin for children)
- trans, _, _ = child.matrix_local.decompose()
- if trans is None:
- trans = [0, 0, 0]
- # bones go down their local y axis
- if blender_bone.matrix.to_scale()[1] >= 1e-6:
- bone_tail = [0, blender_bone.length / blender_bone.matrix.to_scale()[1], 0]
- else:
- bone_tail = [0,0,0] # If scale is 0, tail == head
- child_node.translation = [trans[idx] + bone_tail[idx] for idx in range(3)]
+
+ mat = vtree.nodes[vtree.nodes[child].parent_bone_uuid].matrix_world.inverted_safe() @ vtree.nodes[child].matrix_world
+ loc, rot_quat, scale = mat.decompose()
+
+ trans = __convert_swizzle_location(loc, export_settings)
+ rot = __convert_swizzle_rotation(rot_quat, export_settings)
+ sca = __convert_swizzle_scale(scale, export_settings)
+
+
+ translation, rotation, scale = (None, None, None)
+ if trans[0] != 0.0 or trans[1] != 0.0 or trans[2] != 0.0:
+ translation = [trans[0], trans[1], trans[2]]
+ if rot[0] != 1.0 or rot[1] != 0.0 or rot[2] != 0.0 or rot[3] != 0.0:
+ rotation = [rot[1], rot[2], rot[3], rot[0]]
+ if sca[0] != 1.0 or sca[1] != 1.0 or sca[2] != 1.0:
+ scale = [sca[0], sca[1], sca[2]]
+
+ child_node.translation = translation
+ child_node.rotation = rotation
+ child_node.scale = scale
parent_joint.children.append(child_node)
- return children, only_bone_children
+ return children
def __gather_extensions(blender_object, export_settings):
@@ -283,13 +172,17 @@ def __gather_matrix(blender_object, export_settings):
return []
-def __gather_mesh(blender_object, library, export_settings):
+def __gather_mesh(vnode, blender_object, export_settings):
if blender_object.type in ['CURVE', 'SURFACE', 'FONT']:
- return __gather_mesh_from_nonmesh(blender_object, library, export_settings)
+ return __gather_mesh_from_nonmesh(blender_object, export_settings)
if blender_object.type != "MESH":
return None
+ # For dupli instancers, when show is off -> export as empty
+ if vnode.force_as_empty is True:
+ return None
+
# Be sure that object is valid (no NaN for example)
blender_object.data.validate()
@@ -301,6 +194,8 @@ def __gather_mesh(blender_object, library, export_settings):
if len(modifiers) == 0:
modifiers = None
+ # TODO for objects without any modifiers, we can keep original mesh_data
+ # It will instance mesh in glTF
if export_settings[gltf2_blender_export_keys.APPLY]:
armature_modifiers = {}
if export_settings[gltf2_blender_export_keys.SKINS]:
@@ -335,24 +230,23 @@ def __gather_mesh(blender_object, library, export_settings):
modifiers = None
materials = tuple(ms.material for ms in blender_object.material_slots)
- material_names = tuple(None if mat is None else mat.name for mat in materials)
# retrieve armature
# Because mesh data will be transforms to skeleton space,
# we can't instantiate multiple object at different location, skined by same armature
- blender_object_for_skined_data = None
+ uuid_for_skined_data = None
if export_settings[gltf2_blender_export_keys.SKINS]:
for idx, modifier in enumerate(blender_object.modifiers):
if modifier.type == 'ARMATURE':
- blender_object_for_skined_data = blender_object
+ uuid_for_skined_data = vnode.uuid
result = gltf2_blender_gather_mesh.gather_mesh(blender_mesh,
- library,
- blender_object_for_skined_data,
+ uuid_for_skined_data,
vertex_groups,
modifiers,
skip_filter,
- material_names,
+ materials,
+ None,
export_settings)
if export_settings[gltf2_blender_export_keys.APPLY]:
@@ -361,7 +255,7 @@ def __gather_mesh(blender_object, library, export_settings):
return result
-def __gather_mesh_from_nonmesh(blender_object, library, export_settings):
+def __gather_mesh_from_nonmesh(blender_object, export_settings):
"""Handles curves, surfaces, text, etc."""
needs_to_mesh_clear = False
try:
@@ -387,18 +281,18 @@ def __gather_mesh_from_nonmesh(blender_object, library, export_settings):
needs_to_mesh_clear = True
skip_filter = True
- material_names = tuple([ms.material.name for ms in blender_object.material_slots if ms.material is not None])
+ materials = tuple([ms.material for ms in blender_object.material_slots if ms.material is not None])
vertex_groups = None
modifiers = None
blender_object_for_skined_data = None
result = gltf2_blender_gather_mesh.gather_mesh(blender_mesh,
- library,
blender_object_for_skined_data,
vertex_groups,
modifiers,
skip_filter,
- material_names,
+ materials,
+ blender_object.data,
export_settings)
finally:
@@ -411,33 +305,15 @@ def __gather_mesh_from_nonmesh(blender_object, library, export_settings):
def __gather_name(blender_object, export_settings):
return blender_object.name
-
-def __gather_trans_rot_scale(blender_object, export_settings):
- if blender_object.matrix_parent_inverse == Matrix.Identity(4):
- trans = blender_object.location
-
- if blender_object.rotation_mode in ['QUATERNION', 'AXIS_ANGLE']:
- rot = blender_object.rotation_quaternion
- else:
- rot = blender_object.rotation_euler.to_quaternion()
-
- sca = blender_object.scale
+def __gather_trans_rot_scale(vnode, export_settings):
+ if vnode.parent_uuid is None:
+ # No parent, so matrix is world matrix
+ trans, rot, sca = vnode.matrix_world.decompose()
else:
- # matrix_local = matrix_parent_inverse*location*rotation*scale
- # Decomposing matrix_local gives less accuracy, but is needed if matrix_parent_inverse is not the identity.
+ # calculate local matrix
+ trans, rot, sca = (export_settings['vtree'].nodes[vnode.parent_uuid].matrix_world.inverted_safe() @ vnode.matrix_world).decompose()
- if blender_object.matrix_local[3][3] != 0.0:
- trans, rot, sca = blender_object.matrix_local.decompose()
- else:
- # Some really weird cases, scale is null (if parent is null when evaluation is done)
- print_console('WARNING', 'Some nodes are 0 scaled during evaluation. Result can be wrong')
- trans = blender_object.location
- if blender_object.rotation_mode in ['QUATERNION', 'AXIS_ANGLE']:
- rot = blender_object.rotation_quaternion
- else:
- rot = blender_object.rotation_euler.to_quaternion()
- sca = blender_object.scale
# make sure the rotation is normalized
rot.normalize()
@@ -446,9 +322,9 @@ def __gather_trans_rot_scale(blender_object, export_settings):
rot = __convert_swizzle_rotation(rot, export_settings)
sca = __convert_swizzle_scale(sca, export_settings)
- if blender_object.instance_type == 'COLLECTION' and blender_object.instance_collection:
+ if vnode.blender_object.instance_type == 'COLLECTION' and vnode.blender_object.instance_collection:
offset = -__convert_swizzle_location(
- blender_object.instance_collection.instance_offset, export_settings)
+ vnode.blender_object.instance_collection.instance_offset, export_settings)
s = Matrix.Diagonal(sca).to_4x4()
r = rot.to_matrix().to_4x4()
@@ -473,8 +349,7 @@ def __gather_trans_rot_scale(blender_object, export_settings):
scale = [sca[0], sca[1], sca[2]]
return translation, rotation, scale
-
-def __gather_skin(blender_object, export_settings):
+def __gather_skin(vnode, blender_object, export_settings):
modifiers = {m.type: m for m in blender_object.modifiers}
if "ARMATURE" not in modifiers or modifiers["ARMATURE"].object is None:
return None
@@ -501,34 +376,12 @@ def __gather_skin(blender_object, export_settings):
return None
# Skins and meshes must be in the same glTF node, which is different from how blender handles armatures
- return gltf2_blender_gather_skins.gather_skin(modifiers["ARMATURE"].object, export_settings)
+ return gltf2_blender_gather_skins.gather_skin(vnode.armature, export_settings)
def __gather_weights(blender_object, export_settings):
return None
-
-def __get_correction_node(blender_object, export_settings):
- correction_quaternion = __convert_swizzle_rotation(
- Quaternion((1.0, 0.0, 0.0), math.radians(-90.0)), export_settings)
- correction_quaternion = [correction_quaternion[1], correction_quaternion[2],
- correction_quaternion[3], correction_quaternion[0]]
- return gltf2_io.Node(
- camera=None,
- children=[],
- extensions=None,
- extras=None,
- matrix=None,
- mesh=None,
- name=blender_object.name + '_Orientation',
- rotation=correction_quaternion,
- scale=None,
- skin=None,
- translation=None,
- weights=None
- )
-
-
def __convert_swizzle_location(loc, export_settings):
"""Convert a location from Blender coordinate system to glTF coordinate system."""
if export_settings[gltf2_blender_export_keys.YUP]:
diff --git a/io_scene_gltf2/blender/exp/gltf2_blender_gather_primitives.py b/io_scene_gltf2/blender/exp/gltf2_blender_gather_primitives.py
index 82ff7f66..9e5ce648 100755
--- a/io_scene_gltf2/blender/exp/gltf2_blender_gather_primitives.py
+++ b/io_scene_gltf2/blender/exp/gltf2_blender_gather_primitives.py
@@ -7,7 +7,7 @@ import numpy as np
from .gltf2_blender_export_keys import NORMALS, MORPH_NORMAL, TANGENTS, MORPH_TANGENT, MORPH
-from io_scene_gltf2.blender.exp.gltf2_blender_gather_cache import cached
+from io_scene_gltf2.blender.exp.gltf2_blender_gather_cache import cached, cached_by_key
from io_scene_gltf2.blender.exp import gltf2_blender_extract
from io_scene_gltf2.blender.exp import gltf2_blender_gather_accessors
from io_scene_gltf2.blender.exp import gltf2_blender_gather_primitive_attributes
@@ -20,13 +20,34 @@ from io_scene_gltf2.io.com.gltf2_io_debug import print_console
@cached
+def get_primitive_cache_key(
+ blender_mesh,
+ blender_object,
+ vertex_groups,
+ modifiers,
+ materials,
+ export_settings):
+
+ # Use id of mesh
+ # Do not use bpy.types that can be unhashable
+ # Do not use the mesh name, which may not be unique (when linked)
+
+ # TODO check what is really needed for modifiers
+
+ return (
+ (id(blender_mesh),),
+ (modifiers,),
+ tuple(id(m) if m is not None else None for m in materials)
+ )
+
+
+@cached_by_key(key=get_primitive_cache_key)
def gather_primitives(
blender_mesh: bpy.types.Mesh,
- library: Optional[str],
- blender_object: Optional[bpy.types.Object],
+ uuid_for_skined_data,
vertex_groups: Optional[bpy.types.VertexGroups],
modifiers: Optional[bpy.types.ObjectModifiers],
- material_names: Tuple[str],
+ materials: Tuple[bpy.types.Material],
export_settings
) -> List[gltf2_io.MeshPrimitive]:
"""
@@ -36,7 +57,7 @@ def gather_primitives(
"""
primitives = []
- blender_primitives = __gather_cache_primitives(blender_mesh, library, blender_object,
+ blender_primitives = __gather_cache_primitives(blender_mesh, uuid_for_skined_data,
vertex_groups, modifiers, export_settings)
for internal_primitive in blender_primitives:
@@ -45,14 +66,13 @@ def gather_primitives(
if export_settings['gltf_materials'] == "EXPORT" and material_idx is not None:
blender_material = None
- if material_names:
- i = material_idx if material_idx < len(material_names) else -1
- material_name = material_names[i]
- if material_name is not None:
- blender_material = bpy.data.materials[material_name]
- if blender_material is not None:
+ mat = None
+ if materials:
+ i = material_idx if material_idx < len(materials) else -1
+ mat = materials[i]
+ if mat is not None:
material = gltf2_blender_gather_materials.gather_material(
- blender_material,
+ mat,
export_settings,
)
@@ -72,8 +92,7 @@ def gather_primitives(
@cached
def __gather_cache_primitives(
blender_mesh: bpy.types.Mesh,
- library: Optional[str],
- blender_object: Optional[bpy.types.Object],
+ uuid_for_skined_data,
vertex_groups: Optional[bpy.types.VertexGroups],
modifiers: Optional[bpy.types.ObjectModifiers],
export_settings
@@ -84,7 +103,7 @@ def __gather_cache_primitives(
primitives = []
blender_primitives = gltf2_blender_extract.extract_primitives(
- None, blender_mesh, library, blender_object, vertex_groups, modifiers, export_settings)
+ blender_mesh, uuid_for_skined_data, vertex_groups, modifiers, export_settings)
for internal_primitive in blender_primitives:
primitive = {
diff --git a/io_scene_gltf2/blender/exp/gltf2_blender_gather_skins.py b/io_scene_gltf2/blender/exp/gltf2_blender_gather_skins.py
index e5534c5a..136d654d 100755
--- a/io_scene_gltf2/blender/exp/gltf2_blender_gather_skins.py
+++ b/io_scene_gltf2/blender/exp/gltf2_blender_gather_skins.py
@@ -10,10 +10,12 @@ from io_scene_gltf2.io.com import gltf2_io_constants
from io_scene_gltf2.blender.exp import gltf2_blender_gather_accessors
from io_scene_gltf2.blender.exp import gltf2_blender_gather_joints
from io_scene_gltf2.io.exp.gltf2_io_user_extensions import export_user_extensions
+from io_scene_gltf2.blender.exp import gltf2_blender_gather_tree
+from io_scene_gltf2.blender.exp.gltf2_blender_gather_tree import VExportNode
@cached
-def gather_skin(blender_object, export_settings):
+def gather_skin(armature_uuid, export_settings):
"""
Gather armatures, bones etc into a glTF2 skin object.
@@ -21,78 +23,70 @@ def gather_skin(blender_object, export_settings):
:param export_settings:
:return: a glTF2 skin object
"""
- if not __filter_skin(blender_object, export_settings):
+
+ blender_armature_object = export_settings['vtree'].nodes[armature_uuid].blender_object
+
+ if not __filter_skin(blender_armature_object, export_settings):
return None
skin = gltf2_io.Skin(
- extensions=__gather_extensions(blender_object, export_settings),
- extras=__gather_extras(blender_object, export_settings),
- inverse_bind_matrices=__gather_inverse_bind_matrices(blender_object, export_settings),
- joints=__gather_joints(blender_object, export_settings),
- name=__gather_name(blender_object, export_settings),
- skeleton=__gather_skeleton(blender_object, export_settings)
+ extensions=__gather_extensions(blender_armature_object, export_settings),
+ extras=__gather_extras(blender_armature_object, export_settings),
+ inverse_bind_matrices=__gather_inverse_bind_matrices(armature_uuid, export_settings),
+ joints=__gather_joints(armature_uuid, export_settings),
+ name=__gather_name(blender_armature_object, export_settings),
+ skeleton=__gather_skeleton(blender_armature_object, export_settings)
)
- export_user_extensions('gather_skin_hook', export_settings, skin, blender_object)
+ # If armature is not exported, joints will be empty.
+ # Do not construct skin in that case
+ if len(skin.joints) == 0:
+ return None
+
+ export_user_extensions('gather_skin_hook', export_settings, skin, blender_armature_object)
return skin
-def __filter_skin(blender_object, export_settings):
+def __filter_skin(blender_armature_object, export_settings):
if not export_settings[gltf2_blender_export_keys.SKINS]:
return False
- if blender_object.type != 'ARMATURE' or len(blender_object.pose.bones) == 0:
+ if blender_armature_object.type != 'ARMATURE' or len(blender_armature_object.pose.bones) == 0:
return False
return True
-def __gather_extensions(blender_object, export_settings):
+def __gather_extensions(blender_armature_object, export_settings):
return None
-def __gather_extras(blender_object, export_settings):
+def __gather_extras(blender_armature_object, export_settings):
return None
-def __gather_inverse_bind_matrices(blender_object, export_settings):
+def __gather_inverse_bind_matrices(armature_uuid, export_settings):
+
+ blender_armature_object = export_settings['vtree'].nodes[armature_uuid].blender_object
+
axis_basis_change = mathutils.Matrix.Identity(4)
if export_settings[gltf2_blender_export_keys.YUP]:
axis_basis_change = mathutils.Matrix(
((1.0, 0.0, 0.0, 0.0), (0.0, 0.0, 1.0, 0.0), (0.0, -1.0, 0.0, 0.0), (0.0, 0.0, 0.0, 1.0)))
- if export_settings['gltf_def_bones'] is False:
- # build the hierarchy of nodes out of the bones
- root_bones = []
- for blender_bone in blender_object.pose.bones:
- if not blender_bone.parent:
- root_bones.append(blender_bone)
- else:
- _, children_, root_bones = get_bone_tree(None, blender_object)
-
- matrices = []
-
- # traverse the matrices in the same order as the joints and compute the inverse bind matrix
+ bones_uuid = export_settings['vtree'].get_all_bones(armature_uuid)
def __collect_matrices(bone):
inverse_bind_matrix = (
axis_basis_change @
(
- blender_object.matrix_world @
+ blender_armature_object.matrix_world @
bone.bone.matrix_local
)
).inverted_safe()
matrices.append(inverse_bind_matrix)
- if export_settings['gltf_def_bones'] is False:
- for child in bone.children:
- __collect_matrices(child)
- else:
- if bone.name in children_.keys():
- for child in children_[bone.name]:
- __collect_matrices(blender_object.pose.bones[child])
-
- # start with the "root" bones and recurse into children, in the same ordering as the how joints are gathered
- for root_bone in root_bones:
- __collect_matrices(root_bone)
+ matrices = []
+ for b in bones_uuid:
+ __collect_matrices(blender_armature_object.pose.bones[export_settings['vtree'].nodes[b].blender_bone.name])
# flatten the matrices
inverse_matrices = []
@@ -113,67 +107,26 @@ def __gather_inverse_bind_matrices(blender_object, export_settings):
)
-def __gather_joints(blender_object, export_settings):
- root_joints = []
- if export_settings['gltf_def_bones'] is False:
- # build the hierarchy of nodes out of the bones
- for blender_bone in blender_object.pose.bones:
- if not blender_bone.parent:
- root_joints.append(gltf2_blender_gather_joints.gather_joint(blender_object, blender_bone, export_settings))
- else:
- _, children_, root_joints = get_bone_tree(None, blender_object)
- root_joints = [gltf2_blender_gather_joints.gather_joint(blender_object, i, export_settings) for i in root_joints]
-
- # joints is a flat list containing all nodes belonging to the skin
- joints = []
-
- def __collect_joints(node):
- joints.append(node)
- if export_settings['gltf_def_bones'] is False:
- for child in node.children:
- __collect_joints(child)
- else:
- if node.name in children_.keys():
- for child in children_[node.name]:
- __collect_joints(gltf2_blender_gather_joints.gather_joint(blender_object, blender_object.pose.bones[child], export_settings))
-
- for joint in root_joints:
- __collect_joints(joint)
+def __gather_joints(armature_uuid, export_settings):
+
+ blender_armature_object = export_settings['vtree'].nodes[armature_uuid].blender_object
+
+ all_armature_children = export_settings['vtree'].nodes[armature_uuid].children
+ root_bones_uuid = [c for c in all_armature_children if export_settings['vtree'].nodes[c].blender_type == VExportNode.BONE]
+ # Create bone nodes
+ for root_bone_uuid in root_bones_uuid:
+ gltf2_blender_gather_joints.gather_joint_vnode(root_bone_uuid, export_settings)
+
+ bones_uuid = export_settings['vtree'].get_all_bones(armature_uuid)
+ joints = [export_settings['vtree'].nodes[b].node for b in bones_uuid]
return joints
-def __gather_name(blender_object, export_settings):
- return blender_object.name
+def __gather_name(blender_armature_object, export_settings):
+ return blender_armature_object.name
-def __gather_skeleton(blender_object, export_settings):
+def __gather_skeleton(blender_armature_object, export_settings):
# In the future support the result of https://github.com/KhronosGroup/glTF/pull/1195
- return None # gltf2_blender_gather_nodes.gather_node(blender_object, blender_scene, export_settings)
-
-@cached
-def get_bone_tree(blender_dummy, blender_object):
-
- bones = []
- children = {}
- root_bones = []
-
- def get_parent(bone):
- bones.append(bone.name)
- if bone.parent is not None:
- if bone.parent.name not in children.keys():
- children[bone.parent.name] = []
- children[bone.parent.name].append(bone.name)
- get_parent(bone.parent)
- else:
- root_bones.append(bone.name)
-
- for bone in [b for b in blender_object.data.bones if b.use_deform is True]:
- get_parent(bone)
-
- # remove duplicates
- for k, v in children.items():
- children[k] = list(set(v))
- list_ = list(set(bones))
- root_ = list(set(root_bones))
- return [blender_object.data.bones[b] for b in list_], children, [blender_object.pose.bones[b] for b in root_]
+ return None
diff --git a/io_scene_gltf2/blender/exp/gltf2_blender_gather_tree.py b/io_scene_gltf2/blender/exp/gltf2_blender_gather_tree.py
new file mode 100644
index 00000000..d3edd50a
--- /dev/null
+++ b/io_scene_gltf2/blender/exp/gltf2_blender_gather_tree.py
@@ -0,0 +1,374 @@
+# SPDX-License-Identifier: Apache-2.0
+# Copyright 2021 The glTF-Blender-IO authors.
+
+import bpy
+import uuid
+
+from . import gltf2_blender_export_keys
+from mathutils import Quaternion, Matrix
+
+class VExportNode:
+
+ OBJECT = 1
+ ARMATURE = 2
+ BONE = 3
+ LIGHT = 4
+ CAMERA = 5
+ COLLECTION = 6
+
+ # Parent type, to be set on child regarding its parent
+ NO_PARENT = 54
+ PARENT_OBJECT = 50
+ PARENT_BONE = 51
+ PARENT_BONE_RELATIVE = 52
+ PARENT_ROOT_BONE = 53
+ PARENT_BONE_BONE = 55
+
+
+ def __init__(self):
+ self.children = []
+ self.blender_type = None
+ self.world_matrix = None
+ self.parent_type = None
+
+ self.blender_object = None
+ self.blender_bone = None
+
+ self.force_as_empty = False # Used for instancer display
+
+ # Only for bone/bone and object parented to bone
+ self.parent_bone_uuid = None
+
+ # Only for bones
+ self.use_deform = None
+
+ # Only for armature
+ self.bones = {}
+
+ # For deformed object
+ self.armature = None # for deformed object and for bone
+ self.skin = None
+
+ # glTF
+ self.node = None
+
+ def add_child(self, uuid):
+ self.children.append(uuid)
+
+ def set_world_matrix(self, matrix):
+ self.world_matrix = matrix
+
+ def set_blender_data(self, blender_object, blender_bone):
+ self.blender_object = blender_object
+ self.blender_bone = blender_bone
+
+ def recursive_display(self, tree, mode):
+ if mode == "simple":
+ for c in self.children:
+ print(self.blender_object.name, "/", self.blender_bone.name if self.blender_bone else "", "-->", tree.nodes[c].blender_object.name, "/", tree.nodes[c].blender_bone.name if tree.nodes[c].blender_bone else "" )
+ tree.nodes[c].recursive_display(tree, mode)
+
+class VExportTree:
+ def __init__(self, export_settings):
+ self.nodes = {}
+ self.roots = []
+
+ self.export_settings = export_settings
+
+ self.tree_troncated = False
+
+ def add_node(self, node):
+ self.nodes[node.uuid] = node
+
+ def add_children(self, uuid_parent, uuid_child):
+ self.nodes[uuid_parent].add_child(uuid_child)
+
+ def construct(self, blender_scene):
+ bpy.context.window.scene = blender_scene
+ depsgraph = bpy.context.evaluated_depsgraph_get()
+
+ for blender_object in [obj.original for obj in depsgraph.scene_eval.objects if obj.parent is None]:
+ self.recursive_node_traverse(blender_object, None, None, Matrix.Identity(4))
+
+ def recursive_node_traverse(self, blender_object, blender_bone, parent_uuid, parent_coll_matrix_world, armature_uuid=None, dupli_world_matrix=None):
+ node = VExportNode()
+ node.uuid = str(uuid.uuid4())
+ node.parent_uuid = parent_uuid
+ node.set_blender_data(blender_object, blender_bone)
+
+ # add to parent if needed
+ if parent_uuid is not None:
+ self.add_children(parent_uuid, node.uuid)
+ else:
+ self.roots.append(node.uuid)
+
+ # Set blender type
+ if blender_bone is not None:
+ node.blender_type = VExportNode.BONE
+ self.nodes[armature_uuid].bones[blender_bone.name] = node.uuid
+ node.use_deform = blender_bone.id_data.data.bones[blender_bone.name].use_deform
+ elif blender_object.type == "ARMATURE":
+ node.blender_type = VExportNode.ARMATURE
+ elif blender_object.type == "CAMERA":
+ node.blender_type = VExportNode.CAMERA
+ elif blender_object.type == "LIGHT":
+ node.blender_type = VExportNode.LIGHT
+ elif blender_object.instance_type == "COLLECTION":
+ node.blender_type = VExportNode.COLLECTION
+ else:
+ node.blender_type = VExportNode.OBJECT
+
+ # For meshes with armature modifier (parent is armature), keep armature uuid
+ if node.blender_type == VExportNode.OBJECT:
+ modifiers = {m.type: m for m in blender_object.modifiers}
+ if "ARMATURE" in modifiers and modifiers["ARMATURE"].object is not None:
+ if parent_uuid is None or not self.nodes[parent_uuid].blender_type == VExportNode.ARMATURE:
+ # correct workflow is to parent skinned mesh to armature, but ...
+ # all users don't use correct workflow
+ print("WARNING: Armature must be the parent of skinned mesh")
+ print("Armature is selected by its name, but may be false in case of instances")
+ # Search an armature by name, and use the first found
+ # This will be done after all objects are setup
+ node.armature_needed = modifiers["ARMATURE"].object.name
+ else:
+ node.armature = parent_uuid
+
+ # For bones, store uuid of armature
+ if blender_bone is not None:
+ node.armature = armature_uuid
+
+ # for bone/bone parenting, store parent, this will help armature tree management
+ if parent_uuid is not None and self.nodes[parent_uuid].blender_type == VExportNode.BONE and node.blender_type == VExportNode.BONE:
+ node.parent_bone_uuid = parent_uuid
+
+
+ # Objects parented to bone
+ if parent_uuid is not None and self.nodes[parent_uuid].blender_type == VExportNode.BONE and node.blender_type != VExportNode.BONE:
+ node.parent_bone_uuid = parent_uuid
+
+ # World Matrix
+ # Store World Matrix for objects
+ if dupli_world_matrix is not None:
+ node.matrix_world = dupli_world_matrix
+ elif node.blender_type in [VExportNode.OBJECT, VExportNode.COLLECTION, VExportNode.ARMATURE, VExportNode.CAMERA, VExportNode.LIGHT]:
+ # The object's matrix_world is expressed relative to the collection instance it belongs to,
+ # so the real world matrix is the collection's world_matrix @ the object's "world_matrix"
+ node.matrix_world = parent_coll_matrix_world @ blender_object.matrix_world.copy()
+ if node.blender_type == VExportNode.CAMERA and self.export_settings[gltf2_blender_export_keys.CAMERAS]:
+ correction = Quaternion((2**0.5/2, -2**0.5/2, 0.0, 0.0))
+ node.matrix_world @= correction.to_matrix().to_4x4()
+ elif node.blender_type == VExportNode.LIGHT and self.export_settings[gltf2_blender_export_keys.LIGHTS]:
+ correction = Quaternion((2**0.5/2, -2**0.5/2, 0.0, 0.0))
+ node.matrix_world @= correction.to_matrix().to_4x4()
+ elif node.blender_type == VExportNode.BONE:
+ node.matrix_world = self.nodes[node.armature].matrix_world @ blender_bone.matrix
+ axis_basis_change = Matrix(
+ ((1.0, 0.0, 0.0, 0.0), (0.0, 0.0, 1.0, 0.0), (0.0, -1.0, 0.0, 0.0), (0.0, 0.0, 0.0, 1.0)))
+ node.matrix_world = node.matrix_world @ axis_basis_change
+
+ # Force empty ?
+ # For duplis, if the instancer is not displayed, we should create an empty
+ if blender_object.is_instancer is True and blender_object.show_instancer_for_render is False:
+ node.force_as_empty = True
+
+ # Storing this node
+ self.add_node(node)
+
+ ###### Manage children ######
+
+ # standard children
+ if blender_bone is None and blender_object.is_instancer is False:
+ for child_object in blender_object.children:
+ if child_object.parent_bone:
+ # Object parented to bones
+ # Will be managed later
+ continue
+ else:
+ # Classic parenting
+ self.recursive_node_traverse(child_object, None, node.uuid, parent_coll_matrix_world)
+
+ # Collections
+ if blender_object.instance_type == 'COLLECTION' and blender_object.instance_collection:
+ for dupli_object in blender_object.instance_collection.objects:
+ if dupli_object.parent is not None:
+ continue
+ self.recursive_node_traverse(dupli_object, None, node.uuid, node.matrix_world)
+
+ # Armature : children are bones with no parent
+ if blender_object.type == "ARMATURE" and blender_bone is None:
+ for b in [b for b in blender_object.pose.bones if b.parent is None]:
+ self.recursive_node_traverse(blender_object, b, node.uuid, parent_coll_matrix_world, node.uuid)
+
+ # Bones
+ if blender_object.type == "ARMATURE" and blender_bone is not None:
+ for b in blender_bone.children:
+ self.recursive_node_traverse(blender_object, b, node.uuid, parent_coll_matrix_world, armature_uuid)
+
+ # Object parented to bone
+ if blender_bone is not None:
+ for child_object in [c for c in blender_object.children if c.parent_bone is not None and c.parent_bone == blender_bone.name]:
+ self.recursive_node_traverse(child_object, None, node.uuid, parent_coll_matrix_world)
+
+ # Duplis
+ if blender_object.is_instancer is True and blender_object.instance_type != 'COLLECTION':
+ depsgraph = bpy.context.evaluated_depsgraph_get()
+ for (dupl, mat) in [(dup.object.original, dup.matrix_world.copy()) for dup in depsgraph.object_instances if dup.parent and id(dup.parent.original) == id(blender_object)]:
+ self.recursive_node_traverse(dupl, None, node.uuid, parent_coll_matrix_world, dupli_world_matrix=mat)
+
+ def get_all_objects(self):
+ return [n.uuid for n in self.nodes.values() if n.blender_type != VExportNode.BONE]
+
+ def get_all_bones(self, uuid): # For armatures only
+ if self.nodes[uuid].blender_type == VExportNode.ARMATURE:
+ def recursive_get_all_bones(uuid):
+ total = []
+ if self.nodes[uuid].blender_type == VExportNode.BONE:
+ total.append(uuid)
+ for child_uuid in self.nodes[uuid].children:
+ total.extend(recursive_get_all_bones(child_uuid))
+
+ return total
+
+ tot = []
+ for c_uuid in self.nodes[uuid].children:
+ tot.extend(recursive_get_all_bones(c_uuid))
+ return tot
+ else:
+ return []
+
+ def display(self, mode):
+ if mode == "simple":
+ for n in self.roots:
+ print("Root", self.nodes[n].blender_object.name, "/", self.nodes[n].blender_bone.name if self.nodes[n].blender_bone else "" )
+ self.nodes[n].recursive_display(self, mode)
+
+
+ def filter_tag(self):
+ roots = self.roots.copy()
+ for r in roots:
+ self.recursive_filter_tag(r, None)
+
+ def filter_perform(self):
+ roots = self.roots.copy()
+ for r in roots:
+ self.recursive_filter(r, None) # Root, so no parent
+
+ def filter(self):
+ self.filter_tag()
+ self.filter_perform()
+
+
+ def recursive_filter_tag(self, uuid, parent_keep_tag):
+ # parent_keep_tag is for collection instance
+ # some properties (selection, visibility, renderability)
+ # are defined at collection level, and we need to use these values
+ # for all objects of the collection instance.
+ # But some properties (camera, lamp ...) are not defined at collection level
+ if parent_keep_tag is None:
+ self.nodes[uuid].keep_tag = self.node_filter_not_inheritable_is_kept(uuid) and self.node_filter_inheritable_is_kept(uuid)
+ elif parent_keep_tag is True:
+ self.nodes[uuid].keep_tag = self.node_filter_not_inheritable_is_kept(uuid)
+ elif parent_keep_tag is False:
+ self.nodes[uuid].keep_tag = False
+ else:
+ print("This should not happen!")
+
+ for child in self.nodes[uuid].children:
+ if self.nodes[uuid].blender_type == VExportNode.COLLECTION:
+ self.recursive_filter_tag(child, self.nodes[uuid].keep_tag)
+ else:
+ self.recursive_filter_tag(child, parent_keep_tag)
+
+ def recursive_filter(self, uuid, parent_kept_uuid):
+ children = self.nodes[uuid].children.copy()
+
+ new_parent_kept_uuid = None
+ if self.nodes[uuid].keep_tag is False:
+ new_parent_kept_uuid = parent_kept_uuid
+ # Need to modify tree
+ if self.nodes[uuid].parent_uuid is not None:
+ self.nodes[self.nodes[uuid].parent_uuid].children.remove(uuid)
+ else:
+ # Remove from root
+ self.roots.remove(uuid)
+ else:
+ new_parent_kept_uuid = uuid
+
+ # If parent_uuid is not parent_kept_uuid, we need to modify children list of parent_kept_uuid
+ if parent_kept_uuid != self.nodes[uuid].parent_uuid and parent_kept_uuid is not None:
+ self.tree_troncated = True
+ self.nodes[parent_kept_uuid].children.append(uuid)
+
+ # If parent_kept_uuid is None, and parent_uuid was not, add to root list
+ if self.nodes[uuid].parent_uuid is not None and parent_kept_uuid is None:
+ self.tree_troncated = True
+ self.roots.append(uuid)
+
+ # Modify parent uuid
+ self.nodes[uuid].parent_uuid = parent_kept_uuid
+
+ for child in children:
+ self.recursive_filter(child, new_parent_kept_uuid)
+
+
+ def node_filter_not_inheritable_is_kept(self, uuid):
+ # Export Camera or not
+ if self.nodes[uuid].blender_type == VExportNode.CAMERA:
+ if self.export_settings[gltf2_blender_export_keys.CAMERAS] is False:
+ return False
+
+ # Export Lamp or not
+ if self.nodes[uuid].blender_type == VExportNode.LIGHT:
+ if self.export_settings[gltf2_blender_export_keys.LIGHTS] is False:
+ return False
+
+ # Export deform bones only
+ if self.nodes[uuid].blender_type == VExportNode.BONE:
+ if self.export_settings['gltf_def_bones'] is True and self.nodes[uuid].use_deform is False:
+ # Check if the bone has some objects parented to it. We need to keep it in that case, even if this is not a def bone
+ if len([c for c in self.nodes[uuid].children if self.nodes[c].blender_type != VExportNode.BONE]) != 0:
+ return True
+ return False
+
+ return True
+
+ def node_filter_inheritable_is_kept(self, uuid):
+
+ if self.export_settings[gltf2_blender_export_keys.SELECTED] and self.nodes[uuid].blender_object.select_get() is False:
+ return False
+
+ if self.export_settings[gltf2_blender_export_keys.VISIBLE]:
+ # The eye in outliner (object)
+ if self.nodes[uuid].blender_object.visible_get() is False:
+ return False
+
+ # The screen in outliner (object)
+ if self.nodes[uuid].blender_object.hide_viewport is True:
+ return False
+
+ # The screen in outliner (collections)
+ if all([c.hide_viewport for c in self.nodes[uuid].blender_object.users_collection]):
+ return False
+
+ # The camera in outliner (object)
+ if self.export_settings[gltf2_blender_export_keys.RENDERABLE]:
+ if self.nodes[uuid].blender_object.hide_render is True:
+ return False
+
+ # The camera in outliner (collections)
+ if all([c.hide_render for c in self.nodes[uuid].blender_object.users_collection]):
+ return False
+
+ if self.export_settings[gltf2_blender_export_keys.ACTIVE_COLLECTION]:
+ found = any(x == self.nodes[uuid].blender_object for x in bpy.context.collection.all_objects)
+ if not found:
+ return False
+
+ return True
+
+ def search_missing_armature(self):
+ for n in [n for n in self.nodes.values() if hasattr(n, "armature_needed") is True]:
+ candidates = [i for i in self.nodes.values() if i.blender_type == VExportNode.ARMATURE and i.blender_object.name == n.armature_needed]
+ if len(candidates) > 0:
+ n.armature = candidates[0].uuid
+ del n.armature_needed
diff --git a/io_scene_gltf2/blender/exp/gltf2_blender_get.py b/io_scene_gltf2/blender/exp/gltf2_blender_get.py
index 940f6f0a..e38906e6 100755
--- a/io_scene_gltf2/blender/exp/gltf2_blender_get.py
+++ b/io_scene_gltf2/blender/exp/gltf2_blender_get.py
@@ -32,6 +32,21 @@ def get_object_from_datapath(blender_object, data_path: str):
return prop
+def get_node_socket(blender_material, type, name):
+ """
+ For a given material input name, retrieve the corresponding node tree socket for a given node type.
+
+ :param blender_material: a blender material for which to get the socket
+ :return: a blender NodeSocket for a given type
+ """
+ nodes = [n for n in blender_material.node_tree.nodes if isinstance(n, type) and not n.mute]
+ nodes = [node for node in nodes if check_if_is_linked_to_active_output(node.outputs[0])]
+ inputs = sum([[input for input in node.inputs if input.name == name] for node in nodes], [])
+ if inputs:
+ return inputs[0]
+ return None
+
+
def get_socket(blender_material: bpy.types.Material, name: str):
"""
For a given material input name, retrieve the corresponding node tree socket.
@@ -46,13 +61,9 @@ def get_socket(blender_material: bpy.types.Material, name: str):
if name == "Emissive":
# Check for a dedicated Emission node first, it must supersede the newer built-in one
# because the newer one is always present in all Principled BSDF materials.
- type = bpy.types.ShaderNodeEmission
- name = "Color"
- nodes = [n for n in blender_material.node_tree.nodes if isinstance(n, type) and not n.mute]
- nodes = [node for node in nodes if check_if_is_linked_to_active_output(node.outputs[0])]
- inputs = sum([[input for input in node.inputs if input.name == name] for node in nodes], [])
- if inputs:
- return inputs[0]
+ emissive_socket = get_node_socket(blender_material, bpy.types.ShaderNodeEmission, "Color")
+ if emissive_socket:
+ return emissive_socket
# If a dedicated Emission node was not found, fall back to the Principled BSDF Emission socket.
name = "Emission"
type = bpy.types.ShaderNodeBsdfPrincipled
@@ -61,11 +72,8 @@ def get_socket(blender_material: bpy.types.Material, name: str):
name = "Color"
else:
type = bpy.types.ShaderNodeBsdfPrincipled
- nodes = [n for n in blender_material.node_tree.nodes if isinstance(n, type) and not n.mute]
- nodes = [node for node in nodes if check_if_is_linked_to_active_output(node.outputs[0])]
- inputs = sum([[input for input in node.inputs if input.name == name] for node in nodes], [])
- if inputs:
- return inputs[0]
+
+ return get_node_socket(blender_material, type, name)
return None
diff --git a/magic_uv/__init__.py b/magic_uv/__init__.py
index bc365eeb..18bdb17c 100644
--- a/magic_uv/__init__.py
+++ b/magic_uv/__init__.py
@@ -19,8 +19,6 @@ bl_info = {
"description": "UV Toolset. See Add-ons Preferences for details",
"warning": "",
"support": "COMMUNITY",
- "wiki_url": "https://docs.blender.org/manual/en/dev/addons/"
- "uv/magic_uv.html",
"doc_url": "{BLENDER_MANUAL_URL}/addons/uv/magic_uv.html",
"tracker_url": "https://github.com/nutti/Magic-UV",
"category": "UV",
diff --git a/measureit/__init__.py b/measureit/__init__.py
index c0bf066b..8b0c827c 100644
--- a/measureit/__init__.py
+++ b/measureit/__init__.py
@@ -16,8 +16,7 @@ bl_info = {
"version": (1, 8, 1),
"blender": (2, 80, 0),
"description": "Tools for measuring objects.",
- "doc_url": "{BLENDER_MANUAL_URL}/addons"
- "/3d_view/measureit.html",
+ "doc_url": "{BLENDER_MANUAL_URL}/addons/3d_view/measureit.html",
"category": "3D View"
}
diff --git a/mesh_snap_utilities_line/__init__.py b/mesh_snap_utilities_line/__init__.py
index 29a9a1a4..9114e96e 100644
--- a/mesh_snap_utilities_line/__init__.py
+++ b/mesh_snap_utilities_line/__init__.py
@@ -10,7 +10,7 @@ bl_info = {
"blender": (3, 0, 0),
"location": "View3D > TOOLS > Line Tool",
"description": "Extends Blender Snap controls",
- "doc_url" : "https://docs.blender.org/manual/en/latest/addons/mesh/snap_utilities_line.html",
+ "doc_url" : "{BLENDER_MANUAL_URL}/addons/mesh/snap_utilities_line.html",
"category": "Mesh",
}
diff --git a/mesh_tiny_cad/__init__.py b/mesh_tiny_cad/__init__.py
index 42e76c1b..75374925 100644
--- a/mesh_tiny_cad/__init__.py
+++ b/mesh_tiny_cad/__init__.py
@@ -9,7 +9,7 @@ bl_info = {
"version": (1, 3, 2),
"blender": (2, 80, 0),
"location": "View3D > EditMode Context Menu",
- "doc_url": "http://zeffii.github.io/mesh_tiny_cad/",
+ "doc_url": "{BLENDER_MANUAL_URL}/addons/mesh/tinycad.html",
"tracker_url": "https://github.com/zeffii/mesh_tiny_cad/issues",
"category": "Mesh",
}
diff --git a/mesh_tissue/README.md b/mesh_tissue/README.md
new file mode 100644
index 00000000..dfdc6fec
--- /dev/null
+++ b/mesh_tissue/README.md
@@ -0,0 +1,47 @@
+# Tissue
+![cover](http://www.co-de-it.com/wordpress/wp-content/uploads/2015/07/tissue_graphics.jpg)
+Tissue - Blender's add-on for computational design by Co-de-iT
+http://www.co-de-it.com/wordpress/code/blender-tissue
+
+Tissue is already shipped with both Blender. However I recommend to update the default version downloading manually the most recent one, for more updated features and more stability.
+
+### Blender 2.93
+
+Tissue v0.3.52 for Blender 2.93 (latest stable release): https://github.com/alessandro-zomparelli/tissue/releases/tag/v0-3-52
+
+Development branch (usually the most updated version): https://github.com/alessandro-zomparelli/tissue/tree/b290-dev
+
+### Blender 2.79 (unsupported)
+
+Tissue v0.3.4 for Blender 2.79b (latest stable release): https://github.com/alessandro-zomparelli/tissue/releases/tag/v0-3-4
+
+Development branch (most updated version): https://github.com/alessandro-zomparelli/tissue/tree/dev1
+
+
+### Installation:
+
+1. Start Blender. Go to "Edit" and then "Preferences"
+2. Open the "Add-ons" preferences
+3. Click "install..." and point Blender at the downloaded zip file (on OSX it may have extracted the zip automatically, that won't work, so you have to zip the extracted folder again)
+4. You may see now two different versions of Tissue, activate only the second one and ignore the first one
+
+### Documentation
+
+Tissue documentation for Blender 2.80: https://github.com/alessandro-zomparelli/tissue/wiki
+
+
+### Issues
+Please help me keeping Tissue stable and updated, report any issues or feedback here: https://github.com/alessandro-zomparelli/tissue/issues
+
+### Contribute
+Tissue is free and open-source. I really think that this is the power of Blender and I wanted to give my small contribution to it.
+
+If you like my work and you want to help me, please consider to support me on **Patreon**, where I share some tips about Blender, Tissue and scripting: https://www.patreon.com/alessandrozomparelli
+
+[![Patreon](http://alessandrozomparelli.com/wp-content/uploads/2020/04/patreon-transparent-vector-small.png)](https://www.patreon.com/alessandrozomparelli)
+
+A special thanks to all my patrons, in particular to my **Tissue Supporters**: *TomaLaboratory*, *Scott Shorter*, *Garrett Post*, *Kairomon*, *Art Evans*, *Justin Davis*, *John Wise*, *Avi Bryant*, *Ahmed Saber*, *SlimeSound Production*, *Steffen Meier*.
+
+Many thanks,
+
+Alessandro
diff --git a/mesh_tissue/__init__.py b/mesh_tissue/__init__.py
index 0d777f50..03fa4b61 100644
--- a/mesh_tissue/__init__.py
+++ b/mesh_tissue/__init__.py
@@ -17,12 +17,12 @@
bl_info = {
"name": "Tissue",
"author": "Alessandro Zomparelli (Co-de-iT)",
- "version": (0, 3, 25),
- "blender": (2, 80, 0),
- "location": "Sidebar > Edit Tab",
+ "version": (0, 3, 52),
+ "blender": (2, 93, 0),
+ "location": "",
"description": "Tools for Computational Design",
"warning": "",
- "doc_url": "https://github.com/alessandro-zomparelli/tissue/wiki",
+ "doc_url": "{BLENDER_MANUAL_URL}/addons/mesh/tissue.html",
"tracker_url": "https://github.com/alessandro-zomparelli/tissue/issues",
"category": "Mesh",
}
@@ -31,59 +31,107 @@ bl_info = {
if "bpy" in locals():
import importlib
importlib.reload(tessellate_numpy)
- importlib.reload(colors_groups_exchanger)
+ importlib.reload(tissue_properties)
+ importlib.reload(weight_tools)
importlib.reload(dual_mesh)
importlib.reload(lattice)
importlib.reload(uv_to_mesh)
importlib.reload(utils)
+ importlib.reload(config)
+ importlib.reload(material_tools)
+ importlib.reload(curves_tools)
+ importlib.reload(polyhedra)
else:
from . import tessellate_numpy
- from . import colors_groups_exchanger
+ from . import tissue_properties
+ from . import weight_tools
from . import dual_mesh
from . import lattice
from . import uv_to_mesh
from . import utils
+ from . import config
+ from . import material_tools
+ from . import curves_tools
+ from . import polyhedra
import bpy
from bpy.props import PointerProperty, CollectionProperty, BoolProperty
+
classes = (
- tessellate_numpy.tissue_tessellate_prop,
- tessellate_numpy.tessellate,
- tessellate_numpy.update_tessellate,
+ config.tissuePreferences,
+ config.tissue_install_numba,
+
+ tissue_properties.tissue_prop,
+ tissue_properties.tissue_tessellate_prop,
+ tessellate_numpy.tissue_tessellate,
+ tessellate_numpy.tissue_update_tessellate,
+ tessellate_numpy.tissue_update_tessellate_deps,
tessellate_numpy.TISSUE_PT_tessellate,
- tessellate_numpy.rotate_face,
+ tessellate_numpy.tissue_rotate_face_left,
+ tessellate_numpy.tissue_rotate_face_right,
+ tessellate_numpy.tissue_rotate_face_flip,
tessellate_numpy.TISSUE_PT_tessellate_object,
-
- colors_groups_exchanger.face_area_to_vertex_groups,
- colors_groups_exchanger.vertex_colors_to_vertex_groups,
- colors_groups_exchanger.vertex_group_to_vertex_colors,
- colors_groups_exchanger.TISSUE_PT_weight,
- colors_groups_exchanger.TISSUE_PT_color,
- colors_groups_exchanger.weight_contour_curves,
- colors_groups_exchanger.weight_contour_mask,
- colors_groups_exchanger.weight_contour_displace,
- colors_groups_exchanger.harmonic_weight,
- colors_groups_exchanger.edges_deformation,
- colors_groups_exchanger.edges_bending,
- colors_groups_exchanger.weight_laplacian,
- colors_groups_exchanger.reaction_diffusion,
- colors_groups_exchanger.start_reaction_diffusion,
- colors_groups_exchanger.TISSUE_PT_reaction_diffusion,
- colors_groups_exchanger.reset_reaction_diffusion_weight,
- colors_groups_exchanger.formula_prop,
- colors_groups_exchanger.reaction_diffusion_prop,
- colors_groups_exchanger.weight_formula,
- colors_groups_exchanger.curvature_to_vertex_groups,
- colors_groups_exchanger.weight_formula_wiki,
+ tessellate_numpy.TISSUE_PT_tessellate_frame,
+ tessellate_numpy.TISSUE_PT_tessellate_component,
+ tessellate_numpy.TISSUE_PT_tessellate_thickness,
+ tessellate_numpy.TISSUE_PT_tessellate_direction,
+ tessellate_numpy.TISSUE_PT_tessellate_options,
+ tessellate_numpy.TISSUE_PT_tessellate_coordinates,
+ tessellate_numpy.TISSUE_PT_tessellate_rotation,
+ tessellate_numpy.TISSUE_PT_tessellate_selective,
+ tessellate_numpy.TISSUE_PT_tessellate_morphing,
+ tessellate_numpy.TISSUE_PT_tessellate_iterations,
+ tessellate_numpy.tissue_render_animation,
+
+ weight_tools.face_area_to_vertex_groups,
+ weight_tools.vertex_colors_to_vertex_groups,
+ weight_tools.vertex_group_to_vertex_colors,
+ weight_tools.vertex_group_to_uv,
+ weight_tools.TISSUE_PT_weight,
+ weight_tools.TISSUE_PT_color,
+ weight_tools.weight_contour_curves,
+ weight_tools.tissue_weight_contour_curves_pattern,
+ weight_tools.weight_contour_mask,
+ weight_tools.weight_contour_displace,
+ weight_tools.harmonic_weight,
+ weight_tools.edges_deformation,
+ weight_tools.edges_bending,
+ weight_tools.weight_laplacian,
+ weight_tools.reaction_diffusion,
+ weight_tools.start_reaction_diffusion,
+ weight_tools.TISSUE_PT_reaction_diffusion,
+ weight_tools.TISSUE_PT_reaction_diffusion_weight,
+ weight_tools.reset_reaction_diffusion_weight,
+ weight_tools.formula_prop,
+ weight_tools.reaction_diffusion_prop,
+ weight_tools.weight_formula,
+ weight_tools.update_weight_formula,
+ weight_tools.curvature_to_vertex_groups,
+ weight_tools.weight_formula_wiki,
+ weight_tools.tissue_weight_distance,
+ weight_tools.random_weight,
+ weight_tools.bake_reaction_diffusion,
+ weight_tools.reaction_diffusion_free_data,
+ weight_tools.tissue_weight_streamlines,
dual_mesh.dual_mesh,
dual_mesh.dual_mesh_tessellated,
lattice.lattice_along_surface,
- uv_to_mesh.uv_to_mesh
+ material_tools.random_materials,
+ material_tools.weight_to_materials,
+
+ curves_tools.tissue_to_curve_prop,
+ curves_tools.tissue_convert_to_curve,
+ curves_tools.tissue_convert_to_curve_update,
+ curves_tools.TISSUE_PT_convert_to_curve,
+
+ uv_to_mesh.uv_to_mesh,
+
+ polyhedra.polyhedra_wireframe
)
def register():
@@ -91,28 +139,29 @@ def register():
for cls in classes:
bpy.utils.register_class(cls)
#bpy.utils.register_module(__name__)
+ bpy.types.Object.tissue = PointerProperty(
+ type=tissue_properties.tissue_prop
+ )
bpy.types.Object.tissue_tessellate = PointerProperty(
- type=tessellate_numpy.tissue_tessellate_prop
+ type=tissue_properties.tissue_tessellate_prop
+ )
+ bpy.types.Object.tissue_to_curve = PointerProperty(
+ type=curves_tools.tissue_to_curve_prop
)
bpy.types.Object.formula_settings = CollectionProperty(
- type=colors_groups_exchanger.formula_prop
+ type=weight_tools.formula_prop
)
bpy.types.Object.reaction_diffusion_settings = PointerProperty(
- type=colors_groups_exchanger.reaction_diffusion_prop
+ type=weight_tools.reaction_diffusion_prop
)
- # colors_groups_exchanger
- bpy.app.handlers.frame_change_post.append(colors_groups_exchanger.reaction_diffusion_def)
+ # weight_tools
+ bpy.app.handlers.frame_change_post.append(weight_tools.reaction_diffusion_def)
#bpy.app.handlers.frame_change_post.append(tessellate_numpy.anim_tessellate)
def unregister():
from bpy.utils import unregister_class
for cls in classes:
bpy.utils.unregister_class(cls)
- #tessellate_numpy.unregister()
- #colors_groups_exchanger.unregister()
- #dual_mesh.unregister()
- #lattice.unregister()
- #uv_to_mesh.unregister()
del bpy.types.Object.tissue_tessellate
diff --git a/mesh_tissue/colors_groups_exchanger.py b/mesh_tissue/colors_groups_exchanger.py
deleted file mode 100644
index 98427477..00000000
--- a/mesh_tissue/colors_groups_exchanger.py
+++ /dev/null
@@ -1,2468 +0,0 @@
-# SPDX-License-Identifier: GPL-2.0-or-later
-
-#-------------------------- COLORS / GROUPS EXCHANGER -------------------------#
-# #
-# Vertex Color to Vertex Group allow you to convert colors channels to weight #
-# maps. #
-# The main purpose is to use vertex colors to store information when importing #
-# files from other software. The script works with the active vertex color #
-# slot. #
-# For use the command "Vertex Clors to Vertex Groups" use the search bar #
-# (space bar). #
-# #
-# (c) Alessandro Zomparelli #
-# (2017) #
-# #
-# http://www.co-de-it.com/ #
-# #
-################################################################################
-
-import bpy, bmesh
-import numpy as np
-import math, timeit, time
-from math import *#pi, sin
-from statistics import mean, stdev
-from mathutils import Vector
-from numpy import *
-try: from .numba_functions import numba_reaction_diffusion
-except: pass
-
-from bpy.types import (
- Operator,
- Panel,
- PropertyGroup,
- )
-
-from bpy.props import (
- BoolProperty,
- EnumProperty,
- FloatProperty,
- IntProperty,
- StringProperty,
- FloatVectorProperty,
- IntVectorProperty
-)
-
-from .utils import *
-
-def reaction_diffusion_add_handler(self, context):
- # remove existing handlers
- old_handlers = []
- for h in bpy.app.handlers.frame_change_post:
- if "reaction_diffusion" in str(h):
- old_handlers.append(h)
- for h in old_handlers: bpy.app.handlers.frame_change_post.remove(h)
- # add new handler
- bpy.app.handlers.frame_change_post.append(reaction_diffusion_def)
-
-class formula_prop(PropertyGroup):
- name : StringProperty()
- formula : StringProperty()
- float_var : FloatVectorProperty(name="", description="", default=(0, 0, 0, 0, 0), size=5)
- int_var : IntVectorProperty(name="", description="", default=(0, 0, 0, 0, 0), size=5)
-
-class reaction_diffusion_prop(PropertyGroup):
- run : BoolProperty(default=False, update = reaction_diffusion_add_handler,
- description='Compute a new iteration on frame changes. Currently is not working during Render Animation')
-
- time_steps : bpy.props.IntProperty(
- name="Steps", default=10, min=0, soft_max=50,
- description="Number of Steps")
-
- dt : bpy.props.FloatProperty(
- name="dt", default=1, min=0, soft_max=0.2,
- description="Time Step")
-
- diff_a : bpy.props.FloatProperty(
- name="Diff A", default=0.1, min=0, soft_max=2, precision=3,
- description="Diffusion A")
-
- diff_b : bpy.props.FloatProperty(
- name="Diff B", default=0.05, min=0, soft_max=2, precision=3,
- description="Diffusion B")
-
- f : bpy.props.FloatProperty(
- name="f", default=0.055, min=0, soft_max=0.5, precision=3,
- description="Feed Rate")
-
- k : bpy.props.FloatProperty(
- name="k", default=0.062, min=0, soft_max=0.5, precision=3,
- description="Kill Rate")
-
- diff_mult : bpy.props.FloatProperty(
- name="Scale", default=1, min=0, soft_max=1, max=2, precision=2,
- description="Multiplier for the diffusion of both substances")
-
-def compute_formula(ob=None, formula="rx", float_var=(0,0,0,0,0), int_var=(0,0,0,0,0)):
- verts = ob.data.vertices
- n_verts = len(verts)
-
- f1,f2,f3,f4,f5 = float_var
- i1,i2,i3,i4,i5 = int_var
-
- do_groups = "w[" in formula
- do_local = "lx" in formula or "ly" in formula or "lz" in formula
- do_global = "gx" in formula or "gy" in formula or "gz" in formula
- do_relative = "rx" in formula or "ry" in formula or "rz" in formula
- do_normal = "nx" in formula or "ny" in formula or "nz" in formula
- mat = ob.matrix_world
-
- for i in range(1000):
- if "w["+str(i)+"]" in formula and i > len(ob.vertex_groups)-1:
- return "w["+str(i)+"] not found"
-
- w = []
- for i in range(len(ob.vertex_groups)):
- w.append([])
- if "w["+str(i)+"]" in formula:
- vg = ob.vertex_groups[i]
- for v in verts:
- try:
- w[i].append(vg.weight(v.index))
- except:
- w[i].append(0)
- w[i] = array(w[i])
-
- start_time = timeit.default_timer()
- # compute vertex coordinates
- if do_local or do_relative or do_global:
- co = [0]*n_verts*3
- verts.foreach_get('co', co)
- np_co = array(co).reshape((n_verts, 3))
- lx, ly, lz = array(np_co).transpose()
- if do_relative:
- rx = np.interp(lx, (lx.min(), lx.max()), (0, +1))
- ry = np.interp(ly, (ly.min(), ly.max()), (0, +1))
- rz = np.interp(lz, (lz.min(), lz.max()), (0, +1))
- if do_global:
- co = [v.co for v in verts]
- global_co = []
- for v in co:
- global_co.append(mat * v)
- global_co = array(global_co).reshape((n_verts, 3))
- gx, gy, gz = array(global_co).transpose()
- # compute vertex normals
- if do_normal:
- normal = [0]*n_verts*3
- verts.foreach_get('normal', normal)
- normal = array(normal).reshape((n_verts, 3))
- nx, ny, nz = array(normal).transpose()
-
- try:
- weight = eval(formula)
- return weight
- except:
- return "There is something wrong"
- print("Weight Formula: " + str(timeit.default_timer() - start_time))
-
-class weight_formula_wiki(bpy.types.Operator):
- bl_idname = "scene.weight_formula_wiki"
- bl_label = "Online Documentation"
- bl_options = {'REGISTER', 'UNDO'}
-
- def execute(self, context):
- bpy.ops.wm.url_open(url="https://github.com/alessandro-zomparelli/tissue/wiki/Weight-Tools#weight-formula")
- return {'FINISHED'}
-
-class weight_formula(bpy.types.Operator):
- bl_idname = "object.weight_formula"
- bl_label = "Weight Formula"
- bl_options = {'REGISTER', 'UNDO'}
-
- ex = [
- #'cos(arctan(nx/ny)*6 + sin(rz*30)*0.5)/2 + cos(arctan(nx/ny)*6 - sin(rz*30)*0.5 + pi/2)/2 + 0.5',
- 'cos(arctan(nx/ny)*i1*2 + sin(rz*i3))/i2 + cos(arctan(nx/ny)*i1*2 - sin(rz*i3))/i2 + 0.5',
- 'cos(arctan(nx/ny)*i1*2 + sin(rz*i2))/2 + cos(arctan(nx/ny)*i1*2 - sin(rz*i2))/2',
- '(sin(arctan(nx/ny)*i1)*sin(nz*i1)+1)/2',
- 'cos(arctan(nx/ny)*f1)',
- 'cos(arctan(lx/ly)*f1 + sin(rz*f2)*f3)',
- 'sin(nx*15)<sin(ny*15)',
- 'cos(ny*rz**2*i1)',
- 'sin(rx*30) > 0',
- 'sin(nz*i1)',
- 'w[0]**2',
- 'sqrt((rx-0.5)**2 + (ry-0.5)**2)*2',
- 'abs(0.5-rz)*2',
- 'rx'
- ]
- ex_items = list((s,s,"") for s in ex)
- ex_items.append(('CUSTOM', "User Formula", ""))
-
- examples : bpy.props.EnumProperty(
- items = ex_items, default='CUSTOM', name="Examples")
-
- old_ex = ""
-
- formula : bpy.props.StringProperty(
- name="Formula", default="", description="Formula to Evaluate")
- bl_description = ("Generate a Vertex Group based on the given formula")
-
- slider_f01 : bpy.props.FloatProperty(
- name="f1", default=1, description="Slider")
- bl_description = ("Slider Float 1")
- slider_f02 : bpy.props.FloatProperty(
- name="f2", default=1, description="Slider")
- bl_description = ("Slider Float 2")
- slider_f03 : bpy.props.FloatProperty(
- name="f3", default=1, description="Slider")
- bl_description = ("Slider Float 3")
- slider_f04 : bpy.props.FloatProperty(
- name="f4", default=1, description="Slider")
- bl_description = ("Slider Float 4")
- slider_f05 : bpy.props.FloatProperty(
- name="f5", default=1, description="Slider")
- bl_description = ("Slider Float 5")
- slider_i01 : bpy.props.IntProperty(
- name="i1", default=1, description="Slider")
- bl_description = ("Slider Integer 1")
- slider_i02 : bpy.props.IntProperty(
- name="i2", default=1, description="Slider")
- bl_description = ("Slider Integer 2")
- slider_i03 : bpy.props.IntProperty(
- name="i3", default=1, description="Slider")
- bl_description = ("Slider Integer 3")
- slider_i04 : bpy.props.IntProperty(
- name="i4", default=1, description="Slider")
- bl_description = ("Slider Integer 4")
- slider_i05 : bpy.props.IntProperty(
- name="i5", default=1, description="Slider")
- bl_description = ("Slider Integer 5")
-
- def invoke(self, context, event):
- return context.window_manager.invoke_props_dialog(self, width=350)
-
- def draw(self, context):
- layout = self.layout
- #layout.label(text="Examples")
- layout.prop(self, "examples", text="Examples")
- #if self.examples == 'CUSTOM':
- layout.label(text="Formula")
- layout.prop(self, "formula", text="")
- #try: self.examples = self.formula
- #except: pass
-
- if self.examples != self.old_ex and self.examples != 'CUSTOM':
- self.formula = self.examples
- self.old_ex = self.examples
- elif self.formula != self.examples:
- self.examples = 'CUSTOM'
- formula = self.formula
-
- layout.separator()
- if "f1" in formula: layout.prop(self, "slider_f01")
- if "f2" in formula: layout.prop(self, "slider_f02")
- if "f3" in formula: layout.prop(self, "slider_f03")
- if "f4" in formula: layout.prop(self, "slider_f04")
- if "f5" in formula: layout.prop(self, "slider_f05")
- if "i1" in formula: layout.prop(self, "slider_i01")
- if "i2" in formula: layout.prop(self, "slider_i02")
- if "i3" in formula: layout.prop(self, "slider_i03")
- if "i4" in formula: layout.prop(self, "slider_i04")
- if "i5" in formula: layout.prop(self, "slider_i05")
-
- layout.label(text="Variables (for each vertex):")
- layout.label(text="lx, ly, lz: Local Coordinates", icon='ORIENTATION_LOCAL')
- layout.label(text="gx, gy, gz: Global Coordinates", icon='WORLD')
- layout.label(text="rx, ry, rz: Local Coordinates (0 to 1)", icon='NORMALIZE_FCURVES')
- layout.label(text="nx, ny, nz: Normal Coordinates", icon='SNAP_NORMAL')
- layout.label(text="w[0], w[1], w[2], ... : Vertex Groups", icon="GROUP_VERTEX")
- layout.separator()
- layout.label(text="f1, f2, f3, f4, f5: Float Sliders", icon='MOD_HUE_SATURATION')#PROPERTIES
- layout.label(text="i1, i2, i3, i4, i5: Integer Sliders", icon='MOD_HUE_SATURATION')
- layout.separator()
- #layout.label(text="All mathematical functions are based on Numpy", icon='INFO')
- #layout.label(text="https://docs.scipy.org/doc/numpy-1.13.0/reference/routines.math.html", icon='INFO')
- layout.operator("scene.weight_formula_wiki", icon="HELP")
- #layout.label(text="(where 'i' is the index of the Vertex Group)")
-
- def execute(self, context):
- ob = bpy.context.active_object
- n_verts = len(ob.data.vertices)
- #if self.examples == 'CUSTOM':
- # formula = self.formula
- #else:
- #self.formula = self.examples
- # formula = self.examples
-
- #f1, f2, f3, f4, f5 = self.slider_f01, self.slider_f02, self.slider_f03, self.slider_f04, self.slider_f05
- #i1, i2, i3, i4, i5 = self.slider_i01, self.slider_i02, self.slider_i03, self.slider_i04, self.slider_i05
- f_sliders = self.slider_f01, self.slider_f02, self.slider_f03, self.slider_f04, self.slider_f05
- i_sliders = self.slider_i01, self.slider_i02, self.slider_i03, self.slider_i04, self.slider_i05
-
- if self.examples != self.old_ex and self.examples != 'CUSTOM':
- self.formula = self.examples
- self.old_ex = self.examples
- elif self.formula != self.examples:
- self.examples = 'CUSTOM'
- formula = self.formula
-
- if formula == "": return {'FINISHED'}
- vertex_group_name = "Formula " + formula
- ob.vertex_groups.new(name=vertex_group_name)
-
- weight = compute_formula(ob, formula=formula, float_var=f_sliders, int_var=i_sliders)
- if type(weight) == str:
- self.report({'ERROR'}, weight)
- return {'CANCELLED'}
-
- #start_time = timeit.default_timer()
- weight = nan_to_num(weight)
- if type(weight) == int or type(weight) == float:
- for i in range(n_verts):
- ob.vertex_groups[-1].add([i], weight, 'REPLACE')
- elif type(weight) == ndarray:
- for i in range(n_verts):
- ob.vertex_groups[-1].add([i], weight[i], 'REPLACE')
- ob.data.update()
- bpy.ops.object.mode_set(mode='WEIGHT_PAINT')
-
- # Store formula settings
- new_formula = ob.formula_settings.add()
- new_formula.name = ob.vertex_groups[-1].name
- new_formula.formula = formula
- new_formula.int_var = i_sliders
- new_formula.float_var = f_sliders
-
- #for f in ob.formula_settings:
- # print(f.name, f.formula, f.int_var, f.float_var)
- return {'FINISHED'}
-
-class _weight_laplacian(bpy.types.Operator):
- bl_idname = "object._weight_laplacian"
- bl_label = "Weight Laplacian"
- bl_description = ("Compute the Vertex Group Laplacian")
- bl_options = {'REGISTER', 'UNDO'}
-
- bounds : bpy.props.EnumProperty(
- items=(('MANUAL', "Manual Bounds", ""),
- ('POSITIVE', "Positive Only", ""),
- ('NEGATIVE', "Negative Only", ""),
- ('AUTOMATIC', "Automatic Bounds", "")),
- default='AUTOMATIC', name="Bounds")
-
- mode : bpy.props.EnumProperty(
- items=(('LENGTH', "Length Weight", ""),
- ('SIMPLE', "Simple", "")),
- default='SIMPLE', name="Evaluation Mode")
-
- min_def : bpy.props.FloatProperty(
- name="Min", default=0, soft_min=-1, soft_max=0,
- description="Laplacian value with 0 weight")
-
- max_def : bpy.props.FloatProperty(
- name="Max", default=0.5, soft_min=0, soft_max=5,
- description="Laplacian value with 1 weight")
-
- bounds_string = ""
-
- frame = None
-
- @classmethod
- def poll(cls, context):
- return len(context.object.vertex_groups) > 0
-
- def draw(self, context):
- layout = self.layout
- col = layout.column(align=True)
- col.label(text="Evaluation Mode")
- col.prop(self, "mode", text="")
- col.label(text="Bounds")
- col.prop(self, "bounds", text="")
- if self.bounds == 'MANUAL':
- col.label(text="Strain Rate \u03B5:")
- col.prop(self, "min_def")
- col.prop(self, "max_def")
- col.label(text="\u03B5" + ": from " + self.bounds_string)
-
-
- def execute(self, context):
- try: ob = context.object
- except:
- self.report({'ERROR'}, "Please select an Object")
- return {'CANCELLED'}
-
- group_id = ob.vertex_groups.active_index
- input_group = ob.vertex_groups[group_id].name
-
- group_name = "Laplacian"
- ob.vertex_groups.new(name=group_name)
- me = ob.data
- bm = bmesh.new()
- bm.from_mesh(me)
- bm.edges.ensure_lookup_table()
-
- # store weight values
- weight = []
- for v in me.vertices:
- try:
- weight.append(ob.vertex_groups[input_group].weight(v.index))
- except:
- weight.append(0)
-
- n_verts = len(bm.verts)
- lap = [0]*n_verts
- for e in bm.edges:
- if self.mode == 'LENGTH':
- length = e.calc_length()
- if length == 0: continue
- id0 = e.verts[0].index
- id1 = e.verts[1].index
- lap[id0] += weight[id1]/length - weight[id0]/length
- lap[id1] += weight[id0]/length - weight[id1]/length
- else:
- id0 = e.verts[0].index
- id1 = e.verts[1].index
- lap[id0] += weight[id1] - weight[id0]
- lap[id1] += weight[id0] - weight[id1]
-
- mean_lap = mean(lap)
- stdev_lap = stdev(lap)
- filter_lap = [i for i in lap if mean_lap-2*stdev_lap < i < mean_lap+2*stdev_lap]
- if self.bounds == 'MANUAL':
- min_def = self.min_def
- max_def = self.max_def
- elif self.bounds == 'AUTOMATIC':
- min_def = min(filter_lap)
- max_def = max(filter_lap)
- self.min_def = min_def
- self.max_def = max_def
- elif self.bounds == 'NEGATIVE':
- min_def = 0
- max_def = min(filter_lap)
- self.min_def = min_def
- self.max_def = max_def
- elif self.bounds == 'POSITIVE':
- min_def = 0
- max_def = max(filter_lap)
- self.min_def = min_def
- self.max_def = max_def
- delta_def = max_def - min_def
-
- # check undeformed errors
- if delta_def == 0: delta_def = 0.0001
-
- for i in range(len(lap)):
- val = (lap[i]-min_def)/delta_def
- if val > 0.7: print(str(val) + " " + str(lap[i]))
- #val = weight[i] + 0.2*lap[i]
- ob.vertex_groups[-1].add([i], val, 'REPLACE')
- self.bounds_string = str(round(min_def,2)) + " to " + str(round(max_def,2))
- ob.vertex_groups[-1].name = group_name + " " + self.bounds_string
- ob.vertex_groups.update()
- ob.data.update()
- bpy.ops.object.mode_set(mode='WEIGHT_PAINT')
- return {'FINISHED'}
-
-class weight_laplacian(bpy.types.Operator):
- bl_idname = "object.weight_laplacian"
- bl_label = "Weight Laplacian"
- bl_description = ("Compute the Vertex Group Laplacian")
- bl_options = {'REGISTER', 'UNDO'}
-
- steps : bpy.props.IntProperty(
- name="Steps", default=10, min=0, soft_max=50,
- description="Number of Steps")
-
- dt : bpy.props.FloatProperty(
- name="dt", default=0.2, min=0, soft_max=0.2,
- description="Time Step")
-
- diff_a : bpy.props.FloatProperty(
- name="Diff A", default=1, min=0, soft_max=2,
- description="Diffusion A")
-
- diff_b : bpy.props.FloatProperty(
- name="Diff B", default=0.5, min=0, soft_max=2,
- description="Diffusion B")
-
- f : bpy.props.FloatProperty(
- name="f", default=0.055, min=0, soft_max=0.5,
- description="Feed Rate")
-
- k : bpy.props.FloatProperty(
- name="k", default=0.062, min=0, soft_max=0.5,
- description="Kill Rate")
-
- diff_mult : bpy.props.FloatProperty(
- name="Scale", default=1, min=0, soft_max=1, max=2, precision=2,
- description="Multiplier for the diffusion of both substances")
-
- bounds_string = ""
-
- frame = None
-
- @classmethod
- def poll(cls, context):
- return len(context.object.vertex_groups) > 0
-
-
- def execute(self, context):
- try: ob = context.object
- except:
- self.report({'ERROR'}, "Please select an Object")
- return {'CANCELLED'}
-
- me = ob.data
- bm = bmesh.new()
- bm.from_mesh(me)
- bm.edges.ensure_lookup_table()
-
- # store weight values
- a = []
- b = []
- for v in me.vertices:
- try:
- a.append(ob.vertex_groups["A"].weight(v.index))
- except:
- a.append(0)
- try:
- b.append(ob.vertex_groups["B"].weight(v.index))
- except:
- b.append(0)
-
- a = array(a)
- b = array(b)
- f = self.f
- k = self.k
- diff_a = self.diff_a * self.diff_mult
- diff_b = self.diff_b * self.diff_mult
- dt = self.dt
-
- # initialize
- n_verts = len(bm.verts)
- # find max number of edges for vertex
- max_edges = 0
- n_neighbors = []
- id_neighbors = []
- for v in bm.verts:
- n_edges = len(v.link_edges)
- max_edges = max(max_edges, n_edges)
- n_neighbors.append(n_edges)
- neighbors = []
- for e in link_edges:
- for v1 in e.verts:
- if v != v1: neighbors.append(v1.index)
- id_neighbors.append(neighbors)
- n_neighbors = array(n_neighbors)
-
-
- a = [[] for i in range(n_verts)]
- lap_map = []
-
- for e in bm.edges:
- id0 = e.verts[0].index
- id1 = e.verts[1].index
- lap_map[id0].append(id1)
- lap_map[id1].append(id0)
-
- e1 = array(e1)
- e2 = array(e2)
- lap_a = a[e1]
-
- for i in range(self.steps):
-
- lap_a = zeros((n_verts))#[0]*n_verts
- lap_b = zeros((n_verts))#[0]*n_verts
- for e in bm.edges:
- id0 = e.verts[0].index
- id1 = e.verts[1].index
- lap_a[id0] += a[id1] - a[id0]
- lap_a[id1] += a[id0] - a[id1]
- lap_b[id0] += b[id1] - b[id0]
- lap_b[id1] += b[id0] - b[id1]
- ab2 = a*b**2
- a += (diff_a*lap_a - ab2 + f*(1-a))*dt
- b += (diff_b*lap_b + ab2 - (k+f)*b)*dt
-
- for i in range(n_verts):
- ob.vertex_groups['A'].add([i], a[i], 'REPLACE')
- ob.vertex_groups['B'].add([i], b[i], 'REPLACE')
- ob.vertex_groups.update()
- ob.data.update()
- bpy.ops.object.mode_set(mode='WEIGHT_PAINT')
- return {'FINISHED'}
-
-
class reaction_diffusion(bpy.types.Operator):
    bl_idname = "object.reaction_diffusion"
    bl_label = "Reaction Diffusion"
    bl_description = ("Run a Reaction-Diffusion based on existing Vertex Groups: A and B")
    bl_options = {'REGISTER', 'UNDO'}

    steps : bpy.props.IntProperty(
        name="Steps", default=10, min=0, soft_max=50,
        description="Number of Steps")

    dt : bpy.props.FloatProperty(
        name="dt", default=0.2, min=0, soft_max=0.2,
        description="Time Step")

    diff_a : bpy.props.FloatProperty(
        name="Diff A", default=1, min=0, soft_max=2,
        description="Diffusion A")

    diff_b : bpy.props.FloatProperty(
        name="Diff B", default=0.5, min=0, soft_max=2,
        description="Diffusion B")

    f : bpy.props.FloatProperty(
        name="f", default=0.055, min=0, soft_max=0.5,
        description="Feed Rate")

    k : bpy.props.FloatProperty(
        name="k", default=0.062, min=0, soft_max=0.5,
        description="Kill Rate")

    # Text describing the bounds used, shown in the redo panel.
    bounds_string = ""

    # Last simulated frame (None until a run stores it).
    frame = None

    @classmethod
    def poll(cls, context):
        # Requires the "A"/"B" groups, hence at least one Vertex Group.
        return len(context.object.vertex_groups) > 0


    def execute(self, context):
        """Advance a Gray-Scott reaction-diffusion system over the mesh.

        Concentrations are read from the Vertex Groups "A" and "B",
        integrated for ``steps`` explicit-Euler steps, and written back
        into the same groups.
        """
        reaction_diffusion_add_handler(self, context)
        set_animatable_fix_handler(self, context)
        try: ob = context.object
        except:
            self.report({'ERROR'}, "Please select an Object")
            return {'CANCELLED'}

        mesh = ob.data
        bm = bmesh.new()
        bm.from_mesh(mesh)
        bm.edges.ensure_lookup_table()

        def sampled_weight(group_name, index):
            # A vertex not assigned to the group (or a missing group)
            # contributes a concentration of 0.
            try:
                return ob.vertex_groups[group_name].weight(index)
            except:
                return 0

        conc_a = array([sampled_weight("A", v.index) for v in mesh.vertices])
        conc_b = array([sampled_weight("B", v.index) for v in mesh.vertices])

        feed = self.f
        kill = self.k
        rate_a = self.diff_a
        rate_b = self.diff_b
        dt = self.dt
        n_verts = len(bm.verts)

        # The mesh topology is constant: collect the edge end points once.
        edge_pairs = [(e.verts[0].index, e.verts[1].index) for e in bm.edges]

        for _ in range(self.steps):
            lap_a = zeros((n_verts))
            lap_b = zeros((n_verts))
            # Graph Laplacian: each edge contributes symmetrically to
            # both of its end points.
            for i0, i1 in edge_pairs:
                delta_a = conc_a[i1] - conc_a[i0]
                delta_b = conc_b[i1] - conc_b[i0]
                lap_a[i0] += delta_a
                lap_a[i1] -= delta_a
                lap_b[i0] += delta_b
                lap_b[i1] -= delta_b
            reaction = conc_a*conc_b**2
            conc_a += (rate_a*lap_a - reaction + feed*(1-conc_a))*dt
            conc_b += (rate_b*lap_b + reaction - (kill+feed)*conc_b)*dt

        # Store the result back into the Vertex Groups.
        group_a = ob.vertex_groups['A']
        group_b = ob.vertex_groups['B']
        for index in range(n_verts):
            group_a.add([index], conc_a[index], 'REPLACE')
            group_b.add([index], conc_b[index], 'REPLACE')
        ob.vertex_groups.update()
        ob.data.update()

        bpy.ops.wm.redraw_timer(type='DRAW_WIN_SWAP', iterations=1)

        bpy.ops.object.mode_set(mode='WEIGHT_PAINT')
        return {'FINISHED'}
-
-
class edges_deformation(bpy.types.Operator):
    bl_idname = "object.edges_deformation"
    bl_label = "Edges Deformation"
    bl_description = ("Compute Weight based on the deformation of edges "
                      "according to visible modifiers")
    bl_options = {'REGISTER', 'UNDO'}

    bounds : bpy.props.EnumProperty(
        items=(('MANUAL', "Manual Bounds", ""),
               ('COMPRESSION', "Compressed Only", ""),
               ('TENSION', "Extended Only", ""),
               ('AUTOMATIC', "Automatic Bounds", "")),
        default='AUTOMATIC', name="Bounds")

    mode : bpy.props.EnumProperty(
        items=(('MAX', "Max Deformation", ""),
               ('MEAN', "Average Deformation", "")),
        default='MEAN', name="Evaluation Mode")

    min_def : bpy.props.FloatProperty(
        name="Min", default=0, soft_min=-1, soft_max=0,
        description="Deformations with 0 weight")

    max_def : bpy.props.FloatProperty(
        name="Max", default=0.5, soft_min=0, soft_max=5,
        description="Deformations with 1 weight")

    # Text describing the strain bounds actually used, shown in the panel.
    bounds_string = ""

    # Last evaluated frame for physics objects (None when not using physics).
    frame = None

    @classmethod
    def poll(cls, context):
        return len(context.object.modifiers) > 0

    def draw(self, context):
        layout = self.layout
        col = layout.column(align=True)
        col.label(text="Evaluation Mode")
        col.prop(self, "mode", text="")
        col.label(text="Bounds")
        col.prop(self, "bounds", text="")
        if self.bounds == 'MANUAL':
            col.label(text="Strain Rate \u03B5:")
            col.prop(self, "min_def")
            col.prop(self, "max_def")
        col.label(text="\u03B5" + ": from " + self.bounds_string)

    def execute(self, context):
        """Bake per-vertex strain of the evaluated mesh into a new Vertex Group.

        The strain of each edge is (l1 - l0)/l0 between the original and the
        modifier-evaluated mesh; per-vertex values are the mean or the
        absolute maximum of the incident edges, remapped by the chosen
        bounds.

        Fixes: the topology-error message was concatenated without a space
        ("...should beunaltered"), and the evaluated mesh datablock was
        leaked on the CANCELLED paths.
        """
        try: ob = context.object
        except:
            self.report({'ERROR'}, "Please select an Object")
            return {'CANCELLED'}

        # check if the object is Cloth or Softbody
        physics = False
        for m in ob.modifiers:
            if m.type == 'CLOTH' or m.type == 'SOFT_BODY':
                physics = True
                # Restore the previously evaluated frame so the cached
                # simulation state is re-used.
                if context.scene.frame_current == 1 and self.frame != None:
                    context.scene.frame_current = self.frame
                break
        if not physics: self.frame = None

        if self.mode == 'MEAN': group_name = "Average Deformation"
        elif self.mode == 'MAX': group_name = "Max Deformation"
        ob.vertex_groups.new(name=group_name)
        me0 = ob.data

        # Evaluated copy of the mesh, with all visible modifiers applied.
        me = simple_to_mesh(ob)
        if len(me.vertices) != len(me0.vertices) or len(me.edges) != len(me0.edges):
            self.report({'ERROR'}, "The topology of the object should be "
                        "unaltered")
            bpy.data.meshes.remove(me)
            return {'CANCELLED'}

        # Edge strain between original and evaluated geometry.
        bm0 = bmesh.new()
        bm0.from_mesh(me0)
        bm = bmesh.new()
        bm.from_mesh(me)
        deformations = []
        for e0, e in zip(bm0.edges, bm.edges):
            try:
                l0 = e0.calc_length()
                l1 = e.calc_length()
                epsilon = (l1 - l0)/l0
                deformations.append(epsilon)
            except: deformations.append(1)

        # Per-vertex strain from the incident edges.
        v_deformations = []
        for v in bm.verts:
            vdef = []
            for e in v.link_edges:
                vdef.append(deformations[e.index])
            if self.mode == 'MEAN': v_deformations.append(mean(vdef))
            elif self.mode == 'MAX': v_deformations.append(max(vdef, key=abs))

        # Remap bounds; AUTOMATIC/COMPRESSION/TENSION write the computed
        # bounds back to the properties so the redo panel reflects them.
        if self.bounds == 'MANUAL':
            min_def = self.min_def
            max_def = self.max_def
        elif self.bounds == 'AUTOMATIC':
            min_def = min(v_deformations)
            max_def = max(v_deformations)
            self.min_def = min_def
            self.max_def = max_def
        elif self.bounds == 'COMPRESSION':
            # Compressed edges have negative strain, hence min().
            min_def = 0
            max_def = min(v_deformations)
            self.min_def = min_def
            self.max_def = max_def
        elif self.bounds == 'TENSION':
            min_def = 0
            max_def = max(v_deformations)
            self.min_def = min_def
            self.max_def = max_def
        delta_def = max_def - min_def

        # check undeformed errors
        if delta_def == 0:
            if self.bounds == 'MANUAL':
                delta_def = 0.0001
            else:
                message = "The object doesn't have deformations."
                if physics:
                    message = message + ("\nIf you are using Physics try to " +
                                         "save it in the cache before.")
                self.report({'ERROR'}, message)
                bpy.data.meshes.remove(me)
                return {'CANCELLED'}
        else:
            if physics:
                self.frame = context.scene.frame_current

        # Normalize strain into [0, 1] weights.
        for i in range(len(v_deformations)):
            weight = (v_deformations[i] - min_def)/delta_def
            ob.vertex_groups[-1].add([i], weight, 'REPLACE')
        self.bounds_string = str(round(min_def,2)) + " to " + str(round(max_def,2))
        ob.vertex_groups[-1].name = group_name + " " + self.bounds_string
        ob.vertex_groups.update()
        ob.data.update()
        bpy.ops.object.mode_set(mode='WEIGHT_PAINT')
        bpy.data.meshes.remove(me)
        return {'FINISHED'}
-
class edges_bending(bpy.types.Operator):
    bl_idname = "object.edges_bending"
    bl_label = "Edges Bending"
    bl_description = ("Compute Weight based on the bending of edges "
                      "according to visible modifiers")
    bl_options = {'REGISTER', 'UNDO'}

    bounds : bpy.props.EnumProperty(
        items=(('MANUAL', "Manual Bounds", ""),
               ('POSITIVE', "Positive Only", ""),
               ('NEGATIVE', "Negative Only", ""),
               ('UNSIGNED', "Absolute Bending", ""),
               ('AUTOMATIC', "Signed Bending", "")),
        default='AUTOMATIC', name="Bounds")

    min_def : bpy.props.FloatProperty(
        name="Min", default=-10, soft_min=-45, soft_max=45,
        description="Deformations with 0 weight")

    max_def : bpy.props.FloatProperty(
        name="Max", default=10, soft_min=-45, soft_max=45,
        description="Deformations with 1 weight")

    # Text describing the bounds actually used.
    bounds_string = ""
    # Last evaluated frame for physics objects (None when not using physics).
    frame = None

    @classmethod
    def poll(cls, context):
        return len(context.object.modifiers) > 0

    def draw(self, context):
        layout = self.layout
        layout.label(text="Bounds")
        layout.prop(self, "bounds", text="")
        if self.bounds == 'MANUAL':
            layout.prop(self, "min_def")
            layout.prop(self, "max_def")

    def execute(self, context):
        """Bake per-vertex bending of the evaluated mesh into a Vertex Group.

        Bending is the change of the signed face angle of each edge between
        the original and the modifier-evaluated mesh, averaged per vertex
        and remapped by the chosen bounds.

        Fixes: on a topology mismatch the operator previously reported the
        error but kept running on inconsistent data — it now cancels, like
        edges_deformation; the error message halves were concatenated
        without a space; the evaluated mesh is no longer leaked on the
        CANCELLED paths.
        """
        try: ob = context.object
        except:
            self.report({'ERROR'}, "Please select an Object")
            return {'CANCELLED'}

        group_name = "Edges Bending"
        ob.vertex_groups.new(name=group_name)

        # check if the object is Cloth or Softbody
        physics = False
        for m in ob.modifiers:
            if m.type == 'CLOTH' or m.type == 'SOFT_BODY':
                physics = True
                # Restore the previously evaluated frame so the cached
                # simulation state is re-used.
                if context.scene.frame_current == 1 and self.frame != None:
                    context.scene.frame_current = self.frame
                break
        if not physics: self.frame = None

        me0 = ob.data
        # Evaluated copy of the mesh, with all visible modifiers applied.
        me = simple_to_mesh(ob)
        if len(me.vertices) != len(me0.vertices) or len(me.edges) != len(me0.edges):
            self.report({'ERROR'}, "The topology of the object should be "
                        "unaltered")
            bpy.data.meshes.remove(me)
            return {'CANCELLED'}

        # Per-edge bending: difference of the signed face angles.
        bm0 = bmesh.new()
        bm0.from_mesh(me0)
        bm = bmesh.new()
        bm.from_mesh(me)
        deformations = []
        for e0, e in zip(bm0.edges, bm.edges):
            try:
                ang = e.calc_face_angle_signed()
                ang0 = e0.calc_face_angle_signed()
                if self.bounds == 'UNSIGNED':
                    deformations.append(abs(ang-ang0))
                else:
                    deformations.append(ang-ang0)
            except: deformations.append(0)

        # Per-vertex bending as the mean of the incident edges.
        v_deformations = []
        for v in bm.verts:
            vdef = []
            for e in v.link_edges:
                vdef.append(deformations[e.index])
            v_deformations.append(mean(vdef))

        if self.bounds == 'MANUAL':
            min_def = radians(self.min_def)
            max_def = radians(self.max_def)
        elif self.bounds == 'AUTOMATIC':
            min_def = min(v_deformations)
            max_def = max(v_deformations)
        elif self.bounds == 'POSITIVE':
            # NOTE(review): POSITIVE uses min() and NEGATIVE uses max() —
            # this mirrors the original code, but the labels look swapped
            # relative to the sign convention of calc_face_angle_signed.
            # Confirm before changing; behavior kept as-is.
            min_def = 0
            max_def = min(v_deformations)
        elif self.bounds == 'NEGATIVE':
            min_def = 0
            max_def = max(v_deformations)
        elif self.bounds == 'UNSIGNED':
            min_def = 0
            max_def = max(v_deformations)
        delta_def = max_def - min_def

        # check undeformed errors
        if delta_def == 0:
            if self.bounds == 'MANUAL':
                delta_def = 0.0001
            else:
                message = "The object doesn't have deformations."
                if physics:
                    message = message + ("\nIf you are using Physics try to " +
                                         "save it in the cache before.")
                self.report({'ERROR'}, message)
                bpy.data.meshes.remove(me)
                return {'CANCELLED'}
        else:
            if physics:
                self.frame = context.scene.frame_current

        # Normalize bending into [0, 1] weights.
        for i in range(len(v_deformations)):
            weight = (v_deformations[i] - min_def)/delta_def
            ob.vertex_groups[-1].add([i], weight, 'REPLACE')
        self.bounds_string = str(round(min_def,2)) + " to " + str(round(max_def,2))
        ob.vertex_groups[-1].name = group_name + " " + self.bounds_string
        ob.vertex_groups.update()
        ob.data.update()
        bpy.ops.object.mode_set(mode='WEIGHT_PAINT')
        bpy.data.meshes.remove(me)
        return {'FINISHED'}
-
class weight_contour_displace(bpy.types.Operator):
    """Cut the mesh along iso-contours of the active Vertex Group.

    Produces a new object "<name>_ContourDisp" whose edges follow the chosen
    iso values of the weight, with the weight remapped according to
    ``weight_mode`` and optionally a Displace modifier driven by it.
    """
    bl_idname = "object.weight_contour_displace"
    bl_label = "Contour Displace"
    bl_description = ("")
    bl_options = {'REGISTER', 'UNDO'}

    use_modifiers : bpy.props.BoolProperty(
        name="Use Modifiers", default=True,
        description="Apply all the modifiers")
    min_iso : bpy.props.FloatProperty(
        name="Min Iso Value", default=0.49, min=0, max=1,
        description="Threshold value")
    max_iso : bpy.props.FloatProperty(
        name="Max Iso Value", default=0.51, min=0, max=1,
        description="Threshold value")
    n_cuts : bpy.props.IntProperty(
        name="Cuts", default=2, min=1, soft_max=10,
        description="Number of cuts in the selected range of values")
    bool_displace : bpy.props.BoolProperty(
        name="Add Displace", default=True, description="Add Displace Modifier")
    bool_flip : bpy.props.BoolProperty(
        name="Flip", default=False, description="Flip Output Weight")

    weight_mode : bpy.props.EnumProperty(
        items=[('Remapped', 'Remapped', 'Remap values'),
               ('Alternate', 'Alternate', 'Alternate 0 and 1'),
               ('Original', 'Original', 'Keep original Vertex Group')],
        name="Weight", description="Choose how to convert vertex group",
        default="Remapped", options={'LIBRARY_EDITABLE'})

    @classmethod
    def poll(cls, context):
        return len(context.object.vertex_groups) > 0

    def invoke(self, context, event):
        return context.window_manager.invoke_props_dialog(self, width=350)

    def execute(self, context):
        """Split faces along each iso value, build the new object and weights."""
        start_time = timeit.default_timer()
        try:
            check = bpy.context.object.vertex_groups[0]
        except:
            self.report({'ERROR'}, "The object doesn't have Vertex Groups")
            return {'CANCELLED'}

        ob0 = bpy.context.object

        group_id = ob0.vertex_groups.active_index
        vertex_group_name = ob0.vertex_groups[group_id].name

        bpy.ops.object.mode_set(mode='EDIT')
        bpy.ops.mesh.select_all(action='SELECT')
        bpy.ops.object.mode_set(mode='OBJECT')
        if self.use_modifiers:
            #me0 = ob0.to_mesh(preserve_all_data_layers=True, depsgraph=bpy.context.evaluated_depsgraph_get()).copy()
            me0 = simple_to_mesh(ob0)
        else:
            me0 = ob0.data.copy()

        # generate new bmesh
        bm = bmesh.new()
        bm.from_mesh(me0)
        bm.verts.ensure_lookup_table()
        bm.edges.ensure_lookup_table()
        bm.faces.ensure_lookup_table()

        # store weight values
        # A temporary object is needed because VertexGroup.weight() is only
        # available through an object; unassigned vertices default to 0.
        weight = []
        ob = bpy.data.objects.new("temp", me0)
        for g in ob0.vertex_groups:
            ob.vertex_groups.new(name=g.name)
        for v in me0.vertices:
            try:
                weight.append(ob.vertex_groups[vertex_group_name].weight(v.index))
            except:
                weight.append(0)

        # define iso values
        # n_cuts evenly spaced values between min_iso and max_iso.
        iso_values = []
        for i_cut in range(self.n_cuts):
            delta_iso = abs(self.max_iso - self.min_iso)
            min_iso = min(self.min_iso, self.max_iso)
            max_iso = max(self.min_iso, self.max_iso)
            if delta_iso == 0: iso_val = min_iso
            elif self.n_cuts > 1: iso_val = i_cut/(self.n_cuts-1)*delta_iso + min_iso
            else: iso_val = (self.max_iso + self.min_iso)/2
            iso_values.append(iso_val)

        # Start Cuts Iterations
        # filtered_edges shrinks each pass to the edges that can still be
        # crossed by a later (higher) iso value.
        filtered_edges = bm.edges
        for iso_val in iso_values:
            delete_edges = []

            # Faces whose weight range straddles the iso value get split.
            faces_mask = []
            for f in bm.faces:
                w_min = 2
                w_max = 2
                for v in f.verts:
                    w = weight[v.index]
                    # 2 is a sentinel: weights never exceed 1, so the first
                    # vertex initializes both bounds.
                    if w_min == 2:
                        w_max = w_min = w
                    if w > w_max: w_max = w
                    if w < w_min: w_min = w
                    if w_min < iso_val and w_max > iso_val:
                        faces_mask.append(f)
                        break

            #link_faces = [[f for f in e.link_faces] for e in bm.edges]

            #faces_todo = [f.select for f in bm.faces]
            #faces_todo = [True for f in bm.faces]
            # For every crossed edge, create the interpolated vertex and
            # remember its future index in edges_id (keyed both ways).
            verts = []
            edges = []
            edges_id = {}
            _filtered_edges = []
            n_verts = len(bm.verts)
            count = n_verts
            for e in filtered_edges:
                #id0 = e.vertices[0]
                #id1 = e.vertices[1]
                id0 = e.verts[0].index
                id1 = e.verts[1].index
                w0 = weight[id0]
                w1 = weight[id1]

                if w0 == w1: continue
                elif w0 > iso_val and w1 > iso_val:
                    _filtered_edges.append(e)
                    continue
                elif w0 < iso_val and w1 < iso_val: continue
                elif w0 == iso_val or w1 == iso_val:
                    _filtered_edges.append(e)
                    continue
                else:
                    # Linear interpolation of the crossing point.
                    v0 = bm.verts[id0].co
                    v1 = bm.verts[id1].co
                    v = v0.lerp(v1, (iso_val-w0)/(w1-w0))
                    if e not in delete_edges:
                        delete_edges.append(e)
                    verts.append(v)
                    edges_id[str(id0)+"_"+str(id1)] = count
                    edges_id[str(id1)+"_"+str(id0)] = count
                    count += 1
                    _filtered_edges.append(e)
            filtered_edges = _filtered_edges
            splitted_faces = []

            switch = False
            # splitting faces
            for f in faces_mask:
                # create sub-faces slots. Once a new vertex is reached it will
                # change slot, storing the next vertices for a new face.
                build_faces = [[],[]]
                #switch = False
                verts0 = [v.index for v in f.verts]
                verts1 = list(verts0)
                verts1.append(verts1.pop(0)) # shift list
                for id0, id1 in zip(verts0, verts1):

                    # add first vertex to active slot
                    build_faces[switch].append(id0)

                    # try to split edge
                    try:
                        # check if the edge must be splitted
                        new_vert = edges_id[str(id0)+"_"+str(id1)]
                        # add new vertex
                        build_faces[switch].append(new_vert)
                        # if there is an open face on the other slot
                        if len(build_faces[not switch]) > 0:
                            # store actual face
                            splitted_faces.append(build_faces[switch])
                            # reset actual faces and switch
                            build_faces[switch] = []
                            # change face slot
                            switch = not switch
                            # continue previous face
                            build_faces[switch].append(new_vert)
                    except: pass
                if len(build_faces[not switch]) == 2:
                    build_faces[not switch].append(id0)
                if len(build_faces[not switch]) > 2:
                    splitted_faces.append(build_faces[not switch])
                # add last face
                splitted_faces.append(build_faces[switch])
                #del_faces.append(f.index)

            # adding new vertices
            for v in verts: new_vert = bm.verts.new(v)
            bm.verts.index_update()
            bm.verts.ensure_lookup_table()
            # adding new faces
            missed_faces = []
            added_faces = []
            for f in splitted_faces:
                try:
                    face_verts = [bm.verts[i] for i in f]
                    new_face = bm.faces.new(face_verts)
                    # Edges of the new faces may be crossed by later iso
                    # values, so they re-enter the candidate set.
                    for e in new_face.edges:
                        filtered_edges.append(e)
                except:
                    missed_faces.append(f)

            bm.faces.ensure_lookup_table()
            # updating weight values
            # New vertices sit exactly on the contour, so their weight is iso_val.
            weight = weight + [iso_val]*len(verts)

            # deleting old edges/faces
            bm.edges.ensure_lookup_table()
            for e in delete_edges:
                bm.edges.remove(e)
            _filtered_edges = []
            for e in filtered_edges:
                if e not in delete_edges: _filtered_edges.append(e)
            filtered_edges = _filtered_edges

        name = ob0.name + '_ContourDisp'
        me = bpy.data.meshes.new(name)
        bm.to_mesh(me)
        ob = bpy.data.objects.new(name, me)

        # Link object to scene and make active
        scn = bpy.context.scene
        bpy.context.collection.objects.link(ob)
        bpy.context.view_layer.objects.active = ob
        ob.select_set(True)
        ob0.select_set(False)

        # generate new vertex group
        for g in ob0.vertex_groups:
            ob.vertex_groups.new(name=g.name)
        #ob.vertex_groups.new(name=vertex_group_name)

        # NOTE(review): weight was already extended by [iso_val]*len(verts)
        # inside the last loop iteration; this second extension appends
        # len(verts) extra entries beyond the vertex count — confirm intent.
        all_weight = weight + [iso_val]*len(verts)
        #mult = 1/(1-iso_val)
        for id in range(len(all_weight)):
            #if False: w = (all_weight[id]-iso_val)*mult
            w = all_weight[id]
            if self.weight_mode == 'Alternate':
                # Alternate 0→1 / 1→0 ramps between consecutive iso bands.
                direction = self.bool_flip
                for i in range(len(iso_values)-1):
                    val0, val1 = iso_values[i], iso_values[i+1]
                    if val0 < w <= val1:
                        if direction: w1 = (w-val0)/(val1-val0)
                        else: w1 = (val1-w)/(val1-val0)
                    direction = not direction
                if w < iso_values[0]: w1 = not self.bool_flip
                if w > iso_values[-1]: w1 = not direction
            elif self.weight_mode == 'Remapped':
                # Stretch [min_iso, max_iso] to the full [0, 1] range.
                if w < min_iso: w1 = 0
                elif w > max_iso: w1 = 1
                else: w1 = (w - min_iso)/delta_iso
            else:
                # 'Original': keep (optionally flipped) weights.
                if self.bool_flip: w1 = 1-w
                else: w1 = w
            ob.vertex_groups[vertex_group_name].add([id], w1, 'REPLACE')

        ob.vertex_groups.active_index = group_id

        # align new object
        ob.matrix_world = ob0.matrix_world

        # Displace Modifier
        if self.bool_displace:
            ob.modifiers.new(type='DISPLACE', name='Displace')
            ob.modifiers["Displace"].mid_level = 0
            ob.modifiers["Displace"].strength = 0.1
            ob.modifiers['Displace'].vertex_group = vertex_group_name

        bpy.ops.object.mode_set(mode='EDIT')
        bpy.ops.object.mode_set(mode='WEIGHT_PAINT')
        print("Contour Displace time: " + str(timeit.default_timer() - start_time) + " sec")

        bpy.data.meshes.remove(me0)

        return {'FINISHED'}
-
class weight_contour_mask(bpy.types.Operator):
    """Cut the mesh at one iso value of the active Vertex Group and delete
    the geometry below it, producing a new masked object."""
    bl_idname = "object.weight_contour_mask"
    bl_label = "Contour Mask"
    bl_description = ("")
    bl_options = {'REGISTER', 'UNDO'}

    use_modifiers : bpy.props.BoolProperty(
        name="Use Modifiers", default=True,
        description="Apply all the modifiers")
    iso : bpy.props.FloatProperty(
        name="Iso Value", default=0.5, soft_min=0, soft_max=1,
        description="Threshold value")
    bool_solidify : bpy.props.BoolProperty(
        name="Solidify", default=True, description="Add Solidify Modifier")
    normalize_weight : bpy.props.BoolProperty(
        name="Normalize Weight", default=True,
        description="Normalize weight of remaining vertices")

    @classmethod
    def poll(cls, context):
        return len(context.object.vertex_groups) > 0

    def execute(self, context):
        """Split crossed faces at iso, drop sub-iso geometry, build new object."""
        start_time = timeit.default_timer()
        try:
            check = bpy.context.object.vertex_groups[0]
        except:
            self.report({'ERROR'}, "The object doesn't have Vertex Groups")
            return {'CANCELLED'}

        ob0 = bpy.context.object

        iso_val = self.iso
        group_id = ob0.vertex_groups.active_index
        vertex_group_name = ob0.vertex_groups[group_id].name

        bpy.ops.object.mode_set(mode='EDIT')
        bpy.ops.mesh.select_all(action='SELECT')
        bpy.ops.object.mode_set(mode='OBJECT')
        if self.use_modifiers:
            me0 = simple_to_mesh(ob0)#ob0.to_mesh(preserve_all_data_layers=True, depsgraph=bpy.context.evaluated_depsgraph_get()).copy()
        else:
            me0 = ob0.data.copy()

        # generate new bmesh
        bm = bmesh.new()
        bm.from_mesh(me0)
        bm.verts.ensure_lookup_table()
        bm.edges.ensure_lookup_table()
        bm.faces.ensure_lookup_table()

        # store weight values
        # Temporary object because VertexGroup.weight() needs an object;
        # unassigned vertices default to 0.
        weight = []
        ob = bpy.data.objects.new("temp", me0)
        for g in ob0.vertex_groups:
            ob.vertex_groups.new(name=g.name)
        for v in me0.vertices:
            try:
                #weight.append(v.groups[vertex_group_name].weight)
                weight.append(ob.vertex_groups[vertex_group_name].weight(v.index))
            except:
                weight.append(0)

        # Faces whose weight range straddles the iso value must be split.
        faces_mask = []
        for f in bm.faces:
            w_min = 2
            w_max = 2
            for v in f.verts:
                w = weight[v.index]
                # 2 is a sentinel (weights never exceed 1): the first vertex
                # initializes both bounds.
                if w_min == 2:
                    w_max = w_min = w
                if w > w_max: w_max = w
                if w < w_min: w_min = w
                if w_min < iso_val and w_max > iso_val:
                    faces_mask.append(f)
                    break

        # For every crossed edge, create the interpolated vertex and store
        # its future index in edges_id (keyed in both directions).
        filtered_edges = bm.edges# me0.edges
        faces_todo = [f.select for f in bm.faces]
        verts = []
        edges = []
        delete_edges = []
        edges_id = {}
        _filtered_edges = []
        n_verts = len(bm.verts)
        count = n_verts
        for e in filtered_edges:
            id0 = e.verts[0].index
            id1 = e.verts[1].index
            w0 = weight[id0]
            w1 = weight[id1]

            if w0 == w1: continue
            elif w0 > iso_val and w1 > iso_val:
                continue
            elif w0 < iso_val and w1 < iso_val: continue
            elif w0 == iso_val or w1 == iso_val: continue
            else:
                # Linear interpolation of the crossing point.
                v0 = me0.vertices[id0].co
                v1 = me0.vertices[id1].co
                v = v0.lerp(v1, (iso_val-w0)/(w1-w0))
                delete_edges.append(e)
                verts.append(v)
                edges_id[str(id0)+"_"+str(id1)] = count
                edges_id[str(id1)+"_"+str(id0)] = count
                count += 1

        splitted_faces = []

        switch = False
        # splitting faces
        for f in faces_mask:
            # create sub-faces slots. Once a new vertex is reached it will
            # change slot, storing the next vertices for a new face.
            build_faces = [[],[]]
            #switch = False
            verts0 = list(me0.polygons[f.index].vertices)
            verts1 = list(verts0)
            verts1.append(verts1.pop(0)) # shift list
            for id0, id1 in zip(verts0, verts1):

                # add first vertex to active slot
                build_faces[switch].append(id0)

                # try to split edge
                try:
                    # check if the edge must be splitted
                    new_vert = edges_id[str(id0)+"_"+str(id1)]
                    # add new vertex
                    build_faces[switch].append(new_vert)
                    # if there is an open face on the other slot
                    if len(build_faces[not switch]) > 0:
                        # store actual face
                        splitted_faces.append(build_faces[switch])
                        # reset actual faces and switch
                        build_faces[switch] = []
                        # change face slot
                        switch = not switch
                        # continue previous face
                        build_faces[switch].append(new_vert)
                except: pass
            if len(build_faces[not switch]) == 2:
                build_faces[not switch].append(id0)
            if len(build_faces[not switch]) > 2:
                splitted_faces.append(build_faces[not switch])
            # add last face
            splitted_faces.append(build_faces[switch])

        # adding new vertices
        for v in verts: bm.verts.new(v)
        bm.verts.ensure_lookup_table()

        # deleting old edges/faces
        bm.edges.ensure_lookup_table()
        remove_edges = []
        for e in delete_edges: bm.edges.remove(e)

        bm.verts.ensure_lookup_table()
        # adding new faces
        missed_faces = []
        for f in splitted_faces:
            try:
                face_verts = [bm.verts[i] for i in f]
                bm.faces.new(face_verts)
            except:
                missed_faces.append(f)

        # Mask geometry
        # New contour vertices get iso_val+0.0001 so they survive the cut.
        # NOTE(review): vertices are removed while iterating bm.verts —
        # this matches the original code; confirm bmesh tolerates it here.
        if(True):
            all_weight = weight + [iso_val+0.0001]*len(verts)
            weight = []
            for w, v in zip(all_weight, bm.verts):
                if w < iso_val: bm.verts.remove(v)
                else: weight.append(w)

        # Create mesh and object
        name = ob0.name + '_ContourMask_{:.3f}'.format(iso_val)
        me = bpy.data.meshes.new(name)
        bm.to_mesh(me)
        ob = bpy.data.objects.new(name, me)

        # Link object to scene and make active
        scn = bpy.context.scene
        bpy.context.collection.objects.link(ob)
        bpy.context.view_layer.objects.active = ob
        ob.select_set(True)
        ob0.select_set(False)

        # generate new vertex group
        for g in ob0.vertex_groups:
            ob.vertex_groups.new(name=g.name)

        # Optionally remap the surviving weights from [iso_val, 1] to [0, 1].
        if iso_val != 1: mult = 1/(1-iso_val)
        else: mult = 1
        for id in range(len(weight)):
            if self.normalize_weight: w = (weight[id]-iso_val)*mult
            else: w = weight[id]
            ob.vertex_groups[vertex_group_name].add([id], w, 'REPLACE')
        ob.vertex_groups.active_index = group_id

        # align new object
        ob.matrix_world = ob0.matrix_world

        # Add Solidify
        if self.bool_solidify and True:
            ob.modifiers.new(type='SOLIDIFY', name='Solidify')
            ob.modifiers['Solidify'].thickness = 0.05
            ob.modifiers['Solidify'].offset = 0
            ob.modifiers['Solidify'].vertex_group = vertex_group_name

        bpy.ops.object.mode_set(mode='EDIT')
        bpy.ops.object.mode_set(mode='WEIGHT_PAINT')
        print("Contour Mask time: " + str(timeit.default_timer() - start_time) + " sec")

        bpy.data.meshes.remove(me0)

        return {'FINISHED'}
-
class weight_contour_curves(bpy.types.Operator):
    """Generate a curve object whose splines follow iso-contours of the
    active Vertex Group, with per-point radius interpolated between
    min_rad and max_rad across the contour levels."""
    bl_idname = "object.weight_contour_curves"
    bl_label = "Contour Curves"
    bl_description = ("")
    bl_options = {'REGISTER', 'UNDO'}

    use_modifiers : bpy.props.BoolProperty(
        name="Use Modifiers", default=True,
        description="Apply all the modifiers")

    min_iso : bpy.props.FloatProperty(
        name="Min Value", default=0., soft_min=0, soft_max=1,
        description="Minimum weight value")
    max_iso : bpy.props.FloatProperty(
        name="Max Value", default=1, soft_min=0, soft_max=1,
        description="Maximum weight value")
    n_curves : bpy.props.IntProperty(
        name="Curves", default=3, soft_min=1, soft_max=10,
        description="Number of Contour Curves")

    min_rad : bpy.props.FloatProperty(
        name="Min Radius", default=0.25, soft_min=0, soft_max=1,
        description="Minimum Curve Radius")
    max_rad : bpy.props.FloatProperty(
        name="Max Radius", default=0.75, soft_min=0, soft_max=1,
        description="Maximum Curve Radius")

    @classmethod
    def poll(cls, context):
        ob = context.object
        return len(ob.vertex_groups) > 0 or ob.type == 'CURVE'

    def invoke(self, context, event):
        return context.window_manager.invoke_props_dialog(self, width=350)

    def execute(self, context):
        """Intersect each iso level with the mesh edges and convert the
        resulting segments into a beveled curve object."""
        start_time = timeit.default_timer()
        try:
            check = bpy.context.object.vertex_groups[0]
        except:
            self.report({'ERROR'}, "The object doesn't have Vertex Groups")
            return {'CANCELLED'}
        ob0 = bpy.context.object

        group_id = ob0.vertex_groups.active_index
        vertex_group_name = ob0.vertex_groups[group_id].name

        bpy.ops.object.mode_set(mode='EDIT')
        bpy.ops.mesh.select_all(action='SELECT')
        bpy.ops.object.mode_set(mode='OBJECT')
        if self.use_modifiers:
            me0 = simple_to_mesh(ob0) #ob0.to_mesh(preserve_all_data_layers=True, depsgraph=bpy.context.evaluated_depsgraph_get()).copy()
        else:
            me0 = ob0.data.copy()

        # generate new bmesh
        bm = bmesh.new()
        bm.from_mesh(me0)
        bm.verts.ensure_lookup_table()
        bm.edges.ensure_lookup_table()
        bm.faces.ensure_lookup_table()

        # store weight values
        # Temporary object because VertexGroup.weight() needs an object;
        # unassigned vertices default to 0.
        weight = []
        ob = bpy.data.objects.new("temp", me0)
        for g in ob0.vertex_groups:
            ob.vertex_groups.new(name=g.name)
        for v in me0.vertices:
            try:
                #weight.append(v.groups[vertex_group_name].weight)
                weight.append(ob.vertex_groups[vertex_group_name].weight(v.index))
            except:
                weight.append(0)

        filtered_edges = bm.edges
        total_verts = []
        total_segments = []
        radius = []

        # start iterate contours levels
        for c in range(self.n_curves):
            min_iso = min(self.min_iso, self.max_iso)
            max_iso = max(self.min_iso, self.max_iso)
            # Evenly spaced iso value; division by zero (n_curves == 1)
            # falls back to the midpoint.
            try:
                iso_val = c*(max_iso-min_iso)/(self.n_curves-1)+min_iso
                if iso_val < 0: iso_val = (min_iso + max_iso)/2
            except:
                iso_val = (min_iso + max_iso)/2

            # Faces whose weight range straddles the iso value carry a
            # contour segment.
            faces_mask = []
            for f in bm.faces:
                w_min = 2
                w_max = 2
                for v in f.verts:
                    w = weight[v.index]
                    # 2 is a sentinel (weights never exceed 1).
                    if w_min == 2:
                        w_max = w_min = w
                    if w > w_max: w_max = w
                    if w < w_min: w_min = w
                    if w_min < iso_val and w_max > iso_val:
                        faces_mask.append(f)
                        break

            faces_todo = [f.select for f in bm.faces]
            verts = []

            # Interpolate one point on every crossed edge; edges entirely
            # above the current level stay candidates for later levels.
            edges_id = {}
            _filtered_edges = []
            n_verts = len(bm.verts)
            count = len(total_verts)
            for e in filtered_edges:
                id0 = e.verts[0].index
                id1 = e.verts[1].index
                w0 = weight[id0]
                w1 = weight[id1]

                if w0 == w1: continue
                elif w0 > iso_val and w1 > iso_val:
                    _filtered_edges.append(e)
                    continue
                elif w0 < iso_val and w1 < iso_val: continue
                elif w0 == iso_val or w1 == iso_val:
                    _filtered_edges.append(e)
                    continue
                else:
                    #v0 = me0.vertices[id0].select = True
                    #v1 = me0.vertices[id1].select = True
                    v0 = me0.vertices[id0].co
                    v1 = me0.vertices[id1].co
                    v = v0.lerp(v1, (iso_val-w0)/(w1-w0))
                    verts.append(v)
                    edges_id[e.index] = count
                    count += 1
                    _filtered_edges.append(e)
            filtered_edges = _filtered_edges

            if len(verts) == 0: continue

            # finding segments
            # Each crossed face contributes segments joining pairs of the
            # interpolated points on its edges.
            segments = []
            for f in faces_mask:
                seg = []
                for e in f.edges:
                    try:
                        seg.append(edges_id[e.index])
                        if len(seg) == 2:
                            segments.append(seg)
                            seg = []
                    except: pass

            total_segments = total_segments + segments
            total_verts = total_verts + verts

            # Radius
            # Per-level radius, interpolated like the iso values.
            try:
                iso_rad = c*(self.max_rad-self.min_rad)/(self.n_curves-1)+self.min_rad
                if iso_rad < 0: iso_rad = (self.min_rad + self.max_rad)/2
            except:
                iso_rad = (self.min_rad + self.max_rad)/2
            radius = radius + [iso_rad]*len(verts)

        # Build a fresh bmesh with only the contour points and segments.
        bm = bmesh.new()
        # adding new vertices
        for v in total_verts: bm.verts.new(v)
        bm.verts.ensure_lookup_table()

        # adding new edges
        for s in total_segments:
            try:
                pts = [bm.verts[i] for i in s]
                bm.edges.new(pts)
            except: pass

        try:
            name = ob0.name + '_ContourCurves'
            me = bpy.data.meshes.new(name)
            bm.to_mesh(me)
            ob = bpy.data.objects.new(name, me)

            # Link object to scene and make active
            scn = bpy.context.scene
            bpy.context.collection.objects.link(ob)
            bpy.context.view_layer.objects.active = ob
            ob.select_set(True)
            ob0.select_set(False)

            # Convert the edge mesh to a curve and assign per-point radii.
            bpy.ops.object.convert(target='CURVE')
            ob = context.object
            count = 0
            for s in ob.data.splines:
                for p in s.points:
                    p.radius = radius[count]
                    count += 1
            ob.data.bevel_depth = 0.01
            ob.data.fill_mode = 'FULL'
            ob.data.bevel_resolution = 3
        except:
            self.report({'ERROR'}, "There are no values in the chosen range")
            return {'CANCELLED'}

        # align new object
        ob.matrix_world = ob0.matrix_world
        print("Contour Curves time: " + str(timeit.default_timer() - start_time) + " sec")

        bpy.data.meshes.remove(me0)
        bpy.data.meshes.remove(me)

        return {'FINISHED'}
-
class vertex_colors_to_vertex_groups(bpy.types.Operator):
    """Convert the active Vertex Color layer into one Vertex Group per
    selected channel (red / green / blue / luminance value)."""
    bl_idname = "object.vertex_colors_to_vertex_groups"
    bl_label = "Vertex Color"
    bl_options = {'REGISTER', 'UNDO'}
    bl_description = ("Convert the active Vertex Color into a Vertex Group")

    red : bpy.props.BoolProperty(
        name="red channel", default=False, description="convert red channel")
    green : bpy.props.BoolProperty(
        name="green channel", default=False,
        description="convert green channel")
    blue : bpy.props.BoolProperty(
        name="blue channel", default=False, description="convert blue channel")
    value : bpy.props.BoolProperty(
        name="value channel", default=True, description="convert value channel")
    invert : bpy.props.BoolProperty(
        name="invert", default=False, description="invert all color channels")

    @classmethod
    def poll(cls, context):
        return len(context.object.data.vertex_colors) > 0

    def execute(self, context):
        """Create the requested groups and fill them from the color loops."""
        obj = bpy.context.active_object
        # Index where the first new group will be created; each channel id
        # starts there so unused channels still pass the bounds check below.
        id = len(obj.vertex_groups)
        id_red = id
        id_green = id
        id_blue = id
        id_value = id

        boolCol = len(obj.data.vertex_colors)
        if(boolCol): col_name = obj.data.vertex_colors.active.name
        bpy.ops.object.mode_set(mode='EDIT')
        bpy.ops.mesh.select_all(action='SELECT')

        # One group per requested channel, all vertices assigned, created in
        # the fixed order red, green, blue, value.
        if(self.red and boolCol):
            bpy.ops.object.vertex_group_add()
            bpy.ops.object.vertex_group_assign()
            id_red = id
            obj.vertex_groups[id_red].name = col_name + '_red'
            id+=1
        if(self.green and boolCol):
            bpy.ops.object.vertex_group_add()
            bpy.ops.object.vertex_group_assign()
            id_green = id
            obj.vertex_groups[id_green].name = col_name + '_green'
            id+=1
        if(self.blue and boolCol):
            bpy.ops.object.vertex_group_add()
            bpy.ops.object.vertex_group_assign()
            id_blue = id
            obj.vertex_groups[id_blue].name = col_name + '_blue'
            id+=1
        if(self.value and boolCol):
            bpy.ops.object.vertex_group_add()
            bpy.ops.object.vertex_group_assign()
            id_value = id
            obj.vertex_groups[id_value].name = col_name + '_value'
            id+=1

        # With invert enabled the weight becomes 1 - channel (mult = -1 and
        # self.invert contributes the leading 1 in the sums below).
        mult = 1
        if(self.invert): mult = -1
        bpy.ops.object.mode_set(mode='OBJECT')
        # Offsets of each channel's group counted from the END of a vertex's
        # group list, based on the fixed creation order above.
        sub_red = 1 + self.value + self.blue + self.green
        sub_green = 1 + self.value + self.blue
        sub_blue = 1 + self.value
        sub_value = 1

        id = len(obj.vertex_groups)
        if(id_red <= id and id_green <= id and id_blue <= id and id_value <= \
        id and boolCol):
            v_colors = obj.data.vertex_colors.active.data
            # i walks the per-face-corner (loop) color data in polygon order.
            i = 0
            for f in obj.data.polygons:
                for v in f.vertices:
                    gr = obj.data.vertices[v].groups
                    if(self.red): gr[min(len(gr)-sub_red, id_red)].weight = \
                        self.invert + mult * v_colors[i].color[0]
                    if(self.green): gr[min(len(gr)-sub_green, id_green)].weight\
                        = self.invert + mult * v_colors[i].color[1]
                    if(self.blue): gr[min(len(gr)-sub_blue, id_blue)].weight = \
                        self.invert + mult * v_colors[i].color[2]
                    if(self.value):
                        # Rec. 709 luma coefficients.
                        r = v_colors[i].color[0]
                        g = v_colors[i].color[1]
                        b = v_colors[i].color[2]
                        gr[min(len(gr)-sub_value, id_value)].weight\
                        = self.invert + mult * (0.2126*r + 0.7152*g + 0.0722*b)
                    i+=1
        bpy.ops.paint.weight_paint_toggle()
        return {'FINISHED'}
-
-class vertex_group_to_vertex_colors(bpy.types.Operator):
- bl_idname = "object.vertex_group_to_vertex_colors"
- bl_label = "Vertex Group"
- bl_options = {'REGISTER', 'UNDO'}
- bl_description = ("Convert the active Vertex Group into a Vertex Color")
-
- channel : bpy.props.EnumProperty(
- items=[('Blue', 'Blue Channel', 'Convert to Blue Channel'),
- ('Green', 'Green Channel', 'Convert to Green Channel'),
- ('Red', 'Red Channel', 'Convert to Red Channel'),
- ('Value', 'Value Channel', 'Convert to Grayscale'),
- ('False Colors', 'False Colors', 'Convert to False Colors')],
- name="Convert to", description="Choose how to convert vertex group",
- default="Value", options={'LIBRARY_EDITABLE'})
-
- invert : bpy.props.BoolProperty(
- name="invert", default=False, description="invert color channel")
-
- @classmethod
- def poll(cls, context):
- return len(context.object.vertex_groups) > 0
-
- def execute(self, context):
- obj = bpy.context.active_object
- group_id = obj.vertex_groups.active_index
- if (group_id == -1):
- return {'FINISHED'}
-
- bpy.ops.object.mode_set(mode='OBJECT')
- group_name = obj.vertex_groups[group_id].name
- bpy.ops.mesh.vertex_color_add()
- colors_id = obj.data.vertex_colors.active_index
-
- colors_name = group_name
- if(self.channel == 'False Colors'): colors_name += "_false_colors"
- elif(self.channel == 'Value'): colors_name += "_value"
- elif(self.channel == 'Red'): colors_name += "_red"
- elif(self.channel == 'Green'): colors_name += "_green"
- elif(self.channel == 'Blue'): colors_name += "_blue"
- bpy.context.object.data.vertex_colors[colors_id].name = colors_name
-
- v_colors = obj.data.vertex_colors.active.data
-
- mult = 1
- if(self.invert): mult = -1
-
- i = 0
- for f in obj.data.polygons:
- for v in f.vertices:
- gr = obj.data.vertices[v].groups
-
- if(self.channel == 'False Colors'): v_colors[i].color = (0,0,0.5,1)
- else: v_colors[i].color = (0,0,0,1)
-
- for g in gr:
- if g.group == group_id:
- w = g.weight
- if(self.channel == 'False Colors'):
- mult = 0.6+0.4*w
- if w < 0.25:
- v_colors[i].color = (0, w*4*mult, 1*mult,1)
- elif w < 0.5:
- v_colors[i].color = (0, 1*mult, (1-(w-0.25)*4)*mult,1)
- elif w < 0.75:
- v_colors[i].color = ((w-0.5)*4*mult,1*mult,0,1)
- else:
- v_colors[i].color = (1*mult,(1-(w-0.75)*4)*mult,0,1)
- elif(self.channel == 'Value'):
- v_colors[i].color = (
- self.invert + mult * w,
- self.invert + mult * w,
- self.invert + mult * w,
- 1)
- elif(self.channel == 'Red'):
- v_colors[i].color = (
- self.invert + mult * w,0,0,1)
- elif(self.channel == 'Green'):
- v_colors[i].color = (
- 0, self.invert + mult * w,0,1)
- elif(self.channel == 'Blue'):
- v_colors[i].color = (
- 0,0, self.invert + mult * w,1)
- i+=1
- bpy.ops.paint.vertex_paint_toggle()
- bpy.context.object.data.vertex_colors[colors_id].active_render = True
- return {'FINISHED'}
-
-class curvature_to_vertex_groups(bpy.types.Operator):
- bl_idname = "object.curvature_to_vertex_groups"
- bl_label = "Curvature"
- bl_options = {'REGISTER', 'UNDO'}
- bl_description = ("Generate a Vertex Group based on the curvature of the "
- "mesh. Is based on Dirty Vertex Color")
-
- invert : bpy.props.BoolProperty(
- name="invert", default=False, description="invert values")
-
- blur_strength : bpy.props.FloatProperty(
- name="Blur Strength", default=1, min=0.001,
- max=1, description="Blur strength per iteration")
-
- blur_iterations : bpy.props.IntProperty(
- name="Blur Iterations", default=1, min=0,
- max=40, description="Number of times to blur the values")
-
- min_angle : bpy.props.FloatProperty(
- name="Min Angle", default=0, min=0,
- max=pi/2, subtype='ANGLE', description="Minimum angle")
-
- max_angle : bpy.props.FloatProperty(
- name="Max Angle", default=pi, min=pi/2,
- max=pi, subtype='ANGLE', description="Maximum angle")
-
- invert : bpy.props.BoolProperty(
- name="Invert", default=False,
- description="Invert the curvature map")
-
- def execute(self, context):
- bpy.ops.object.mode_set(mode='OBJECT')
- bpy.ops.mesh.vertex_color_add()
- vertex_colors = bpy.context.active_object.data.vertex_colors
- vertex_colors[-1].active = True
- vertex_colors[-1].active_render = True
- vertex_colors[-1].name = "Curvature"
- for c in vertex_colors[-1].data: c.color = (1,1,1,1)
- bpy.ops.object.mode_set(mode='VERTEX_PAINT')
- bpy.ops.paint.vertex_color_dirt(
- blur_strength=self.blur_strength,
- blur_iterations=self.blur_iterations, clean_angle=self.max_angle,
- dirt_angle=self.min_angle)
- bpy.ops.object.vertex_colors_to_vertex_groups(invert=self.invert)
- bpy.ops.mesh.vertex_color_remove()
- return {'FINISHED'}
-
-
-class face_area_to_vertex_groups(bpy.types.Operator):
- bl_idname = "object.face_area_to_vertex_groups"
- bl_label = "Area"
- bl_options = {'REGISTER', 'UNDO'}
- bl_description = ("Generate a Vertex Group based on the area of individual "
- "faces")
-
- invert : bpy.props.BoolProperty(
- name="invert", default=False, description="invert values")
- bounds : bpy.props.EnumProperty(
- items=(('MANUAL', "Manual Bounds", ""),
- ('AUTOMATIC', "Automatic Bounds", "")),
- default='AUTOMATIC', name="Bounds")
-
- min_area : bpy.props.FloatProperty(
- name="Min", default=0.01, soft_min=0, soft_max=1,
- description="Faces with 0 weight")
-
- max_area : bpy.props.FloatProperty(
- name="Max", default=0.1, soft_min=0, soft_max=1,
- description="Faces with 1 weight")
-
- def draw(self, context):
- layout = self.layout
- layout.label(text="Bounds")
- layout.prop(self, "bounds", text="")
- if self.bounds == 'MANUAL':
- layout.prop(self, "min_area")
- layout.prop(self, "max_area")
-
- def execute(self, context):
- try: ob = context.object
- except:
- self.report({'ERROR'}, "Please select an Object")
- return {'CANCELLED'}
- ob.vertex_groups.new(name="Faces Area")
-
- areas = [[] for v in ob.data.vertices]
-
- for p in ob.data.polygons:
- for v in p.vertices:
- areas[v].append(p.area)
-
- for i in range(len(areas)):
- areas[i] = mean(areas[i])
- if self.bounds == 'MANUAL':
- min_area = self.min_area
- max_area = self.max_area
- elif self.bounds == 'AUTOMATIC':
- min_area = min(areas)
- max_area = max(areas)
- elif self.bounds == 'COMPRESSION':
- min_area = 1
- max_area = min(areas)
- elif self.bounds == 'TENSION':
- min_area = 1
- max_area = max(areas)
- delta_area = max_area - min_area
- if delta_area == 0:
- delta_area = 0.0001
- if self.bounds == 'MANUAL':
- delta_area = 0.0001
- else:
- self.report({'ERROR'}, "The faces have the same areas")
- #return {'CANCELLED'}
- for i in range(len(areas)):
- weight = (areas[i] - min_area)/delta_area
- ob.vertex_groups[-1].add([i], weight, 'REPLACE')
- ob.vertex_groups.update()
- ob.data.update()
- bpy.ops.object.mode_set(mode='WEIGHT_PAINT')
- return {'FINISHED'}
-
-
-class harmonic_weight(bpy.types.Operator):
- bl_idname = "object.harmonic_weight"
- bl_label = "Harmonic"
- bl_options = {'REGISTER', 'UNDO'}
- bl_description = ("Create an harmonic variation of the active Vertex Group")
-
- freq : bpy.props.FloatProperty(
- name="Frequency", default=20, soft_min=0,
- soft_max=100, description="Wave frequency")
-
- amp : bpy.props.FloatProperty(
- name="Amplitude", default=1, soft_min=0,
- soft_max=10, description="Wave amplitude")
-
- midlevel : bpy.props.FloatProperty(
- name="Midlevel", default=0, min=-1,
- max=1, description="Midlevel")
-
- add : bpy.props.FloatProperty(
- name="Add", default=0, min=-1,
- max=1, description="Add to the Weight")
-
- mult : bpy.props.FloatProperty(
- name="Multiply", default=0, min=0,
- max=1, description="Multiply for he Weight")
-
- @classmethod
- def poll(cls, context):
- return len(context.object.vertex_groups) > 0
-
- def execute(self, context):
- ob = bpy.context.active_object
- if len(ob.vertex_groups) > 0:
- group_id = ob.vertex_groups.active_index
- ob.vertex_groups.new(name="Harmonic")
- for i in range(len(ob.data.vertices)):
- try: val = ob.vertex_groups[group_id].weight(i)
- except: val = 0
- weight = self.amp*(sin(val*self.freq) - self.midlevel)/2 + 0.5 + self.add*val*(1-(1-val)*self.mult)
- ob.vertex_groups[-1].add([i], weight, 'REPLACE')
- ob.data.update()
- else:
- self.report({'ERROR'}, "Active object doesn't have vertex groups")
- return {'CANCELLED'}
- bpy.ops.object.mode_set(mode='WEIGHT_PAINT')
- return {'FINISHED'}
-
-
-
-class TISSUE_PT_color(bpy.types.Panel):
- bl_label = "Tissue Tools"
- bl_category = "Tissue"
- bl_space_type = "VIEW_3D"
- bl_region_type = "UI"
- #bl_options = {'DEFAULT_CLOSED'}
- bl_context = "vertexpaint"
-
- def draw(self, context):
- layout = self.layout
- col = layout.column(align=True)
- col.operator("object.vertex_colors_to_vertex_groups",
- icon="GROUP_VERTEX", text="Convert to Weight")
-
-class TISSUE_PT_weight(bpy.types.Panel):
- bl_label = "Tissue Tools"
- bl_category = "Tissue"
- bl_space_type = "VIEW_3D"
- bl_region_type = "UI"
- #bl_options = {'DEFAULT_CLOSED'}
- bl_context = "weightpaint"
-
- def draw(self, context):
- layout = self.layout
- col = layout.column(align=True)
- #if context.object.type == 'MESH' and context.mode == 'OBJECT':
- #col.label(text="Transform:")
- #col.separator()
- #elif bpy.context.mode == 'PAINT_WEIGHT':
- col.label(text="Weight Generate:")
- #col.operator(
- # "object.vertex_colors_to_vertex_groups", icon="GROUP_VCOL")
- col.operator("object.face_area_to_vertex_groups", icon="FACESEL")
- col.operator("object.curvature_to_vertex_groups", icon="SMOOTHCURVE")
- try: col.operator("object.weight_formula", icon="CON_TRANSFORM")
- except: col.operator("object.weight_formula")#, icon="CON_TRANSFORM")
- #col.label(text="Weight Processing:")
- col.separator()
-
- # TO BE FIXED
- #col.operator("object.weight_laplacian", icon="SMOOTHCURVE")
-
- col.operator("object.harmonic_weight", icon="IPO_ELASTIC")
- col.operator("object.vertex_group_to_vertex_colors", icon="GROUP_VCOL",
- text="Convert to Colors")
- col.separator()
- col.label(text="Deformation Analysis:")
- col.operator("object.edges_deformation", icon="DRIVER_DISTANCE")#FULLSCREEN_ENTER")
- col.operator("object.edges_bending", icon="DRIVER_ROTATIONAL_DIFFERENCE")#"MOD_SIMPLEDEFORM")
- col.separator()
- col.label(text="Weight Contour:")
- col.operator("object.weight_contour_curves", icon="MOD_CURVE")
- col.operator("object.weight_contour_displace", icon="MOD_DISPLACE")
- col.operator("object.weight_contour_mask", icon="MOD_MASK")
- col.separator()
- col.label(text="Simulations:")
- #col.operator("object.reaction_diffusion", icon="MOD_OCEAN")
- col.operator("object.start_reaction_diffusion",
- icon="EXPERIMENTAL",
- text="Reaction-Diffusion")
-
- #col.prop(context.object, "reaction_diffusion_run", icon="PLAY", text="Run Simulation")
- ####col.prop(context.object, "reaction_diffusion_run")
- #col.separator()
- #col.label(text="Vertex Color from:")
- #col.operator("object.vertex_group_to_vertex_colors", icon="GROUP_VERTEX")
-
-
-
-
-class start_reaction_diffusion(bpy.types.Operator):
- bl_idname = "object.start_reaction_diffusion"
- bl_label = "Start Reaction Diffusion"
- bl_description = ("Run a Reaction-Diffusion based on existing Vertex Groups: A and B")
- bl_options = {'REGISTER', 'UNDO'}
-
- run : bpy.props.BoolProperty(
- name="Run Reaction-Diffusion", default=True, description="Compute a new iteration on frame changes")
-
- time_steps : bpy.props.IntProperty(
- name="Steps", default=10, min=0, soft_max=50,
- description="Number of Steps")
-
- dt : bpy.props.FloatProperty(
- name="dt", default=1, min=0, soft_max=0.2,
- description="Time Step")
-
- diff_a : bpy.props.FloatProperty(
- name="Diff A", default=0.18, min=0, soft_max=2,
- description="Diffusion A")
-
- diff_b : bpy.props.FloatProperty(
- name="Diff B", default=0.09, min=0, soft_max=2,
- description="Diffusion B")
-
- f : bpy.props.FloatProperty(
- name="f", default=0.055, min=0, soft_max=0.5, precision=4,
- description="Feed Rate")
-
- k : bpy.props.FloatProperty(
- name="k", default=0.062, min=0, soft_max=0.5, precision=4,
- description="Kill Rate")
-
- @classmethod
- def poll(cls, context):
- return context.object.type == 'MESH'
-
- def execute(self, context):
- reaction_diffusion_add_handler(self, context)
- set_animatable_fix_handler(self, context)
-
- ob = context.object
-
- ob.reaction_diffusion_settings.run = self.run
- ob.reaction_diffusion_settings.dt = self.dt
- ob.reaction_diffusion_settings.time_steps = self.time_steps
- ob.reaction_diffusion_settings.f = self.f
- ob.reaction_diffusion_settings.k = self.k
- ob.reaction_diffusion_settings.diff_a = self.diff_a
- ob.reaction_diffusion_settings.diff_b = self.diff_b
-
-
- # check vertex group A
- try:
- vg = ob.vertex_groups['A']
- except:
- ob.vertex_groups.new(name='A')
- # check vertex group B
- try:
- vg = ob.vertex_groups['B']
- except:
- ob.vertex_groups.new(name='B')
-
- for v in ob.data.vertices:
- ob.vertex_groups['A'].add([v.index], 1, 'REPLACE')
- ob.vertex_groups['B'].add([v.index], 0, 'REPLACE')
-
- ob.vertex_groups.update()
- ob.data.update()
- bpy.ops.object.mode_set(mode='WEIGHT_PAINT')
-
- return {'FINISHED'}
-
-class reset_reaction_diffusion_weight(bpy.types.Operator):
- bl_idname = "object.reset_reaction_diffusion_weight"
- bl_label = "Reset Reaction Diffusion Weight"
- bl_description = ("Set A and B weight to default values")
- bl_options = {'REGISTER', 'UNDO'}
-
- @classmethod
- def poll(cls, context):
- return context.object.type == 'MESH'
-
- def execute(self, context):
- reaction_diffusion_add_handler(self, context)
- set_animatable_fix_handler(self, context)
-
- ob = context.object
-
- # check vertex group A
- try:
- vg = ob.vertex_groups['A']
- except:
- ob.vertex_groups.new(name='A')
- # check vertex group B
- try:
- vg = ob.vertex_groups['B']
- except:
- ob.vertex_groups.new(name='B')
-
- for v in ob.data.vertices:
- ob.vertex_groups['A'].add([v.index], 1, 'REPLACE')
- ob.vertex_groups['B'].add([v.index], 0, 'REPLACE')
-
- ob.vertex_groups.update()
- ob.data.update()
- bpy.ops.object.mode_set(mode='WEIGHT_PAINT')
-
- return {'FINISHED'}
-
-from bpy.app.handlers import persistent
-
-@persistent
-def reaction_diffusion_def_blur(scene):
- for ob in scene.objects:
- if ob.reaction_diffusion_settings.run:
- #try:
- me = ob.data
- bm = bmesh.new()
- bm.from_mesh(me)
- bm.edges.ensure_lookup_table()
-
- # store weight values
- a = []
- b = []
- for v in me.vertices:
- try:
- a.append(ob.vertex_groups["A"].weight(v.index))
- except:
- a.append(0)
- try:
- b.append(ob.vertex_groups["B"].weight(v.index))
- except:
- b.append(0)
-
- a = array(a)
- b = array(b)
- props = ob.reaction_diffusion_settings
- dt = props.dt
- time_steps = props.time_steps
- f = props.f
- k = props.k
- diff_a = props.diff_a * props.diff_mult
- diff_b = props.diff_b * props.diff_mult
-
- n_verts = len(bm.verts)
- #bpy.ops.object.mode_set(mode='WEIGHT_PAINT')
- #ob.data.use_paint_mask_vertex = True
-
- for i in range(time_steps):
- ab2 = a*b**2
- ob.vertex_groups.active = ob.vertex_groups['A']
- bpy.ops.object.vertex_group_smooth(group_select_mode='ACTIVE', factor=diff_a)
- ob.vertex_groups.active = ob.vertex_groups['B']
- bpy.ops.object.vertex_group_smooth(group_select_mode='ACTIVE', factor=diff_b)
-
- a = []
- b = []
- for v in me.vertices:
- a.append(ob.vertex_groups["A"].weight(v.index))
- b.append(ob.vertex_groups["B"].weight(v.index))
- a = array(a)
- b = array(b)
-
- a += - (ab2 + f*(1-a))*dt
- b += (ab2 - (k+f)*b)*dt
-
- a = nan_to_num(a)
- b = nan_to_num(b)
-
- for i in range(n_verts):
- ob.vertex_groups['A'].add([i], a[i], 'REPLACE')
- ob.vertex_groups['B'].add([i], b[i], 'REPLACE')
- ob.vertex_groups.update()
- ob.data.update()
- #bpy.ops.object.mode_set(mode='EDIT')
- #bpy.ops.object.mode_set(mode='WEIGHT_PAINT
- #bpy.ops.paint.weight_paint_toggle()
- #bpy.ops.paint.weight_paint_toggle()
-
- #bpy.ops.object.mode_set(mode='WEIGHT_PAINT')
- #except:
- # pass
-
-def reaction_diffusion_def_(scene):
- for ob in scene.objects:
- if ob.reaction_diffusion_settings.run:
- #try:
- me = ob.data
- bm = bmesh.new()
- bm.from_mesh(me)
- bm.edges.ensure_lookup_table()
-
- # store weight values
- a = []
- b = []
- for v in me.vertices:
- try:
- a.append(ob.vertex_groups["A"].weight(v.index))
- except:
- a.append(0)
- try:
- b.append(ob.vertex_groups["B"].weight(v.index))
- except:
- b.append(0)
-
- a = array(a)
- b = array(b)
- props = ob.reaction_diffusion_settings
- dt = props.dt
- time_steps = props.time_steps
- f = props.f
- k = props.k
- diff_a = props.diff_a * props.diff_mult
- diff_b = props.diff_b * props.diff_mult
-
- n_verts = len(bm.verts)
- for i in range(time_steps):
- lap_a = zeros((n_verts))#[0]*n_verts
- lap_b = zeros((n_verts))#[0]*n_verts
- if i == 0:
- lap_map = [[] for i in range(n_verts)]
- lap_mult = []
- for e in bm.edges:
- id0 = e.verts[0].index
- id1 = e.verts[1].index
- lap_map[id0].append(id1)
- lap_map[id1].append(id0)
- for id in range(n_verts):
- lap_mult.append(len(lap_map[id]))
- lap_mult = array(lap_mult)
- lap_map = array(lap_map)
- for id in range(n_verts):
- map = lap_map[id]
- lap_a[id] = a[lap_map[id]].sum()
- lap_b[id] = b[lap_map[id]].sum()
- lap_a -= a*lap_mult
- lap_b -= b*lap_mult
- ab2 = a*b**2
-
- a += (diff_a*lap_a - ab2 + f*(1-a))*dt
- b += (diff_b*lap_b + ab2 - (k+f)*b)*dt
-
- a = nan_to_num(a)
- b = nan_to_num(b)
-
- for i in range(n_verts):
- ob.vertex_groups['A'].add([i], a[i], 'REPLACE')
- ob.vertex_groups['B'].add([i], b[i], 'REPLACE')
- ob.vertex_groups.update()
- ob.data.update()
- #bpy.ops.object.mode_set(mode='EDIT')
- #bpy.ops.object.mode_set(mode='WEIGHT_PAINT')
- bpy.ops.paint.weight_paint_toggle()
- bpy.ops.paint.weight_paint_toggle()
-
- #bpy.ops.object.mode_set(mode='WEIGHT_PAINT')
- #except:
- # pass
-
-def reaction_diffusion_def(scene):
- for ob in scene.objects:
- if ob.reaction_diffusion_settings.run:
-
- start = time.time()
-
- me = ob.data
- n_edges = len(me.edges)
- n_verts = len(me.vertices)
-
- # store weight values
- a = np.zeros(n_verts)
- b = np.zeros(n_verts)
- #a = thread_read_weight(a, ob.vertex_groups["A"])
- #b = thread_read_weight(b, ob.vertex_groups["B"])
- #a = read_weight(a, ob.vertex_groups["A"])
- #b = read_weight(b, ob.vertex_groups["B"])
-
- for i in range(n_verts):
- try: a[i] = ob.vertex_groups["A"].weight(i)
- except: pass
- try: b[i] = ob.vertex_groups["B"].weight(i)
- except: pass
-
- props = ob.reaction_diffusion_settings
- dt = props.dt
- time_steps = props.time_steps
- f = props.f
- k = props.k
- diff_a = props.diff_a * props.diff_mult
- diff_b = props.diff_b * props.diff_mult
-
- edge_verts = [0]*n_edges*2
- me.edges.foreach_get("vertices", edge_verts)
-
- timeElapsed = time.time() - start
- print('RD - Preparation Time:',timeElapsed)
- start = time.time()
-
- try:
- edge_verts = np.array(edge_verts)
- a, b = numba_reaction_diffusion(n_verts, n_edges, edge_verts, a, b, diff_a, diff_b, f, k, dt, time_steps)
- a = nan_to_num(a)
- b = nan_to_num(b)
- except:
- edge_verts = np.array(edge_verts)
- arr = np.arange(n_edges)*2
- id0 = edge_verts[arr] # first vertex indices for each edge
- id1 = edge_verts[arr+1] # second vertex indices for each edge
- for i in range(time_steps):
- lap_a = np.zeros(n_verts)
- lap_b = np.zeros(n_verts)
- lap_a0 = a[id1] - a[id0] # laplacian increment for first vertex of each edge
- lap_b0 = b[id1] - b[id0] # laplacian increment for first vertex of each edge
-
- for i, j, la0, lb0 in np.nditer([id0,id1,lap_a0,lap_b0]):
- lap_a[i] += la0
- lap_b[i] += lb0
- lap_a[j] -= la0
- lap_b[j] -= lb0
- ab2 = a*b**2
- a += eval("(diff_a*lap_a - ab2 + f*(1-a))*dt")
- b += eval("(diff_b*lap_b + ab2 - (k+f)*b)*dt")
- #a += (diff_a*lap_a - ab2 + f*(1-a))*dt
- #b += (diff_b*lap_b + ab2 - (k+f)*b)*dt
-
- a = nan_to_num(a)
- b = nan_to_num(b)
-
- timeElapsed = time.time() - start
- print('RD - Simulation Time:',timeElapsed)
- start = time.time()
-
- for i in range(n_verts):
- ob.vertex_groups['A'].add([i], a[i], 'REPLACE')
- ob.vertex_groups['B'].add([i], b[i], 'REPLACE')
-
- for ps in ob.particle_systems:
- if ps.vertex_group_density == 'B' or ps.vertex_group_density == 'A':
- ps.invert_vertex_group_density = not ps.invert_vertex_group_density
- ps.invert_vertex_group_density = not ps.invert_vertex_group_density
-
- timeElapsed = time.time() - start
- print('RD - Closing Time:',timeElapsed)
-
-class TISSUE_PT_reaction_diffusion(Panel):
- bl_space_type = 'PROPERTIES'
- bl_region_type = 'WINDOW'
- bl_context = "data"
- bl_label = "Tissue - Reaction-Diffusion"
- bl_options = {'DEFAULT_CLOSED'}
-
- @classmethod
- def poll(cls, context):
- return 'A' and 'B' in context.object.vertex_groups
-
- def draw(self, context):
- reaction_diffusion_add_handler(self, context)
-
- ob = context.object
- props = ob.reaction_diffusion_settings
- layout = self.layout
- col = layout.column(align=True)
- row = col.row(align=True)
- if not ("A" and "B" in ob.vertex_groups):
- row.operator("object.start_reaction_diffusion",
- icon="EXPERIMENTAL",
- text="Reaction-Diffusion")
- else:
- row.operator("object.start_reaction_diffusion",
- icon="EXPERIMENTAL",
- text="Reset Reaction-Diffusion")
- row = col.row(align=True)
- row.prop(props, "run", text="Run Reaction-Diffusion")
- col = layout.column(align=True)
- row = col.row(align=True)
- row.prop(props, "time_steps")
- row.prop(props, "dt")
- col.separator()
- row = col.row(align=True)
- row.prop(props, "diff_a")
- row.prop(props, "diff_b")
- row = col.row(align=True)
- row.prop(props, "diff_mult")
- #col.separator()
- row = col.row(align=True)
- row.prop(props, "f")
- row.prop(props, "k")
diff --git a/mesh_tissue/config.py b/mesh_tissue/config.py
new file mode 100644
index 00000000..380cb281
--- /dev/null
+++ b/mesh_tissue/config.py
@@ -0,0 +1,63 @@
+import bpy
+from bpy.props import (
+ IntProperty,
+ BoolProperty
+ )
+
+evaluatedDepsgraph = None
+
+class tissuePreferences(bpy.types.AddonPreferences):
+ bl_idname = __package__
+
+ print_stats : IntProperty(
+ name="Print Stats",
+ description="Print in the console all details about the computing time.",
+ default=1,
+ min=0,
+ max=4
+ )
+
+ use_numba_tess : BoolProperty(
+ name="Numba Tessellate",
+ description="Boost the Tessellation using Numba module. It will be slower during the first execution",
+ default=True
+ )
+
+ def draw(self, context):
+
+ from .utils_pip import Pip
+ Pip._ensure_user_site_package()
+ layout = self.layout
+ layout.prop(self, "print_stats")
+ import importlib
+ numba_spec = importlib.util.find_spec('numba')
+ found = numba_spec is not None
+ if found:
+ layout.label(text='Numba module installed correctly!', icon='INFO')
+ layout.prop(self, "use_numba_tess")
+ else:
+ layout.label(text='Numba module not installed!', icon='ERROR')
+ layout.label(text='Installing Numba will make Tissue faster', icon='INFO')
+ row = layout.row()
+ row.operator('scene.tissue_install_numba')
+ layout.label(text='Internet connection required. It may take few minutes', icon='URL')
+
+class tissue_install_numba(bpy.types.Operator):
+ bl_idname = "scene.tissue_install_numba"
+ bl_label = "Install Numba"
+ bl_description = ("Install Numba python module")
+ bl_options = {'REGISTER'}
+
+ def execute(self, context):
+ try:
+ from .utils_pip import Pip
+ #Pip.upgrade_pip()
+ Pip.install('llvmlite')
+ Pip.install('numba')
+ from numba import jit, njit, guvectorize, float64, int32, prange
+ bool_numba = True
+ print('Tissue: Numba successfully installed!')
+ self.report({'INFO'}, 'Tissue: Numba successfully installed!')
+ except:
+ print('Tissue: Numba not loaded correctly. Try restarting Blender')
+ return {'FINISHED'}
diff --git a/mesh_tissue/curves_tools.py b/mesh_tissue/curves_tools.py
new file mode 100644
index 00000000..5f6e07a4
--- /dev/null
+++ b/mesh_tissue/curves_tools.py
@@ -0,0 +1,803 @@
+# ##### BEGIN GPL LICENSE BLOCK #####
+#
+# This program is free software; you can redistribute it and/or
+# modify it under the terms of the GNU General Public License
+# as published by the Free Software Foundation; either version 2
+# of the License, or (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this program; if not, write to the Free Software Foundation,
+# Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
+#
+# ##### END GPL LICENSE BLOCK #####
+
+# #
+# (c) Alessandro Zomparelli #
+# (2017) #
+# #
+# http://www.co-de-it.com/ #
+# #
+# ############################################################################ #
+
+
+import bpy, bmesh
+from bpy.types import Operator
+from bpy.props import (
+ IntProperty,
+ BoolProperty,
+ EnumProperty,
+ PointerProperty,
+ StringProperty,
+ FloatProperty
+ )
+from bpy.types import (
+ Operator,
+ Panel,
+ PropertyGroup,
+ )
+
+import numpy as np
+from mathutils import Vector
+from math import pi
+from .utils import (
+ find_curves,
+ update_curve_from_pydata,
+ simple_to_mesh,
+ convert_object_to_mesh,
+ get_weight_numpy,
+ loops_from_bmesh,
+ get_mesh_before_subs
+ )
+import time
+
+
+def anim_curve_active(self, context):
+ ob = context.object
+ props = ob.tissue_to_curve
+ try:
+ props.object.name
+ if not ob.tissue.bool_lock:
+ bpy.ops.object.tissue_convert_to_curve_update()
+ except: pass
+
+
+class tissue_to_curve_prop(PropertyGroup):
+ object : PointerProperty(
+ type=bpy.types.Object,
+ name="",
+ description="Source object",
+ update = anim_curve_active
+ )
+ bool_smooth : BoolProperty(
+ name="Smooth Shading",
+ default=True,
+ description="Output faces with smooth shading rather than flat shaded",
+ update = anim_curve_active
+ )
+ bool_lock : BoolProperty(
+ name="Lock",
+ description="Prevent automatic update on settings changes or if other objects have it in the hierarchy.",
+ default=False,
+ update = anim_curve_active
+ )
+ bool_dependencies : BoolProperty(
+ name="Update Dependencies",
+ description="Automatically updates source object as well, when possible",
+ default=False,
+ update = anim_curve_active
+ )
+ bool_run : BoolProperty(
+ name="Animatable Curve",
+ description="Automatically recompute the conversion when the frame is changed.",
+ default = False
+ )
+ use_modifiers : BoolProperty(
+ name="Use Modifiers",
+ default=True,
+ description="Automatically apply Modifiers and Shape Keys",
+ update = anim_curve_active
+ )
+ subdivision_mode : EnumProperty(
+ items=(
+ ('ALL', "All", ""),
+ ('CAGE', "Cage", ""),
+ ('INNER', "Inner", "")
+ ),
+ default='CAGE',
+ name="Subdivided Edges",
+ update = anim_curve_active
+ )
+ use_endpoint_u : BoolProperty(
+ name="Endpoint U",
+ default=True,
+ description="Make all open nurbs curve meet the endpoints",
+ update = anim_curve_active
+ )
+ clean_distance : FloatProperty(
+ name="Merge Distance", default=0, min=0, soft_max=10,
+ description="Merge Distance",
+ update = anim_curve_active
+ )
+ nurbs_order : IntProperty(
+ name="Order", default=4, min=2, max=6,
+ description="Nurbs order",
+ update = anim_curve_active
+ )
+ system : IntProperty(
+ name="System", default=0, min=0,
+ description="Particle system index",
+ update = anim_curve_active
+ )
+ bounds_selection : EnumProperty(
+ items=(
+ ('ALL', "All", ""),
+ ('BOUNDS', "Boundaries", ""),
+ ('INNER', "Inner", "")
+ ),
+ default='ALL',
+ name="Boundary Selection",
+ update = anim_curve_active
+ )
+ periodic_selection : EnumProperty(
+ items=(
+ ('ALL', "All", ""),
+ ('OPEN', "Open", ""),
+ ('CLOSED', "Closed", "")
+ ),
+ default='ALL',
+ name="Periodic Selection",
+ update = anim_curve_active
+ )
+ spline_type : EnumProperty(
+ items=(
+ ('POLY', "Poly", ""),
+ ('BEZIER', "Bezier", ""),
+ ('NURBS', "NURBS", "")
+ ),
+ default='POLY',
+ name="Spline Type",
+ update = anim_curve_active
+ )
+ mode : EnumProperty(
+ items=(
+ ('LOOPS', "Loops", ""),
+ ('EDGES', "Edges", ""),
+ ('PARTICLES', "Particles", "")
+ ),
+ default='LOOPS',
+ name="Conversion Mode",
+ update = anim_curve_active
+ )
+ vertex_group : StringProperty(
+ name="Radius", default='',
+ description="Vertex Group used for variable radius",
+ update = anim_curve_active
+ )
+ invert_vertex_group : BoolProperty(default=False,
+ description='Inverte the value of the Vertex Group',
+ update = anim_curve_active
+ )
+ vertex_group_factor : FloatProperty(
+ name="Factor",
+ default=0,
+ min=0,
+ max=1,
+ description="Depth bevel factor to use for zero vertex group influence",
+ update = anim_curve_active
+ )
+ only_sharp : BoolProperty(
+ default=False,
+ name="Only Sharp Edges",
+ description='Convert only Sharp edges',
+ update = anim_curve_active
+ )
+ pattern_depth : FloatProperty(
+ name="Depth",
+ default=0.02,
+ min=0,
+ soft_max=10,
+ description="Displacement pattern depth",
+ update = anim_curve_active
+ )
+ pattern_offset : FloatProperty(
+ name="Offset",
+ default=0,
+ soft_min=-1,
+ soft_max=1,
+ description="Displacement pattern offset",
+ update = anim_curve_active
+ )
+ pattern0 : IntProperty(
+ name="Step 0",
+ default=0,
+ min=0,
+ soft_max=10,
+ description="Pattern step 0",
+ update = anim_curve_active
+ )
+ pattern1 : IntProperty(
+ name="Step 1",
+ default=0,
+ min=0,
+ soft_max=10,
+ description="Pattern step 1",
+ update = anim_curve_active
+ )
+
+class tissue_convert_to_curve(Operator):
+ bl_idname = "object.tissue_convert_to_curve"
+ bl_label = "Tissue Convert to Curve"
+ bl_description = "Convert selected mesh to Curve object"
+ bl_options = {'REGISTER', 'UNDO'}
+
+ object : StringProperty(
+ name="",
+ description="Source object",
+ default = ""
+ )
+ bool_smooth : BoolProperty(
+ name="Smooth Shading",
+ default=True,
+ description="Output faces with smooth shading rather than flat shaded"
+ )
+ use_modifiers : BoolProperty(
+ name="Use Modifiers",
+ default=True,
+ description="Automatically apply Modifiers and Shape Keys"
+ )
+ subdivision_mode : EnumProperty(
+ items=(
+ ('ALL', "All", ""),
+ ('CAGE', "Cage", ""),
+ ('INNER', "Inner", "")
+ ),
+ default='CAGE',
+ name="Subdivided Edges"
+ )
+ use_endpoint_u : BoolProperty(
+ name="Endpoint U",
+ default=True,
+ description="Make all open nurbs curve meet the endpoints"
+ )
+ nurbs_order : IntProperty(
+ name="Order", default=4, min=2, max=6,
+ description="Nurbs order"
+ )
+ system : IntProperty(
+ name="System", default=0, min=0,
+ description="Particle system index"
+ )
+ clean_distance : FloatProperty(
+ name="Merge Distance", default=0, min=0, soft_max=10,
+ description="Merge Distance"
+ )
+ spline_type : EnumProperty(
+ items=(
+ ('POLY', "Poly", ""),
+ ('BEZIER', "Bezier", ""),
+ ('NURBS', "NURBS", "")
+ ),
+ default='POLY',
+ name="Spline Type"
+ )
+ bounds_selection : EnumProperty(
+ items=(
+ ('ALL', "All", ""),
+ ('BOUNDS', "Boundaries", ""),
+ ('INNER', "Inner", "")
+ ),
+ default='ALL',
+ name="Boundary Selection"
+ )
+ periodic_selection : EnumProperty(
+ items=(
+ ('ALL', "All", ""),
+ ('OPEN', "Open", ""),
+ ('CLOSED', "Closed", "")
+ ),
+ default='ALL',
+ name="Periodic Selection"
+ )
+ mode : EnumProperty(
+ items=(
+ ('LOOPS', "Loops", ""),
+ ('EDGES', "Edges", ""),
+ ('PARTICLES', "Particles", "")
+ ),
+ default='LOOPS',
+ name="Conversion Mode"
+ )
+ vertex_group : StringProperty(
+ name="Radius", default='',
+ description="Vertex Group used for variable radius"
+ )
+ invert_vertex_group : BoolProperty(default=False,
+ description='Inverte the value of the Vertex Group'
+ )
+ vertex_group_factor : FloatProperty(
+ name="Factor",
+ default=0,
+ min=0,
+ max=1,
+ description="Depth bevel factor to use for zero vertex group influence"
+ )
+ only_sharp : BoolProperty(
+ default=False,
+ name="Only Sharp Edges",
+ description='Convert only Sharp edges'
+ )
+ pattern_depth : FloatProperty(
+ name="Depth",
+ default=0.02,
+ min=0,
+ soft_max=10,
+ description="Displacement pattern depth"
+ )
+ pattern_offset : FloatProperty(
+ name="Offset",
+ default=0,
+ soft_min=-1,
+ soft_max=1,
+ description="Displacement pattern offset"
+ )
+ pattern0 : IntProperty(
+ name="Step 0",
+ default=0,
+ min=0,
+ soft_max=10,
+ description="Pattern step 0"
+ )
+ pattern1 : IntProperty(
+ name="Step 1",
+ default=0,
+ min=0,
+ soft_max=10,
+ description="Pattern step 1"
+ )
+
+ @classmethod
+ def poll(cls, context):
+ try:
+ #bool_tessellated = context.object.tissue_tessellate.generator != None
+ ob = context.object
+ return ob.type in ('MESH','CURVE','SURFACE','FONT') and ob.mode == 'OBJECT'# and bool_tessellated
+ except:
+ return False
+
+ def invoke(self, context, event):
+ self.object = context.object.name
+ return context.window_manager.invoke_props_dialog(self)
+
+ def draw(self, context):
+ ob = context.object
+ ob0 = bpy.data.objects[self.object]
+ #props = ob.tissue_to_curve
+ layout = self.layout
+ col = layout.column(align=True)
+ row = col.row(align=True)
+ #row.label(text='Object: ' + self.object)
+ #row.prop_search(self, "object", context.scene, "objects")
+ #row.prop(self, "use_modifiers")#, icon='MODIFIER', text='')
+ col.separator()
+ col.label(text='Conversion Mode:')
+ row = col.row(align=True)
+ row.prop(
+ self, "mode", text="Conversion Mode", icon='NONE', expand=True,
+ slider=False, toggle=False, icon_only=False, event=False,
+ full_event=False, emboss=True, index=-1)
+ if self.mode == 'PARTICLES':
+ col.separator()
+ col.prop(self, "system")
+ col.separator()
+ if self.mode in ('LOOPS', 'EDGES'):
+ row = col.row(align=True)
+ row.prop(self, "use_modifiers")
+ col2 = row.column(align=True)
+ if self.use_modifiers:
+ col2.prop(self, "subdivision_mode", text='', icon='NONE', expand=False,
+ slider=True, toggle=False, icon_only=False, event=False,
+ full_event=False, emboss=True, index=-1)
+ col2.enabled = False
+ for m in bpy.data.objects[self.object].modifiers:
+ if m.type in ('SUBSURF','MULTIRES'): col2.enabled = True
+ col.separator()
+ row = col.row(align=True)
+ row.label(text='Filter Edges:')
+ col2 = row.column(align=True)
+ col2.prop(self, "bounds_selection", text='', icon='NONE', expand=False,
+ slider=True, toggle=False, icon_only=False, event=False,
+ full_event=False, emboss=True, index=-1)
+ col2.prop(self, 'only_sharp')
+ col.separator()
+ if self.mode == 'LOOPS':
+ row = col.row(align=True)
+ row.label(text='Filter Loops:')
+ row.prop(self, "periodic_selection", text='', icon='NONE', expand=False,
+ slider=True, toggle=False, icon_only=False, event=False,
+ full_event=False, emboss=True, index=-1)
+ col.separator()
+ col.label(text='Spline Type:')
+ row = col.row(align=True)
+ row.prop(
+ self, "spline_type", text="Spline Type", icon='NONE', expand=True,
+ slider=False, toggle=False, icon_only=False, event=False,
+ full_event=False, emboss=True, index=-1)
+ if self.spline_type == 'NURBS':
+ col.separator()
+ col.label(text='Nurbs splines:')
+ row = col.row(align=True)
+ row.prop(self, "use_endpoint_u")
+ row.prop(self, "nurbs_order")
+ col.separator()
+ col.prop(self, "bool_smooth")
+ if ob0.type == 'MESH' and self.mode != 'PARTICLES':
+ col.separator()
+ col.label(text='Variable Radius:')
+ row = col.row(align=True)
+ row.prop_search(self, 'vertex_group', ob0, "vertex_groups", text='')
+ row.prop(self, "invert_vertex_group", text="", toggle=True, icon='ARROW_LEFTRIGHT')
+ row.prop(self, "vertex_group_factor")
+ col.separator()
+ col.label(text='Clean curves:')
+ col.prop(self, "clean_distance")
+ col.separator()
+ col.label(text='Displacement Pattern:')
+ row = col.row(align=True)
+ row.prop(self, "pattern0")
+ row.prop(self, "pattern1")
+ row = col.row(align=True)
+ row.prop(self, "pattern_depth")
+ row.prop(self, "pattern_offset")
+
+ def execute(self, context):
+ ob = context.active_object
+
+ crv = bpy.data.curves.new(ob.name + '_Curve', type='CURVE')
+ crv.dimensions = '3D'
+ new_ob = bpy.data.objects.new(ob.name + '_Curve', crv)
+ bpy.context.collection.objects.link(new_ob)
+ context.view_layer.objects.active = new_ob
+
+ new_ob.select_set(True)
+ ob.select_set(False)
+ new_ob.matrix_world = ob.matrix_world
+
+ new_ob.tissue.tissue_type = 'TO_CURVE'
+ new_ob.tissue.bool_lock = True
+
+ props = new_ob.tissue_to_curve
+ props.object = ob
+ props.use_modifiers = self.use_modifiers
+ props.subdivision_mode = self.subdivision_mode
+ props.clean_distance = self.clean_distance
+ props.spline_type = self.spline_type
+ props.mode = self.mode
+ props.use_endpoint_u = self.use_endpoint_u
+ props.nurbs_order = self.nurbs_order
+ props.vertex_group = self.vertex_group
+ props.vertex_group_factor = self.vertex_group_factor
+ props.invert_vertex_group = self.invert_vertex_group
+ props.bool_smooth = self.bool_smooth
+ props.system = self.system
+ props.periodic_selection = self.periodic_selection
+ props.bounds_selection = self.bounds_selection
+ props.only_sharp = self.only_sharp
+ props.pattern0 = self.pattern0
+ props.pattern1 = self.pattern1
+ props.pattern_depth = self.pattern_depth
+ props.pattern_offset = self.pattern_offset
+
+ new_ob.tissue.bool_lock = False
+
+ bpy.ops.object.tissue_convert_to_curve_update()
+
+ return {'FINISHED'}
+
+class tissue_convert_to_curve_update(Operator):
+ bl_idname = "object.tissue_convert_to_curve_update"
+ bl_label = "Tissue Update Curve"
+ bl_description = "Update Curve object"
+ bl_options = {'REGISTER', 'UNDO'}
+
+ @classmethod
+ def poll(cls, context):
+ try:
+ ob = context.object
+ bool_curve = ob.tissue_to_curve.object != None
+ return ob.type == 'CURVE' and ob.mode == 'OBJECT' and bool_curve
+ except:
+ return False
+
+ def execute(self, context):
+ start_time = time.time()
+
+ ob = context.object
+ props = ob.tissue_to_curve
+ ob0 = props.object
+ if props.mode == 'PARTICLES':
+ eval_ob = ob0.evaluated_get(context.evaluated_depsgraph_get())
+ system_id = min(props.system, len(eval_ob.particle_systems))
+ psystem = eval_ob.particle_systems[system_id]
+ ob.data.splines.clear()
+ particles = psystem.particles
+ for id,p in enumerate(particles):
+ s = ob.data.splines.new('POLY')
+ if psystem.settings.type == 'HAIR':
+ n_pts = len(p.hair_keys)
+ pts = [0]*3*n_pts
+ p.hair_keys.foreach_get('co',pts)
+ co = np.array(pts).reshape((-1,3))
+ else:
+ n_pts = 2**psystem.settings.display_step + 1
+ pts = []
+ for i in range(n_pts):
+ vec = psystem.co_hair(eval_ob, particle_no=id,step=i)
+ vec = ob0.matrix_world.inverted() @ vec
+ pts.append(vec)
+ co = np.array(pts)
+ w = np.ones(n_pts).reshape((n_pts,1))
+ co = np.concatenate((co,w),axis=1).reshape((n_pts*4))
+ s.points.add(n_pts-1)
+ s.points.foreach_set('co',co)
+
+ else:
+ _ob0 = ob0
+ ob0 = convert_object_to_mesh(ob0, apply_modifiers=props.use_modifiers)
+ me = ob0.data
+ n_verts = len(me.vertices)
+ verts = [0]*n_verts*3
+ me.vertices.foreach_get('co',verts)
+ verts = np.array(verts).reshape((-1,3))
+
+ normals = [0]*n_verts*3
+ me.vertices.foreach_get('normal',normals)
+ normals = np.array(normals).reshape((-1,3))
+ #tilt = np.degrees(np.arcsin(normals[:,2]))
+ #tilt = np.arccos(normals[:,2])/2
+
+ verts = np.array(verts).reshape((-1,3))
+ if props.mode in ('LOOPS','EDGES'):
+ bm = bmesh.new()
+ bm.from_mesh(me)
+ bm.verts.ensure_lookup_table()
+ bm.edges.ensure_lookup_table()
+ bm.faces.ensure_lookup_table()
+ todo_edges = list(bm.edges)
+ if props.use_modifiers and props.subdivision_mode != 'ALL':
+ me0, subs = get_mesh_before_subs(_ob0)
+ n_edges0 = len(me0.edges)
+ bpy.data.meshes.remove(me0)
+ if props.subdivision_mode == 'CAGE':
+ todo_edges = todo_edges[:n_edges0*(2**subs)]
+ elif props.subdivision_mode == 'INNER':
+ todo_edges = todo_edges[n_edges0*(2**subs):]
+
+ if props.only_sharp:
+ _todo_edges = []
+ sharp_verts = []
+ for e in todo_edges:
+ edge = me.edges[e.index]
+ if edge.use_edge_sharp:
+ _todo_edges.append(e)
+ sharp_verts.append(edge.vertices[0])
+ sharp_verts.append(edge.vertices[1])
+ todo_edges = _todo_edges
+
+ if props.bounds_selection == 'BOUNDS': todo_edges = [e for e in todo_edges if len(e.link_faces)<2]
+ elif props.bounds_selection == 'INNER': todo_edges = [e for e in todo_edges if len(e.link_faces)>1]
+
+ if props.mode == 'EDGES':
+ ordered_points = [[e.verts[0].index, e.verts[1].index] for e in todo_edges]
+ elif props.mode == 'LOOPS':
+ vert_loops, edge_loops = loops_from_bmesh(todo_edges)
+ if props.only_sharp:
+ ordered_points = []
+ for loop in vert_loops:
+ loop_points = []
+ for v in loop:
+ if v.index in sharp_verts:
+ loop_points.append(v.index)
+ else:
+ if len(loop_points)>1:
+ ordered_points.append(loop_points)
+ loop_points = []
+ if len(loop_points)>1:
+ ordered_points.append(loop_points)
+ #ordered_points = [[v.index for v in loop if v.index in sharp_verts] for loop in vert_loops]
+ else:
+ ordered_points = [[v.index for v in loop] for loop in vert_loops]
+ if props.periodic_selection == 'CLOSED':
+ ordered_points = [points for points in ordered_points if points[0] == points[-1]]
+ elif props.periodic_selection == 'OPEN':
+ ordered_points = [points for points in ordered_points if points[0] != points[-1]]
+ else:
+ try:
+ ordered_points = find_curves(edges, n_verts)
+ except:
+ bpy.data.objects.remove(ob0)
+ return {'CANCELLED'}
+
+ try:
+ weight = get_weight_numpy(ob0.vertex_groups[props.vertex_group], n_verts)
+ if props.invert_vertex_group: weight = 1-weight
+ fact = props.vertex_group_factor
+ if fact > 0:
+ weight = weight*(1-fact) + fact
+ except:
+ weight = None
+
+ # Set curves Tilt
+ '''
+ tilt = []
+ for points in ordered_points:
+ if points[0] == points[-1]: # Closed curve
+ pts0 = [points[-1]] + points[:-1] # i-1
+ pts1 = points[:] # i
+ pts2 = points[1:] + [points[0]] # 1+1
+ else: # Open curve
+ pts0 = [points[0]] + points[:-1] # i-1
+ pts1 = points[:] # i
+ pts2 = points[1:] + [points[-1]] # i+1
+ curve_tilt = []
+ for i0, i1, i2 in zip(pts0, pts1, pts2):
+ pt0 = Vector(verts[i0])
+ pt1 = Vector(verts[i1])
+ pt2 = Vector(verts[i2])
+ tan1 = (pt1-pt0).normalized()
+ tan2 = (pt2-pt1).normalized()
+ vec_tan = -(tan1 + tan2).normalized()
+ vec2 = vec_tan.cross(Vector((0,0,1)))
+ vec_z = vec_tan.cross(vec2)
+ nor = normals[i1]
+ if vec_z.length == 0:
+ vec_z = Vector(nor)
+ ang = vec_z.angle(nor)
+ if nor[2] < 0: ang = 2*pi-ang
+ #if vec_tan[0] > vec_tan[1] and nor[0]>0: ang = -ang
+ #if vec_tan[0] > vec_tan[2] and nor[0]>0: ang = -ang
+ #if vec_tan[0] < vec_tan[1] and nor[1]>0: ang = -ang
+ #if nor[0]*nor[1]*nor[2] < 0: ang = -ang
+ if nor[2] == 0: ang = -5*pi/4
+ #ang = max(ang, np.arccos(nor[2]))
+ curve_tilt.append(ang)
+ #curve_tilt.append(np.arccos(nor[2]))
+ tilt.append(curve_tilt)
+ '''
+ depth = props.pattern_depth
+ offset = props.pattern_offset
+ pattern = [props.pattern0,props.pattern1]
+ update_curve_from_pydata(ob.data, verts, normals, weight, ordered_points, merge_distance=props.clean_distance, pattern=pattern, depth=depth, offset=offset)
+
+
+ bpy.data.objects.remove(ob0)
+ for s in ob.data.splines:
+ s.type = props.spline_type
+ if s.type == 'NURBS':
+ s.use_endpoint_u = props.use_endpoint_u
+ s.order_u = props.nurbs_order
+ ob.data.splines.update()
+ if not props.bool_smooth: bpy.ops.object.shade_flat()
+
+ end_time = time.time()
+ print('Tissue: object "{}" converted to Curve in {:.4f} sec'.format(ob.name, end_time-start_time))
+
+ return {'FINISHED'}
+
+
+class TISSUE_PT_convert_to_curve(Panel):
+ bl_space_type = 'PROPERTIES'
+ bl_region_type = 'WINDOW'
+ bl_context = "data"
+ bl_label = "Tissue Convert to Curve"
+ bl_options = {'DEFAULT_CLOSED'}
+
+ @classmethod
+ def poll(cls, context):
+ try:
+ #bool_curve = context.object.tissue_to_curve.object != None
+ ob = context.object
+ return ob.type == 'CURVE' and ob.tissue.tissue_type == 'TO_CURVE'
+ except:
+ return False
+
+ def draw(self, context):
+ ob = context.object
+ props = ob.tissue_to_curve
+
+ layout = self.layout
+ #layout.use_property_split = True
+ #layout.use_property_decorate = False
+ col = layout.column(align=True)
+ row = col.row(align=True)
+ #col.operator("object.tissue_convert_to_curve_update", icon='FILE_REFRESH', text='Refresh')
+ row.operator("object.tissue_update_tessellate_deps", icon='FILE_REFRESH', text='Refresh') ####
+ lock_icon = 'LOCKED' if ob.tissue.bool_lock else 'UNLOCKED'
+ #lock_icon = 'PINNED' if props.bool_lock else 'UNPINNED'
+ deps_icon = 'LINKED' if ob.tissue.bool_dependencies else 'UNLINKED'
+ row.prop(ob.tissue, "bool_dependencies", text="", icon=deps_icon)
+ row.prop(ob.tissue, "bool_lock", text="", icon=lock_icon)
+ col2 = row.column(align=True)
+ col2.prop(ob.tissue, "bool_run", text="",icon='TIME')
+ col2.enabled = not ob.tissue.bool_lock
+
+ col.separator()
+ row = col.row(align=True)
+ row.prop_search(props, "object", context.scene, "objects")
+ row.prop(props, "use_modifiers", icon='MODIFIER', text='')
+ col.separator()
+ col.label(text='Conversion Mode:')
+ row = col.row(align=True)
+ row.prop(
+ props, "mode", icon='NONE', expand=True,
+ slider=False, toggle=False, icon_only=False, event=False,
+ full_event=False, emboss=True, index=-1)
+ if props.mode == 'PARTICLES':
+ col.separator()
+ col.prop(props, "system")
+ col.separator()
+
+ if props.mode in ('LOOPS', 'EDGES'):
+ row = col.row(align=True)
+ row.prop(props, "use_modifiers")
+ col2 = row.column(align=True)
+ if props.use_modifiers:
+ col2.prop(props, "subdivision_mode", text='', icon='NONE', expand=False,
+ slider=True, toggle=False, icon_only=False, event=False,
+ full_event=False, emboss=True, index=-1)
+ col2.enabled = False
+ for m in props.object.modifiers:
+ if m.type in ('SUBSURF','MULTIRES'): col2.enabled = True
+ col.separator()
+ row = col.row(align=True)
+ row.label(text='Filter Edges:')
+ col2 = row.column(align=True)
+ col2.prop(props, "bounds_selection", text='', icon='NONE', expand=False,
+ slider=True, toggle=False, icon_only=False, event=False,
+ full_event=False, emboss=True, index=-1)
+ col2.prop(props, 'only_sharp')
+ col.separator()
+ if props.mode == 'LOOPS':
+ row = col.row(align=True)
+ row.label(text='Filter Loops:')
+ row.prop(props, "periodic_selection", text='', icon='NONE', expand=False,
+ slider=True, toggle=False, icon_only=False, event=False,
+ full_event=False, emboss=True, index=-1)
+ col.separator()
+
+ col.label(text='Spline Type:')
+ row = col.row(align=True)
+ row.prop(
+ props, "spline_type", text="Spline Type", icon='NONE', expand=True,
+ slider=False, toggle=False, icon_only=False, event=False,
+ full_event=False, emboss=True, index=-1)
+ if props.spline_type == 'NURBS':
+ col.separator()
+ col.label(text='Nurbs Splines:')
+ row = col.row(align=True)
+ row.prop(props, "use_endpoint_u")
+ row.prop(props, "nurbs_order")
+ col.separator()
+ col.prop(props, "bool_smooth")
+ if props.object.type == 'MESH':
+ col.separator()
+ col.label(text='Variable Radius:')
+ row = col.row(align=True)
+ row.prop_search(props, 'vertex_group', props.object, "vertex_groups", text='')
+ row.prop(props, "invert_vertex_group", text="", toggle=True, icon='ARROW_LEFTRIGHT')
+ row.prop(props, "vertex_group_factor")
+ col.separator()
+ col.label(text='Clean Curves:')
+ col.prop(props, "clean_distance")
+ col.separator()
+ col.label(text='Displacement Pattern:')
+ row = col.row(align=True)
+ row.prop(props, "pattern0")
+ row.prop(props, "pattern1")
+ row = col.row(align=True)
+ row.prop(props, "pattern_depth")
+ row.prop(props, "pattern_offset")
diff --git a/mesh_tissue/dual_mesh.py b/mesh_tissue/dual_mesh.py
index c33cb323..5b32d46b 100644
--- a/mesh_tissue/dual_mesh.py
+++ b/mesh_tissue/dual_mesh.py
@@ -41,29 +41,40 @@ class dual_mesh_tessellated(Operator):
('QUAD', 'Quad Faces', ''),
('TRI', 'Triangles', '')],
name="Source Faces",
- description="Source polygons",
- default="QUAD",
+ description="Triangles works with any geometry." \
+ "Quad option is faster when the object has only Quads",
+ default="TRI",
options={'LIBRARY_EDITABLE'}
)
+ link_component : BoolProperty(
+ name="Editable Component",
+ default=False,
+ description="Add Component Object to the Scene"
+ )
+
def execute(self, context):
auto_layer_collection()
ob0 = context.object
name1 = "DualMesh_{}_Component".format(self.source_faces)
# Generate component
if self.source_faces == 'QUAD':
- verts = [(0.0, 0.0, 0.0), (0.0, 0.5, 0.0),
+ verts = [(1.0, 0.0, 0.0), (0.5, 0.0, 0.0),
+ (0.0, 0.0, 0.0), (0.0, 0.5, 0.0),
(0.0, 1.0, 0.0), (0.5, 1.0, 0.0),
(1.0, 1.0, 0.0), (1.0, 0.5, 0.0),
- (1.0, 0.0, 0.0), (0.5, 0.0, 0.0),
- (1/3, 1/3, 0.0), (2/3, 2/3, 0.0)]
+ (2/3, 1/3, 0.0), (1/3, 2/3, 0.0)]
edges = [(0,1), (1,2), (2,3), (3,4), (4,5), (5,6), (6,7),
(7,0), (1,8), (8,7), (3,9), (9,5), (8,9)]
faces = [(7,8,1,0), (8,9,3,2,1), (9,5,4,3), (9,8,7,6,5)]
else:
- verts = [(0.0,0.0,0.0), (0.5,0.0,0.0), (1.0,0.0,0.0), (0.0,1.0,0.0), (0.5,1.0,0.0), (1.0,1.0,0.0)]
- edges = [(0,1), (1,2), (2,5), (5,4), (4,3), (3,0), (1,4)]
- faces = [(0,1,4,3), (1,2,5,4)]
+ verts = [(0.0, 0.0, 0.0), (1.0, 0.0, 0.0),
+ (0.0, 1.0, 0.0), (1.0, 1.0, 0.0),
+ (0.5, 1/3, 0.0), (0.0, 0.5, 0.0),
+ (1.0, 0.5, 0.0), (0.5, 0.0, 0.0)]
+ edges = [(0,5), (1,7), (3,6), (2,3), (2,5), (1,6), (0,7),
+ (4,5), (4,7), (4,6)]
+ faces = [(5,0,7,4), (7,1,6,4), (3,2,5,4,6)]
# check pre-existing component
try:
@@ -78,38 +89,38 @@ class dual_mesh_tessellated(Operator):
me = bpy.data.meshes.new("Dual-Mesh") # add a new mesh
me.from_pydata(verts, edges, faces)
me.update(calc_edges=True, calc_edges_loose=True)
- if self.source_faces == 'QUAD': n_seams = 8
- else: n_seams = 6
- for i in range(n_seams): me.edges[i].use_seam = True
+ if self.source_faces == 'QUAD': seams = (0,1,2,3,4,5,6,9)
+ else: seams = (0,1,2,3,4,5,7)
+ for i in seams: me.edges[i].use_seam = True
ob1 = bpy.data.objects.new(name1, me)
- bpy.context.collection.objects.link(ob1)
# fix visualization issue
- bpy.context.view_layer.objects.active = ob1
- ob1.select_set(True)
- bpy.ops.object.editmode_toggle()
- bpy.ops.object.editmode_toggle()
- ob1.select_set(False)
- # hide component
- ob1.hide_select = True
- ob1.hide_render = True
- ob1.hide_viewport = True
+ if self.link_component:
+ context.collection.objects.link(ob1)
+ context.view_layer.objects.active = ob1
+ ob1.select_set(True)
+ bpy.ops.object.editmode_toggle()
+ bpy.ops.object.editmode_toggle()
+ ob1.select_set(False)
+ ob1.hide_render = True
ob = convert_object_to_mesh(ob0,False,False)
ob.name = 'DualMesh'
- #ob = bpy.data.objects.new("DualMesh", convert_object_to_mesh(ob0,False,False))
- #bpy.context.collection.objects.link(ob)
- #bpy.context.view_layer.objects.active = ob
- #ob.select_set(True)
+ ob.tissue.tissue_type = 'TESSELLATE'
+ ob.tissue.bool_lock = True
ob.tissue_tessellate.component = ob1
ob.tissue_tessellate.generator = ob0
ob.tissue_tessellate.gen_modifiers = self.apply_modifiers
ob.tissue_tessellate.merge = True
ob.tissue_tessellate.bool_dissolve_seams = True
- if self.source_faces == 'TRI': ob.tissue_tessellate.fill_mode = 'FAN'
- bpy.ops.object.update_tessellate()
+ if self.source_faces == 'TRI': ob.tissue_tessellate.fill_mode = 'TRI'
+ bpy.ops.object.tissue_update_tessellate()
+ ob.tissue.bool_lock = False
ob.location = ob0.location
ob.matrix_world = ob0.matrix_world
return {'FINISHED'}
+ def invoke(self, context, event):
+ return context.window_manager.invoke_props_dialog(self)
+
class dual_mesh(Operator):
bl_idname = "object.dual_mesh"
bl_label = "Convert to Dual Mesh"
@@ -135,9 +146,9 @@ class dual_mesh(Operator):
items=[
('BEAUTY', 'Beauty', 'Arrange the new triangles evenly'),
('CLIP', 'Clip',
- 'Split the polygons with an ear clipping algorithm')],
- name="Polygon Method",
- description="Method for splitting the polygons into triangles",
+ 'Split the N-gon with an ear clipping algorithm')],
+ name="N-gon Method",
+ description="Method for splitting the N-gons into triangles",
default="BEAUTY",
options={'LIBRARY_EDITABLE'}
)
@@ -156,12 +167,12 @@ class dual_mesh(Operator):
mode = context.mode
if mode == 'EDIT_MESH':
mode = 'EDIT'
- act = bpy.context.active_object
+ act = context.active_object
if mode != 'OBJECT':
sel = [act]
bpy.ops.object.mode_set(mode='OBJECT')
else:
- sel = bpy.context.selected_objects
+ sel = context.selected_objects
doneMeshes = []
for ob0 in sel:
@@ -190,7 +201,7 @@ class dual_mesh(Operator):
ob.data = ob.data.copy()
bpy.ops.object.select_all(action='DESELECT')
ob.select_set(True)
- bpy.context.view_layer.objects.active = ob0
+ context.view_layer.objects.active = ob0
bpy.ops.object.mode_set(mode='EDIT')
# prevent borders erosion
@@ -256,23 +267,23 @@ class dual_mesh(Operator):
bpy.ops.object.mode_set(mode='EDIT')
# select quad faces
- bpy.context.tool_settings.mesh_select_mode = (False, False, True)
+ context.tool_settings.mesh_select_mode = (False, False, True)
bpy.ops.mesh.select_face_by_sides(number=4, extend=False)
# deselect boundaries
bpy.ops.object.mode_set(mode='OBJECT')
for i in bound_v:
- bpy.context.active_object.data.vertices[i].select = False
+ context.active_object.data.vertices[i].select = False
for i in bound_e:
- bpy.context.active_object.data.edges[i].select = False
+ context.active_object.data.edges[i].select = False
for i in bound_p:
- bpy.context.active_object.data.polygons[i].select = False
+ context.active_object.data.polygons[i].select = False
bpy.ops.object.mode_set(mode='EDIT')
- bpy.context.tool_settings.mesh_select_mode = (False, False, True)
+ context.tool_settings.mesh_select_mode = (False, False, True)
bpy.ops.mesh.edge_face_add()
- bpy.context.tool_settings.mesh_select_mode = (True, False, False)
+ context.tool_settings.mesh_select_mode = (True, False, False)
bpy.ops.mesh.select_all(action='DESELECT')
# delete boundaries
@@ -314,11 +325,12 @@ class dual_mesh(Operator):
for o in clones:
o.data = ob.data
+ bm.free()
for o in sel:
o.select_set(True)
- bpy.context.view_layer.objects.active = act
+ context.view_layer.objects.active = act
bpy.ops.object.mode_set(mode=mode)
return {'FINISHED'}
diff --git a/mesh_tissue/lattice.py b/mesh_tissue/lattice.py
index 296ad91a..5d9c66f0 100644
--- a/mesh_tissue/lattice.py
+++ b/mesh_tissue/lattice.py
@@ -132,7 +132,7 @@ def grid_from_mesh(mesh, swap_uv):
if len(faces_loop) == 0:
running_grid = False
-
+ bm.free()
return verts_grid, edges_grid, faces_grid
@@ -225,12 +225,20 @@ class lattice_along_surface(Operator):
soft_max=1,
description="Lattice displace"
)
+ weight_factor : FloatProperty(
+ name="Factor",
+ default=0,
+ min=0.000,
+ max=1.000,
+ precision=3,
+ description="Thickness factor to use for zero vertex group influence"
+ )
grid_object = ""
source_object = ""
@classmethod
def poll(cls, context):
- try: return bpy.context.object.mode == 'OBJECT'
+ try: return context.object.mode == 'OBJECT'
except: return False
def draw(self, context):
@@ -249,6 +257,9 @@ class lattice_along_surface(Operator):
)
row = col.row()
row.prop(self, "use_groups")
+ if self.use_groups:
+ row = col.row()
+ row.prop(self, "weight_factor")
col.separator()
col.label(text="Scale:")
col.prop(
@@ -277,16 +288,16 @@ class lattice_along_surface(Operator):
def execute(self, context):
if self.source_object == self.grid_object == "" or True:
- if len(bpy.context.selected_objects) != 2:
+ if len(context.selected_objects) != 2:
self.report({'ERROR'}, "Please, select two objects")
return {'CANCELLED'}
- grid_obj = bpy.context.object
+ grid_obj = context.object
if grid_obj.type not in ('MESH', 'CURVE', 'SURFACE'):
self.report({'ERROR'}, "The surface object is not valid. Only Mesh,"
"Curve and Surface objects are allowed.")
return {'CANCELLED'}
obj = None
- for o in bpy.context.selected_objects:
+ for o in context.selected_objects:
if o.name != grid_obj.name and o.type in \
('MESH', 'CURVE', 'SURFACE', 'FONT'):
obj = o
@@ -305,9 +316,9 @@ class lattice_along_surface(Operator):
grid_obj = bpy.data.objects[self.grid_object]
obj = bpy.data.objects[self.source_object]
obj_me = simple_to_mesh(obj)# obj.to_mesh(bpy.context.depsgraph, apply_modifiers=True)
- for o in bpy.context.selected_objects: o.select_set(False)
+ for o in context.selected_objects: o.select_set(False)
grid_obj.select_set(True)
- bpy.context.view_layer.objects.active = grid_obj
+ context.view_layer.objects.active = grid_obj
temp_grid_obj = grid_obj.copy()
temp_grid_obj.data = simple_to_mesh(grid_obj)
@@ -318,7 +329,7 @@ class lattice_along_surface(Operator):
if len(grid_mesh.polygons) > 64 * 64:
bpy.data.objects.remove(temp_grid_obj)
- bpy.context.view_layer.objects.active = obj
+ context.view_layer.objects.active = obj
obj.select_set(True)
self.report({'ERROR'}, "Maximum resolution allowed for Lattice is 64")
return {'CANCELLED'}
@@ -347,7 +358,7 @@ class lattice_along_surface(Operator):
bb = max - min
lattice_loc = (max + min) / 2
bpy.ops.object.add(type='LATTICE')
- lattice = bpy.context.active_object
+ lattice = context.active_object
lattice.location = lattice_loc
lattice.scale = Vector((bb.x / self.scale_x, bb.y / self.scale_y,
bb.z / self.scale_z))
@@ -359,16 +370,14 @@ class lattice_along_surface(Operator):
if bb.z == 0:
lattice.scale.z = 1
- bpy.context.view_layer.objects.active = obj
+ context.view_layer.objects.active = obj
bpy.ops.object.modifier_add(type='LATTICE')
obj.modifiers[-1].object = lattice
# set as parent
if self.set_parent:
- obj.select_set(True)
- lattice.select_set(True)
- bpy.context.view_layer.objects.active = lattice
- bpy.ops.object.parent_set(type='LATTICE')
+ override = {'active_object': obj, 'selected_objects' : [lattice,obj]}
+ bpy.ops.object.parent_set(override, type='OBJECT', keep_transform=False)
# reading grid structure
verts_grid, edges_grid, faces_grid = grid_from_mesh(
@@ -384,15 +393,19 @@ class lattice_along_surface(Operator):
lattice.data.points_u = nu
lattice.data.points_v = nv
lattice.data.points_w = nw
+ if self.use_groups:
+ vg = temp_grid_obj.vertex_groups.active
+ weight_factor = self.weight_factor
for i in range(nu):
for j in range(nv):
for w in range(nw):
if self.use_groups:
try:
- displace = temp_grid_obj.vertex_groups.active.weight(
- verts_grid[i][j]) * scale_normal * bb.z
+ weight_influence = vg.weight(verts_grid[i][j])
except:
- displace = 0#scale_normal * bb.z
+ weight_influence = 0
+ weight_influence = weight_influence * (1 - weight_factor) + weight_factor
+ displace = weight_influence * scale_normal * bb.z
else:
displace = scale_normal * bb.z
target_point = (grid_mesh.vertices[verts_grid[i][j]].co +
@@ -418,7 +431,7 @@ class lattice_along_surface(Operator):
lattice.select_set(True)
obj.select_set(False)
bpy.ops.object.delete(use_global=False)
- bpy.context.view_layer.objects.active = obj
+ context.view_layer.objects.active = obj
obj.select_set(True)
bpy.ops.object.modifier_remove(modifier=obj.modifiers[-1].name)
if nu > 64 or nv > 64:
@@ -433,18 +446,18 @@ class lattice_along_surface(Operator):
#lattice.select_set(False)
obj.select_set(False)
#bpy.ops.object.delete(use_global=False)
- bpy.context.view_layer.objects.active = lattice
+ context.view_layer.objects.active = lattice
lattice.select_set(True)
if self.high_quality_lattice:
- bpy.context.object.data.points_w = 8
+ context.object.data.points_w = 8
else:
- bpy.context.object.data.use_outside = True
+ context.object.data.use_outside = True
if self.hide_lattice:
bpy.ops.object.hide_view_set(unselected=False)
- bpy.context.view_layer.objects.active = obj
+ context.view_layer.objects.active = obj
obj.select_set(True)
lattice.select_set(False)
diff --git a/mesh_tissue/material_tools.py b/mesh_tissue/material_tools.py
new file mode 100644
index 00000000..a734abeb
--- /dev/null
+++ b/mesh_tissue/material_tools.py
@@ -0,0 +1,231 @@
+# ##### BEGIN GPL LICENSE BLOCK #####
+#
+# This program is free software; you can redistribute it and/or
+# modify it under the terms of the GNU General Public License
+# as published by the Free Software Foundation; either version 2
+# of the License, or (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this program; if not, write to the Free Software Foundation,
+# Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
+#
+# ##### END GPL LICENSE BLOCK #####
+
+# #
+# (c) Alessandro Zomparelli #
+# (2020) #
+# #
+# http://www.co-de-it.com/ #
+# #
+################################################################################
+
+import bpy
+import numpy as np
+
+import colorsys
+from numpy import *
+
+from bpy.types import (
+ Operator,
+ Panel
+ )
+
+from bpy.props import (
+ BoolProperty,
+ EnumProperty,
+ FloatProperty,
+ IntProperty,
+ StringProperty,
+ FloatVectorProperty,
+ IntVectorProperty
+)
+
+from .utils import *
+
+class random_materials(Operator):
+ bl_idname = "object.random_materials"
+ bl_label = "Random Materials"
+ bl_description = "Assign random materials to the faces of the mesh"
+ bl_options = {'REGISTER', 'UNDO'}
+
+ prefix : StringProperty(
+ name="Prefix", default="Random.", description="Name prefix")
+
+ color_A : FloatVectorProperty(name="Color A",
+ subtype='COLOR_GAMMA',
+ min=0,
+ max=1,
+ default=[0,0,0])
+
+ color_B : FloatVectorProperty(name="Color B",
+ subtype='COLOR_GAMMA',
+ min=0,
+ max=1,
+ default=[1,1,1])
+
+ hue : FloatProperty(name="Hue", min=0, max=1, default=0.5)
+ hue_variation : FloatProperty(name="Hue Variation", min=0, max=1, default=0.6)
+
+ seed : IntProperty(
+ name="Seed", default=0, description="Random seed")
+
+ count : IntProperty(
+ name="Count", default=3, min=2, description="Count of random materials")
+
+ generate_materials : BoolProperty(
+ name="Generate Materials", default=False, description="Automatically generates new materials")
+
+ random_colors : BoolProperty(
+ name="Random Colors", default=True, description="Colors are automatically generated")
+
+ executed = False
+
+ @classmethod
+ def poll(cls, context):
+ try: return context.object.type == 'MESH'
+ except: return False
+
+ def draw(self, context):
+ layout = self.layout
+ col = layout.column(align=True)
+ col.prop(self, "seed")
+ col.prop(self, "generate_materials")
+ if self.generate_materials:
+ col.prop(self, "prefix")
+ col.separator()
+ col.prop(self, "count")
+ #row = col.row(align=True)
+ col.separator()
+ col.label(text='Colors:')
+ col.prop(self, "hue")
+ col.prop(self, "hue_variation")
+ #col.prop(self, "random_colors")
+ if not self.random_colors:
+ col.prop(self, "color_A")
+ col.prop(self, "color_B")
+
+ def execute(self, context):
+ bpy.ops.object.mode_set(mode='OBJECT')
+ ob = context.active_object
+ if len(ob.material_slots) == 0 and not self.executed:
+ self.generate_materials = True
+ if self.generate_materials:
+ colA = self.color_A
+ colB = self.color_B
+ h1 = (self.hue - self.hue_variation/2)
+ h2 = (self.hue + self.hue_variation/2)
+ count = self.count
+ ob.data.materials.clear()
+ materials = []
+ for i in range(count):
+ mat_name = '{}{:03d}'.format(self.prefix,i)
+ mat = bpy.data.materials.new(mat_name)
+ if self.random_colors:
+ mat.diffuse_color = colorsys.hsv_to_rgb((h1 + (h2-h1)/(count)*i)%1, 1, 1)[:] + (1,)
+ else:
+ mat.diffuse_color = list(colA + (colB - colA)/(count-1)*i) + [1]
+ ob.data.materials.append(mat)
+ else:
+ count = len(ob.material_slots)
+ np.random.seed(seed=self.seed)
+ n_faces = len(ob.data.polygons)
+ if count > 0:
+ rand = list(np.random.randint(count, size=n_faces))
+ ob.data.polygons.foreach_set('material_index',rand)
+ ob.data.update()
+ self.executed = True
+ return {'FINISHED'}
+
+class weight_to_materials(Operator):
+ bl_idname = "object.weight_to_materials"
+ bl_label = "Weight to Materials"
+ bl_description = "Assign materials to the faces of the mesh according to the active Vertex Group"
+ bl_options = {'REGISTER', 'UNDO'}
+
+ prefix : StringProperty(
+ name="Prefix", default="Weight.", description="Name prefix")
+
+ hue : FloatProperty(name="Hue", min=0, max=1, default=0.5)
+ hue_variation : FloatProperty(name="Hue Variation", min=0, max=1, default=0.3)
+
+ count : IntProperty(
+ name="Count", default=3, min=2, description="Count of random materials")
+
+ generate_materials : BoolProperty(
+ name="Generate Materials", default=False, description="Automatically generates new materials")
+
+ mode : EnumProperty(
+ items=(
+ ('MIN', "Min", "Use the min weight value"),
+ ('MAX', "Max", "Use the max weight value"),
+ ('MEAN', "Mean", "Use the mean weight value")
+ ),
+ default='MEAN',
+ name="Mode"
+ )
+
+ vg = None
+
+ @classmethod
+ def poll(cls, context):
+ try: return context.object.type == 'MESH'
+ except: return False
+
+ def draw(self, context):
+ layout = self.layout
+ col = layout.column(align=True)
+ col.prop(self, "mode")
+ col.prop(self, "generate_materials")
+ if self.generate_materials:
+ col.prop(self, "prefix")
+ col.separator()
+ col.prop(self, "count")
+ #row = col.row(align=True)
+ col.separator()
+ col.label(text='Colors:')
+ col.prop(self, "hue")
+ col.prop(self, "hue_variation")
+
+ def execute(self, context):
+ ob = context.active_object
+ if self.vg == None:
+ self.vg = ob.vertex_groups.active_index
+ vg = ob.vertex_groups[self.vg]
+ if vg == None:
+ self.report({'ERROR'}, "The selected object doesn't have any Vertex Group")
+ return {'CANCELLED'}
+ weight = get_weight_numpy(vg, len(ob.data.vertices))
+ if self.generate_materials:
+ h1 = (self.hue - self.hue_variation/2)
+ h2 = (self.hue + self.hue_variation/2)
+ count = self.count
+ ob.data.materials.clear()
+ materials = []
+ for i in range(count):
+ mat_name = '{}{:03d}'.format(self.prefix,i)
+ mat = bpy.data.materials.new(mat_name)
+ mat.diffuse_color = colorsys.hsv_to_rgb((h1 + (h2-h1)/(count)*i)%1, 1, 1)[:] + (1,)
+ ob.data.materials.append(mat)
+ else:
+ count = len(ob.material_slots)
+
+ faces_weight = []
+ for p in ob.data.polygons:
+ verts_id = np.array([v for v in p.vertices])
+ face_weight = weight[verts_id]
+ if self.mode == 'MIN': w = face_weight.min()
+ if self.mode == 'MAX': w = face_weight.max()
+ if self.mode == 'MEAN': w = face_weight.mean()
+ faces_weight.append(w)
+ faces_weight = np.array(faces_weight)
+ faces_weight = faces_weight * count
+ faces_weight.astype('int')
+ ob.data.polygons.foreach_set('material_index',list(faces_weight))
+ ob.data.update()
+ bpy.ops.object.mode_set(mode='OBJECT')
+ return {'FINISHED'}
diff --git a/mesh_tissue/numba_functions.py b/mesh_tissue/numba_functions.py
index 5edc6176..86e83e54 100644
--- a/mesh_tissue/numba_functions.py
+++ b/mesh_tissue/numba_functions.py
@@ -1,19 +1,196 @@
# SPDX-License-Identifier: GPL-2.0-or-later
import numpy as np
+import time
+import sys
+
+bool_numba = False
+
try:
- from numba import jit
+ from .utils_pip import Pip
+ Pip._ensure_user_site_package()
+ from numba import jit, njit, guvectorize, float64, int32, prange
+ from numba.typed import List
+ bool_numba = True
+except:
+ pass
+ '''
+ try:
+ from .utils_pip import Pip
+ #Pip.upgrade_pip()
+ Pip.install('llvmlite')
+ Pip.install('numba')
+ from numba import jit, njit, guvectorize, float64, int32, prange
+ bool_numba = True
+ print('Tissue: Numba successfully installed!')
+ except:
+ print('Tissue: Numba not loaded correctly. Try restarting Blender')
+ '''
+
+if bool_numba:
+ #from numba import jit, njit, guvectorize, float64, int32, prange
+
@njit(parallel=True)
def numba_reaction_diffusion(n_verts, n_edges, edge_verts, a, b, brush, diff_a, diff_b, f, k, dt, time_steps):
    """Run `time_steps` Gray-Scott reaction-diffusion steps on a mesh graph.

    a, b are per-vertex concentration arrays, updated in place and returned.
    edge_verts is a flat array of vertex-index pairs, one pair per edge.
    """
    # Even positions hold the first vertex of each edge, odd the second.
    arr = np.arange(n_edges)*2
    id0 = edge_verts[arr]
    id1 = edge_verts[arr+1]
    for i in range(time_steps):
        # Fresh zeroed Laplacian accumulators every step.
        lap_a, lap_b = rd_init_laplacian(n_verts)
        numba_rd_laplacian(id0, id1, a, b, lap_a, lap_b)
        numba_rd_core(a, b, lap_a, lap_b, diff_a, diff_b, f, k, dt)
        # Re-apply the brush and clamp a/b to [0, 1] each step.
        numba_set_ab(a,b,brush)
    return a,b
+
@njit(parallel=False)
def integrate_field(n_edges, id0, id1, values, edge_flow, mult, time_steps):
    """Advect per-vertex `values` along edges in proportion to `edge_flow`,
    then clamp them to be non-negative, for `time_steps` iterations.

    NOTE(review): `values0 = values` creates an alias, not a snapshot, so
    reads of values0 below see values already modified within the same
    step — confirm whether `values.copy()` was intended. `mult` is
    currently unused (see trailing comments).
    """
    #n_edges = len(edge_flow)
    for i in range(time_steps):
        values0 = values
        for j in range(n_edges):
            v0 = id0[j]
            v1 = id1[j]
            values[v0] -= values0[v1] * edge_flow[j] * 0.001#mult[v1]
            values[v1] += values0[v0] * edge_flow[j] * 0.001#mult[v0]
        # Clamp both endpoints of every edge to zero.
        for j in range(n_edges):
            v0 = id0[j]
            v1 = id1[j]
            values[v0] = max(values[v0],0)
            values[v1] = max(values[v1],0)
    return values
+
@njit(parallel=True)
def numba_reaction_diffusion_anisotropic(n_verts, n_edges, edge_verts, a, b, brush, diff_a, diff_b, f, k, dt, time_steps, grad):
    """Anisotropic variant of numba_reaction_diffusion: the Laplacian of b
    is scaled per edge by `grad` (see numba_rd_laplacian_anisotropic).
    """
    arr = np.arange(n_edges)*2
    id0 = edge_verts[arr]
    id1 = edge_verts[arr+1]
    #grad = weight_grad[id0] - weight_grad[id1]
    #grad = np.abs(grad)
    #grad /= abs(np.max(grad))
    #grad = grad*0.98 + 0.02
    for i in range(time_steps):
        lap_a, lap_b = rd_init_laplacian(n_verts)
        numba_rd_laplacian_anisotropic(id0, id1, a, b, lap_a, lap_b, grad)
        numba_rd_core(a, b, lap_a, lap_b, diff_a, diff_b, f, k, dt)
        numba_set_ab(a,b,brush)
    return a,b
+
+ #@guvectorize(['(float64[:] ,float64[:] , float64[:], float64[:], float64[:], float64[:], float64[:], float64[:], float64)'],'(n),(n),(n),(n),(n),(n),(n),(n),()',target='parallel')
@njit(parallel=True)
def numba_rd_core(a, b, lap_a, lap_b, diff_a, diff_b, f, k, dt):
    """One Gray-Scott update of a and b (in place) from their Laplacians.

    f, k, diff_a, diff_b may be single-element arrays (uniform value)
    or full per-vertex arrays.
    """
    n = len(a)
    # Broadcast scalar-style (length-1) parameters to per-vertex arrays.
    _f = np.full(n, f[0]) if len(f) == 1 else f
    _k = np.full(n, k[0]) if len(k) == 1 else k
    _diff_a = np.full(n, diff_a[0]) if len(diff_a) == 1 else diff_a
    _diff_b = np.full(n, diff_b[0]) if len(diff_b) == 1 else diff_b

    for i in prange(n):
        fi = _f[i]
        ki = _k[i]
        diff_ai = _diff_a[i]
        diff_bi = _diff_b[i]
        # Reaction term a*b^2, then explicit Euler step.
        ab2 = a[i]*b[i]**2
        a[i] += (diff_ai * lap_a[i] - ab2 + fi*(1-a[i]))*dt
        b[i] += (diff_bi * lap_b[i] + ab2 - (ki+fi)*b[i])*dt
+
@njit(parallel=True)
def numba_rd_core_(a, b, lap_a, lap_b, diff_a, diff_b, f, k, dt):
    # Vectorized variant of numba_rd_core assuming uniform (array-wide)
    # parameters; appears unused — kept for reference.
    ab2 = a*b**2
    a += (diff_a*lap_a - ab2 + f*(1-a))*dt
    b += (diff_b*lap_b + ab2 - (k+f)*b)*dt
+
@njit(parallel=True)
def numba_set_ab(a, b, brush):
    """Add the brush contribution to b and clamp both a and b to [0, 1].

    brush may be a single-element array (uniform) or per-vertex.
    """
    n = len(a)
    _brush = np.full(n, brush[0]) if len(brush) == 1 else brush
    for i in prange(len(b)):
        b[i] += _brush[i]
        if b[i] < 0: b[i] = 0
        elif b[i] > 1: b[i] = 1
        if a[i] < 0: a[i] = 0
        elif a[i] > 1: a[i] = 1
+
+
+ #@guvectorize(['(float64[:] ,float64[:] ,float64[:] , float64[:], float64[:], float64[:])'],'(m),(m),(n),(n),(n),(n)',target='parallel')
@njit(parallel=True)
def numba_rd_laplacian(id0, id1, a, b, lap_a, lap_b):
    """Accumulate graph-Laplacian contributions of every edge into
    lap_a/lap_b (modified in place).

    NOTE(review): under prange, edges sharing a vertex perform concurrent
    `+=` on the same element — confirm this data race is acceptable for
    the simulation's accuracy needs.
    """
    for i in prange(len(id0)):
        v0 = id0[i]
        v1 = id1[i]
        lap_a[v0] += a[v1] - a[v0]
        lap_a[v1] += a[v0] - a[v1]
        lap_b[v0] += b[v1] - b[v0]
        lap_b[v1] += b[v0] - b[v1]
    #return lap_a, lap_b
+
@njit(parallel=True)
def numba_rd_laplacian_anisotropic(id0, id1, a, b, lap_a, lap_b, grad):
    """Laplacian accumulation with the b-channel scaled per edge by `grad`.

    NOTE(review): lap_b[v0] uses `-=` while lap_a and lap_b[v1] use `+=`;
    the sign asymmetry looks suspicious compared with numba_rd_laplacian —
    confirm it is intentional.
    """
    for i in prange(len(id0)):
        v0 = id0[i]
        v1 = id1[i]
        lap_a[v0] += (a[v1] - a[v0])
        lap_a[v1] += (a[v0] - a[v1])
        lap_b[v0] -= (b[v1] - b[v0])*grad[i]
        lap_b[v1] += (b[v0] - b[v1])*grad[i]
    #return lap_a, lap_b
@njit(parallel=True)
def numba_rd_neigh_vertices(edge_verts):
    """Split the flat edge_verts pair array into two per-edge index arrays.

    Returns (id0, id1): arrays holding the first and second vertex index
    of each edge.
    """
    # Bug fix: `len(edge_verts)/2` is a float, which is invalid both as
    # an np.zeros size and as a prange bound — use integer division.
    # Allocate integer arrays, since these hold vertex indices.
    n_edges = len(edge_verts) // 2
    id0 = np.zeros(n_edges, dtype=np.int64)
    id1 = np.zeros(n_edges, dtype=np.int64)
    for i in prange(n_edges):
        id0[i] = edge_verts[i*2]    # first vertex index for each edge
        id1[i] = edge_verts[i*2+1]  # second vertex index for each edge
    return id0, id1
+
+ #@guvectorize(['(float64[:] ,float64[:] , float64[:], float64[:], float64[:])'],'(m),(n),(n),(n),(n)',target='parallel')
@njit(parallel=True)
#@njit
def numba_rd_laplacian_(edge_verts, a, b, lap_a, lap_b):
    """Laplacian accumulation taking the flat edge array directly
    (variant of numba_rd_laplacian; appears unused).
    """
    # Bug fix: `len(edge_verts)/2` is a float and not a valid prange
    # bound — use integer division.
    for i in prange(len(edge_verts) // 2):
        v0 = edge_verts[i*2]
        v1 = edge_verts[i*2+1]
        lap_a[v0] += a[v1] - a[v0]
        lap_a[v1] += a[v0] - a[v1]
        lap_b[v0] += b[v1] - b[v0]
        lap_b[v1] += b[v0] - b[v1]
    #return lap_a, lap_b
+
@njit(parallel=True)
def rd_fill_laplacian(lap_a, lap_b, id0, id1, lap_a0, lap_b0):
    """Scatter per-edge Laplacian increments (lap_a0/lap_b0) into the
    per-vertex accumulators lap_a/lap_b (modified in place).

    NOTE(review): concurrent `+=`/`-=` on shared vertex slots under
    prange may race, as in numba_rd_laplacian — confirm acceptable.
    """
    #for i, j, la0, lb0 in zip(id0,id1,lap_a0,lap_b0):
    for index in prange(len(id0)):
        i = id0[index]
        j = id1[index]
        la0 = lap_a0[index]
        lb0 = lap_b0[index]
        lap_a[i] += la0
        lap_b[i] += lb0
        lap_a[j] -= la0
        lap_b[j] -= lb0
+
@njit(parallel=True)
def rd_init_laplacian(n_verts):
    """Allocate zero-filled per-vertex Laplacian accumulators for a and b."""
    return np.zeros(n_verts), np.zeros(n_verts)
+
+ '''
@jit
- def numba_reaction_diffusion(n_verts, n_edges, edge_verts, a, b, diff_a, diff_b, f, k, dt, time_steps):
+ def numba_reaction_diffusion(n_verts, n_edges, edge_verts, a, b, diff_a, diff_b, f, k, dt, time_steps, db):
arr = np.arange(n_edges)*2
id0 = edge_verts[arr] # first vertex indices for each edge
id1 = edge_verts[arr+1] # second vertex indices for each edge
+ #dgrad = abs(grad[id1] - grad[id0])
for i in range(time_steps):
lap_a = np.zeros(n_verts)
lap_b = np.zeros(n_verts)
- lap_a0 = a[id1] - a[id0] # laplacian increment for first vertex of each edge
+ b += db
+ lap_a0 = a[id1] - a[id0] # laplacian increment for first vertex of each edge
lap_b0 = b[id1] - b[id0] # laplacian increment for first vertex of each edge
+ #lap_a0 *= dgrad
+ #lap_b0 *= dgrad
for i, j, la0, lb0 in zip(id0,id1,lap_a0,lap_b0):
lap_a[i] += la0
@@ -26,5 +203,198 @@ try:
a += (diff_a*lap_a - ab2 + f*(1-a))*dt
b += (diff_b*lap_b + ab2 - (k+f)*b)*dt
return a, b
-except:
- pass
+ '''
+ '''
+ @njit(parallel=True)
+ def numba_lerp2_(v00, v10, v01, v11, vx, vy):
+ sh = v00.shape
+ co2 = np.zeros((sh[0],len(vx),sh[-1]))
+ for i in prange(len(v00)):
+ for j in prange(len(vx)):
+ for k in prange(len(v00[0][0])):
+ co0 = v00[i][0][k] + (v10[i][0][k] - v00[i][0][k]) * vx[j][0]
+ co1 = v01[i][0][k] + (v11[i][0][k] - v01[i][0][k]) * vx[j][0]
+ co2[i][j][k] = co0 + (co1 - co0) * vy[j][0]
+ return co2
+
+
+ @njit(parallel=True)
+ def numba_lerp2_vec(v0, vx, vy):
+ n_faces = v0.shape[0]
+ co2 = np.zeros((n_faces,len(vx),3))
+ for i in prange(n_faces):
+ for j in prange(len(vx)):
+ for k in prange(3):
+ co0 = v0[i][0][k] + (v0[i][1][k] - v0[i][0][k]) * vx[j][0]
+ co1 = v0[i][3][k] + (v0[i][2][k] - v0[i][3][k]) * vx[j][0]
+ co2[i][j][k] = co0 + (co1 - co0) * vy[j][0]
+ return co2
+
+ @njit(parallel=True)
+ def numba_lerp2__(val, vx, vy):
+ n_faces = len(val)
+ co2 = np.zeros((n_faces,len(vx),1))
+ for i in prange(n_faces):
+ for j in prange(len(vx)):
+ co0 = val[i][0] + (val[i][1] - val[i][0]) * val[j][0]
+ co1 = val[i][3] + (val[i][2] - val[i][3]) * val[j][0]
+ co2[i][j][0] = co0 + (co1 - co0) * vy[j][0]
+ return co2
+ '''
+
@njit(parallel=True)
def numba_combine_and_flatten(arrays):
    """Flatten a sequence of (n_verts, 3) coordinate arrays into one flat
    list of floats (x0,y0,z0, x1,y1,z1, ...), suitable for foreach_set.
    """
    n_faces = len(arrays)
    n_verts = len(arrays[0])
    new_list = [0.0]*n_faces*n_verts*3
    for i in prange(n_faces):
        for j in prange(n_verts):
            for k in prange(3):
                new_list[i*n_verts*3+j*3+k] = arrays[i][j,k]
    return new_list
+
@njit(parallel=True)
def numba_calc_thickness_area_weight(co2,n2,vz,a,weight):
    """Displace patch vertices co2 along normals n2 by the z-offset vz,
    scaled per face by area factor `a` and per vertex by `weight`.

    n2/a/weight may have fewer vertex entries than co2; min(j, last)
    clamps the lookup so the last entry is reused for the remainder.
    """
    shape = co2.shape
    n_patches = shape[0]
    n_verts = shape[1]
    n_co = shape[2]
    nn = n2.shape[1]-1
    na = a.shape[1]-1
    nw = weight.shape[1]-1
    co3 = np.zeros((n_patches,n_verts,n_co))
    for i in prange(n_patches):
        for j in prange(n_verts):
            for k in prange(n_co):
                co3[i,j,k] = co2[i,j,k] + n2[i,min(j,nn),k] * vz[0,j,0] * a[i,min(j,na),0] * weight[i,min(j,nw),0]
    return co3
+ '''
+ @njit(parallel=True)
+ def numba_calc_thickness_area(co2,n2,vz,a):
+ shape = co2.shape
+ n_patches = shape[0]
+ n_verts = shape[1]
+ n_co = shape[2]
+ #co3 = [0.0]*n_patches*n_verts*n_co #np.zeros((n_patches,n_verts,n_co))
+ co3 = np.zeros((n_patches,n_verts,n_co))
+ for i in prange(n_patches):
+ for j in prange(n_verts):
+ for k in prange(n_co):
+ #co3[i,j,k] = co2[i,j,k] + n2[i,j,k] * vz[0,j,0] * a[i,j,0]
+ co3[i,j,k] = co2[i,j,k] + n2[i,min(j,nor_len),k] * vz[0,j,0] * a[i,j,0]
+ return co3
+ '''
@njit(parallel=True)
def numba_calc_thickness_weight(co2,n2,vz,weight):
    """Displace patch vertices co2 along normals n2 by vz, scaled per
    vertex by `weight` (no area factor; cf. the _area_weight variant).
    """
    shape = co2.shape
    n_patches = shape[0]
    n_verts = shape[1]
    n_co = shape[2]
    nn = n2.shape[1]-1
    nw = weight.shape[1]-1
    co3 = np.zeros((n_patches,n_verts,n_co))
    for i in prange(n_patches):
        for j in prange(n_verts):
            for k in prange(n_co):
                # min(j, last) clamps lookups into shorter n2/weight arrays.
                co3[i,j,k] = co2[i,j,k] + n2[i,min(j,nn),k] * vz[0,j,0] * weight[i,min(j,nw),0]
    return co3
+
@njit(parallel=True)
def numba_calc_thickness(co2,n2,vz):
    """Displace patch vertices co2 along normals n2 by the z-offset vz
    (uniform thickness: no area or weight scaling).
    """
    shape = co2.shape
    n_patches = shape[0]
    n_verts = shape[1]
    n_co = shape[2]
    nn = n2.shape[1]-1
    co3 = np.zeros((n_patches,n_verts,n_co))
    for i in prange(n_patches):
        for j in prange(n_verts):
            for k in prange(n_co):
                # min(j, nn) clamps lookups into a shorter normals array.
                co3[i,j,k] = co2[i,j,k] + n2[i,min(j,nn),k] * vz[0,j,0]
    return co3
+
@njit(parallel=True)
def numba_interp_points(v00, v10, v01, v11, vx, vy):
    """Bilinear interpolation between the four corner-point grids of each
    patch, using per-vertex parameters vx, vy in [0, 1].
    """
    n_patches = v00.shape[0]
    n_verts = vx.shape[1]
    n_verts0 = v00.shape[1]
    n_co = v00.shape[2]
    vxy = np.zeros((n_patches,n_verts,n_co))
    for i in prange(n_patches):
        for j in prange(n_verts):
            # Clamp source index when the corner grids are shorter.
            j0 = min(j,n_verts0-1)
            for k in prange(n_co):
                co0 = v00[i,j0,k] + (v10[i,j0,k] - v00[i,j0,k]) * vx[0,j,0]
                co1 = v01[i,j0,k] + (v11[i,j0,k] - v01[i,j0,k]) * vx[0,j,0]
                vxy[i,j,k] = co0 + (co1 - co0) * vy[0,j,0]
    return vxy
+
@njit(parallel=True)
def numba_interp_points_sk(v00, v10, v01, v11, vx, vy):
    """Bilinear interpolation like numba_interp_points, with an extra
    leading shape-key axis (n_patches, n_sk, n_verts, n_co).
    """
    n_patches = v00.shape[0]
    n_sk = v00.shape[1]
    n_verts = v00.shape[2]
    n_co = v00.shape[3]
    vxy = np.zeros((n_patches,n_sk,n_verts,n_co))
    for i in prange(n_patches):
        for sk in prange(n_sk):
            for j in prange(n_verts):
                for k in prange(n_co):
                    co0 = v00[i,sk,j,k] + (v10[i,sk,j,k] - v00[i,sk,j,k]) * vx[0,sk,j,0]
                    co1 = v01[i,sk,j,k] + (v11[i,sk,j,k] - v01[i,sk,j,k]) * vx[0,sk,j,0]
                    vxy[i,sk,j,k] = co0 + (co1 - co0) * vy[0,sk,j,0]
    return vxy
+
@njit
def numba_lerp(v0, v1, x):
    """Linear interpolation between v0 and v1 at parameter x."""
    return v0 + (v1 - v0) * x
+
@njit
def numba_lerp2(v00, v10, v01, v11, vx, vy):
    """Bilinear interpolation of four values/arrays at parameters vx, vy."""
    co0 = numba_lerp(v00, v10, vx)
    co1 = numba_lerp(v01, v11, vx)
    co2 = numba_lerp(co0, co1, vy)
    return co2
+
@njit(parallel=True)
def numba_lerp2_________________(v00, v10, v01, v11, vx, vy):
    # Explicit-loop variant of numba_lerp2 over 3-D arrays; the mangled
    # name suggests it was parked/disabled — appears unused.
    ni = len(v00)
    nj = len(v00[0])
    nk = len(v00[0][0])
    co2 = np.zeros((ni,nj,nk))
    for i in prange(ni):
        for j in prange(nj):
            for k in prange(nk):
                _v00 = v00[i,j,k]
                _v01 = v01[i,j,k]
                _v10 = v10[i,j,k]
                _v11 = v11[i,j,k]
                co0 = _v00 + (_v10 - _v00) * vx[i,j,k]
                co1 = _v01 + (_v11 - _v01) * vx[i,j,k]
                co2[i,j,k] = co0 + (co1 - co0) * vy[i,j,k]
    return co2
+
@njit(parallel=True)
def numba_lerp2_4(v00, v10, v01, v11, vx, vy):
    # NOTE(review): the innermost `w` index is never used in the body and
    # the 4-D output is assigned with only three indices (co2[i,j,k]) —
    # this looks unfinished or dead; confirm before relying on it.
    ni = len(v00)
    nj = len(v00[0])
    nk = len(v00[0][0])
    nw = len(v00[0][0][0])
    co2 = np.zeros((ni,nj,nk,nw))
    for i in prange(ni):
        for j in prange(nj):
            for k in prange(nk):
                for w in prange(nw):
                    _v00 = v00[i,j,k]
                    _v01 = v01[i,j,k]
                    _v10 = v10[i,j,k]
                    _v11 = v11[i,j,k]
                    co0 = _v00 + (_v10 - _v00) * vx[i,j,k]
                    co1 = _v01 + (_v11 - _v01) * vx[i,j,k]
                    co2[i,j,k] = co0 + (co1 - co0) * vy[i,j,k]
    return co2
+
+
+#except:
+# print("Tissue: Numba cannot be installed. Try to restart Blender.")
+# pass
diff --git a/mesh_tissue/polyhedra.py b/mesh_tissue/polyhedra.py
new file mode 100644
index 00000000..394d605a
--- /dev/null
+++ b/mesh_tissue/polyhedra.py
@@ -0,0 +1,557 @@
+# ##### BEGIN GPL LICENSE BLOCK #####
+#
+# This program is free software; you can redistribute it and/or
+# modify it under the terms of the GNU General Public License
+# as published by the Free Software Foundation; either version 2
+# of the License, or (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this program; if not, write to the Free Software Foundation,
+# Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
+#
+# ##### END GPL LICENSE BLOCK #####
+
+# ---------------------------- ADAPTIVE DUPLIFACES --------------------------- #
+# ------------------------------- version 0.84 ------------------------------- #
+# #
+# Creates duplicates of selected mesh to active morphing the shape according #
+# to target faces. #
+# #
+# (c) Alessandro Zomparelli #
+# (2017) #
+# #
+# http://www.co-de-it.com/ #
+# #
+# ############################################################################ #
+
+
+import bpy
+from bpy.types import (
+ Operator,
+ Panel,
+ PropertyGroup,
+ )
+from bpy.props import (
+ BoolProperty,
+ EnumProperty,
+ FloatProperty,
+ IntProperty,
+ StringProperty,
+ PointerProperty
+ )
+from mathutils import Vector, Quaternion, Matrix
+import numpy as np
+from math import *
+import random, time, copy
+import bmesh
+from .utils import *
+
class polyhedra_wireframe(Operator):
    """Tissue operator: detect the closed cells (polyhedra) of a manifold
    mesh and build a wireframe frame around each cell's faces.
    """
    bl_idname = "object.polyhedra_wireframe"
    bl_label = "Tissue Polyhedra Wireframe"
    bl_description = "Generate wireframes around the faces.\
        \nDoesn't works with boundary edges.\
        \n(Experimental)"
    bl_options = {'REGISTER', 'UNDO'}

    # Half of this value is used as the offset on each side of a face edge.
    thickness : FloatProperty(
        name="Thickness", default=0.1, min=0.001, soft_max=200,
        description="Wireframe thickness"
        )

    subdivisions : IntProperty(
        name="Segments", default=1, min=1, soft_max=10,
        description="Max sumber of segments, used for the longest edge"
        )

    #regular_sections : BoolProperty(
    #    name="Regular Sections", default=False,
    #    description="Turn inner loops into polygons"
    #    )

    dissolve_inners : BoolProperty(
        name="Dissolve Inners", default=False,
        description="Dissolve inner edges"
        )

    @classmethod
    def poll(cls, context):
        # Active mesh object in Object Mode required.
        try:
            #bool_tessellated = context.object.tissue_tessellate.generator != None
            ob = context.object
            return ob.type == 'MESH' and ob.mode == 'OBJECT'# and bool_tessellated
        except:
            return False

    def invoke(self, context, event):
        # Show the options dialog before running.
        return context.window_manager.invoke_props_dialog(self)

    def execute(self, context):
        """Build the wireframe mesh.

        Pipeline: subdivide long edges, duplicate every face front/back,
        group the doubled faces into closed polyhedra by walking around
        each edge, then offset frame faces inward and thicken them.
        Creates two new objects ("Polyhedra" and "Wireframe").
        """

        merge_dist = self.thickness*0.001

        subs = self.subdivisions

        start_time = time.time()
        ob = context.object
        me = simple_to_mesh(ob)
        bm = bmesh.new()
        bm.from_mesh(me)

        bm.verts.ensure_lookup_table()
        bm.edges.ensure_lookup_table()
        bm.faces.ensure_lookup_table()

        # Subdivide edges proportionally to their length: longer edges
        # get more cuts, bucketed by multiples of the max segment length.
        proportional_subs = True
        if subs > 1 and proportional_subs:
            wire_length = [e.calc_length() for e in bm.edges]
            all_edges = list(bm.edges)
            max_segment = max(wire_length)/subs
            split_edges = [[] for i in range(subs+1)]
            for e, l in zip(all_edges, wire_length):
                split_edges[int(l//max_segment)].append(e)
            for i in range(2,subs):
                perc = {}
                for e in split_edges[i]:
                    perc[e]=0.1
                bmesh.ops.bisect_edges(bm, edges=split_edges[i], cuts=i, edge_percents=perc)

        ### Create double faces
        # Every face is duplicated twice: once with original winding and
        # once reversed, so each polyhedron gets an inward-facing shell.
        double_faces = []
        double_layer_edge = []
        double_layer_piece = []
        for f in bm.faces:
            verts0 = [v.co for v in f.verts]
            verts1 = [v.co for v in f.verts]
            verts1.reverse()
            double_faces.append(verts0)
            double_faces.append(verts1)

        # Create new bmesh object and data layers
        bm1 = bmesh.new()

        # Create faces and assign Edge Layers
        for verts in double_faces:
            new_verts = []
            for v in verts:
                vert = bm1.verts.new(v)
                new_verts.append(vert)
            bm1.faces.new(new_verts)

        bm1.verts.ensure_lookup_table()
        bm1.edges.ensure_lookup_table()
        bm1.faces.ensure_lookup_table()

        n_faces = len(bm.faces)
        n_doubles = len(bm1.faces)

        # Each polyhedron is a list of signed 1-based face ids:
        # +(index+1) keeps the face's winding, -(index+1) means flipped.
        polyhedra = []

        for e in bm.edges:
            done = []

            # ERROR: Naked edges
            e_faces = len(e.link_faces)
            if e_faces < 2:
                bm.free()
                bm1.free()
                message = "Naked edges are not allowed"
                self.report({'ERROR'}, message)
                return {'CANCELLED'}

            edge_vec = e.verts[1].co - e.verts[0].co

            # run first face
            for i1 in range(e_faces-1):
                f1 = e.link_faces[i1]
                #edge_verts1 = [v.index for v in f1.verts if v in e.verts]
                verts1 = [v.index for v in f1.verts]
                va1 = verts1.index(e.verts[0].index)
                vb1 = verts1.index(e.verts[1].index)
                # check if order of the edge matches the order of the face
                dir1 = va1 == (vb1+1)%len(verts1)
                edge_vec1 = edge_vec if dir1 else -edge_vec

                # run second face
                faces2 = []
                normals2 = []
                for i2 in range(i1+1,e_faces):
                    #for i2 in range(n_faces):
                    if i1 == i2: continue
                    f2 = e.link_faces[i2]
                    f2.normal_update()
                    #edge_verts2 = [v.index for v in f2.verts if v in e.verts]
                    verts2 = [v.index for v in f2.verts]
                    va2 = verts2.index(e.verts[0].index)
                    vb2 = verts2.index(e.verts[1].index)
                    # check if order of the edge matches the order of the face
                    dir2 = va2 == (vb2+1)%len(verts2)
                    # check for normal consistency
                    if dir1 != dir2:
                        # add face
                        faces2.append(f2.index+1)
                        normals2.append(f2.normal)
                    else:
                        # add flipped face
                        faces2.append(-(f2.index+1))
                        normals2.append(-f2.normal)



                # find first polyhedra (positive)
                # The neighbor face with the smallest rotation angle around
                # the edge belongs to the same cell.
                plane_x = f1.normal # normal
                plane_y = plane_x.cross(edge_vec1) # tangent face perp edge
                id1 = (f1.index+1)

                min_angle0 = 10000

                # check consistent faces
                if id1 not in done:
                    id2 = None
                    min_angle = min_angle0
                    for i2, n2 in zip(faces2,normals2):
                        v2 = flatten_vector(-n2, plane_x, plane_y)
                        angle = vector_rotation(v2)
                        if angle < min_angle:
                            id2 = i2
                            min_angle = angle
                    if id2: done.append(id2)
                    new_poly = True
                    # add to existing polyhedron
                    for p in polyhedra:
                        if id1 in p or id2 in p:
                            new_poly = False
                            if id2 not in p: p.append(id2)
                            if id1 not in p: p.append(id1)
                            break
                    # start new polyhedron
                    if new_poly: polyhedra.append([id1, id2])

                # find second polyhedra (negative)
                plane_x = -f1.normal # normal
                plane_y = plane_x.cross(-edge_vec1) # tangent face perp edge
                id1 = -(f1.index+1)

                if id1 not in done:
                    id2 = None
                    min_angle = min_angle0
                    for i2, n2 in zip(faces2, normals2):
                        v2 = flatten_vector(n2, plane_x, plane_y)
                        angle = vector_rotation(v2)
                        if angle < min_angle:
                            id2 = -i2
                            min_angle = angle
                    done.append(id2)
                    add = True
                    for p in polyhedra:
                        if id1 in p or id2 in p:
                            add = False
                            if id2 not in p: p.append(id2)
                            if id1 not in p: p.append(id1)
                            break
                    if add: polyhedra.append([id1, id2])

        # Merge partial polyhedra sharing a face id into single cells.
        for i in range(len(bm1.faces)):
            for j in (False,True):
                if j: id = i+1
                else: id = -(i+1)
                join = []
                keep = []
                for p in polyhedra:
                    if id in p: join += p
                    else: keep.append(p)
                if len(join) > 0:
                    keep.append(list(dict.fromkeys(join)))
                    polyhedra = keep

        # NOTE(review): j here is a signed 1-based id, not a face index —
        # confirm bm1.faces[j] indexes the intended face.
        for i, p in enumerate(polyhedra):
            for j in p:
                bm1.faces[j].material_index = i

        end_time = time.time()
        print('Tissue: Polyhedra wireframe, found {} polyhedra in {:.4f} sec'.format(len(polyhedra), end_time-start_time))


        delete_faces = []
        wireframe_faces = []
        not_wireframe_faces = []
        flat_faces = []

        bm.free()

        #bmesh.ops.bisect_edges(bm1, edges=bm1.edges, cuts=3)

        end_time = time.time()
        print('Tissue: Polyhedra wireframe, subdivide edges in {:.4f} sec'.format(end_time-start_time))

        bm1.faces.index_update()
        #merge_verts = []
        # Classify faces per polyhedron: keep those usable as wireframe
        # frames, discard cells with fewer than two usable faces.
        for p in polyhedra:
            delete_faces_poly = []
            wireframe_faces_poly = []
            # Map signed 1-based ids to doubled-face indices (front/back).
            faces_id = [(f-1)*2 if f > 0 else (-f-1)*2+1 for f in p]
            faces_id_neg = [(-f-1)*2 if -f > 0 else (f-1)*2+1 for f in p]
            merge_verts = []
            faces = [bm1.faces[f_id] for f_id in faces_id]
            for f in faces:
                delete = False
                if f.index in delete_faces: continue
                '''
                cen = f.calc_center_median()
                for e in f.edges:
                    mid = (e.verts[0].co + e.verts[1].co)/2
                    vec1 = e.verts[0].co - e.verts[1].co
                    vec2 = mid - cen
                    ang = Vector.angle(vec1,vec2)
                    length = vec2.length
                    #length = sin(ang)*length
                    if length < self.thickness/2:
                        delete = True
                '''
                # Disabled: skip faces too small to host a frame.
                if False:
                    sides = len(f.verts)
                    for i in range(sides):
                        v = f.verts[i].co
                        v0 = f.verts[(i-1)%sides].co
                        v1 = f.verts[(i+1)%sides].co
                        vec0 = v0 - v
                        vec1 = v1 - v
                        ang = (pi - vec0.angle(vec1))/2
                        length = min(vec0.length, vec1.length)*sin(ang)
                        if length < self.thickness/2:
                            delete = True
                            break

                if delete:
                    delete_faces_poly.append(f.index)
                else:
                    wireframe_faces_poly.append(f.index)
                    merge_verts += [v for v in f.verts]
            if len(wireframe_faces_poly) < 2:
                delete_faces += faces_id
                not_wireframe_faces += faces_id_neg
            else:
                wireframe_faces += wireframe_faces_poly
                flat_faces += delete_faces_poly

        #wireframe_faces = list(dict.fromkeys(wireframe_faces))
        # NOTE(review): merge_verts is (re)assigned inside the loop above,
        # so only the last polyhedron's vertices are merged here — confirm.
        bmesh.ops.remove_doubles(bm1, verts=merge_verts, dist=merge_dist)
        bm1.edges.ensure_lookup_table()
        bm1.faces.ensure_lookup_table()
        bm1.faces.index_update()


        wireframe_faces = [i for i in wireframe_faces if i not in not_wireframe_faces]
        wireframe_faces = list(dict.fromkeys(wireframe_faces))

        flat_faces = list(dict.fromkeys(flat_faces))

        end_time = time.time()
        print('Tissue: Polyhedra wireframe, merge and delete in {:.4f} sec'.format(end_time-start_time))

        # Intermediate result: the doubled-face polyhedra mesh.
        poly_me = me.copy()
        bm1.to_mesh(poly_me)
        poly_me.update()
        new_ob = bpy.data.objects.new("Polyhedra", poly_me)
        context.collection.objects.link(new_ob)

        ############# FRAME #############
        bm1.faces.index_update()
        wireframe_faces = [bm1.faces[i] for i in wireframe_faces]
        original_faces = wireframe_faces
        #bmesh.ops.remove_doubles(bm1, verts=merge_verts, dist=0.001)

        # detect edge loops

        loops = []
        boundaries_mat = []
        neigh_face_center = []
        face_normals = []

        # compute boundary frames
        new_faces = []
        wire_length = []
        vert_ids = []

        # append regular faces

        for f in original_faces:
            loop = list(f.verts)
            loops.append(loop)
            boundaries_mat.append([f.material_index for v in loop])
            f.normal_update()
            face_normals.append([f.normal for v in loop])

        push_verts = []
        inner_loops = []

        # For each face loop build an inset ring of vertices and quads
        # connecting it to the original loop.
        for loop_index, loop in enumerate(loops):
            is_boundary = loop_index < len(neigh_face_center)
            materials = boundaries_mat[loop_index]
            new_loop = []
            loop_ext = [loop[-1]] + loop + [loop[0]]

            # calc tangents
            tangents = []
            for i in range(len(loop)):
                # vertices
                vert0 = loop_ext[i]
                vert = loop_ext[i+1]
                vert1 = loop_ext[i+2]
                # edge vectors
                vec0 = (vert0.co - vert.co).normalized()
                vec1 = (vert.co - vert1.co).normalized()
                # tangent
                _vec1 = -vec1
                _vec0 = -vec0
                # Miter offset: scale by 1/sin(half-angle) so the frame
                # keeps constant width at corners.
                ang = (pi - vec0.angle(vec1))/2
                normal = face_normals[loop_index][i]
                tan0 = normal.cross(vec0)
                tan1 = normal.cross(vec1)
                tangent = (tan0 + tan1).normalized()/sin(ang)*self.thickness/2
                tangents.append(tangent)

            # calc correct direction for boundaries
            mult = -1
            if is_boundary:
                dir_val = 0
                for i in range(len(loop)):
                    surf_point = neigh_face_center[loop_index][i]
                    tangent = tangents[i]
                    vert = loop_ext[i+1]
                    dir_val += tangent.dot(vert.co - surf_point)
                if dir_val > 0: mult = 1

            # add vertices
            for i in range(len(loop)):
                vert = loop_ext[i+1]
                area = 1
                new_co = vert.co + tangents[i] * mult * area
                # add vertex
                new_vert = bm1.verts.new(new_co)
                new_loop.append(new_vert)
                vert_ids.append(vert.index)
            new_loop.append(new_loop[0])

            # add faces
            #materials += [materials[0]]
            for i in range(len(loop)):
                v0 = loop_ext[i+1]
                v1 = loop_ext[i+2]
                v2 = new_loop[i+1]
                v3 = new_loop[i]
                face_verts = [v1,v0,v3,v2]
                if mult == -1: face_verts = [v0,v1,v2,v3]
                new_face = bm1.faces.new(face_verts)
                # Material by original edges
                piece_id = 0
                new_face.select = True
                new_faces.append(new_face)
                wire_length.append((v0.co - v1.co).length)
            max_segment = max(wire_length)/self.subdivisions
            #for f,l in zip(new_faces,wire_length):
            #    f.material_index = min(int(l/max_segment), self.subdivisions-1)
            bm1.verts.ensure_lookup_table()
            push_verts += [v.index for v in loop_ext]

        # At this point the topology has been built, but not yet thickened

        end_time = time.time()
        print('Tissue: Polyhedra wireframe, frames in {:.4f} sec'.format(end_time-start_time))

        bm1.verts.ensure_lookup_table()
        bm1.edges.ensure_lookup_table()
        bm1.faces.ensure_lookup_table()
        bm1.verts.index_update()

        ### Displace vertices ###

        circle_center = [0]*len(bm1.verts)
        circle_normal = [0]*len(bm1.verts)

        smooth_corners = [True] * len(bm1.verts)
        corners = [[] for i in range(len(bm1.verts))]
        normals = [0]*len(bm1.verts)
        vertices = [0]*len(bm1.verts)
        # Define vectors direction
        for f in new_faces:
            v0 = f.verts[0]
            v1 = f.verts[1]
            id = v0.index
            corners[id].append((v1.co - v0.co).normalized())
            normals[id] = v0.normal.copy()
            vertices[id] = v0
            smooth_corners[id] = False
        # Displace vertices
        # Offset each corner along its normal; divide by sin of the mean
        # angle so the thickness stays uniform across slanted corners.
        for i, vecs in enumerate(corners):
            if len(vecs) > 0:
                v = vertices[i]
                nor = normals[i]
                ang = 0
                for vec in vecs:
                    ang += nor.angle(vec)
                ang /= len(vecs)
                div = sin(ang)
                if div == 0: div = 1
                v.co += nor*self.thickness/2/div

        end_time = time.time()
        print('Tissue: Polyhedra wireframe, corners displace in {:.4f} sec'.format(end_time-start_time))

        # Removing original flat faces

        flat_faces = [bm1.faces[i] for i in flat_faces]
        for f in flat_faces:
            f.material_index = self.subdivisions+1
            for v in f.verts:
                if smooth_corners[v.index]:
                    v.co += v.normal*self.thickness/2
                    smooth_corners[v.index] = False
        delete_faces = delete_faces + [f.index for f in original_faces]
        delete_faces = list(dict.fromkeys(delete_faces))
        delete_faces = [bm1.faces[i] for i in delete_faces]
        bmesh.ops.delete(bm1, geom=delete_faces, context='FACES')

        bmesh.ops.remove_doubles(bm1, verts=bm1.verts, dist=merge_dist)
        bm1.faces.ensure_lookup_table()
        bm1.edges.ensure_lookup_table()
        bm1.verts.ensure_lookup_table()

        # Optionally dissolve the inner ring edges of the frame quads.
        if self.dissolve_inners:
            bm1.edges.index_update()
            dissolve_edges = []
            for f in bm1.faces:
                e = f.edges[2]
                if e not in dissolve_edges:
                    dissolve_edges.append(e)
            bmesh.ops.dissolve_edges(bm1, edges=dissolve_edges, use_verts=True, use_face_split=True)

        all_lines = [[] for e in me.edges]
        all_end_points = [[] for e in me.edges]
        for v in bm1.verts: v.select_set(False)
        for f in bm1.faces: f.select_set(False)

        # Write the wireframe into a new object and make it active.
        _me = me.copy()
        bm1.to_mesh(me)
        me.update()
        new_ob = bpy.data.objects.new("Wireframe", me)
        context.collection.objects.link(new_ob)
        for o in context.scene.objects: o.select_set(False)
        new_ob.select_set(True)
        context.view_layer.objects.active = new_ob
        me = _me

        bm1.free()
        bpy.data.meshes.remove(_me)
        #new_ob.location = ob.location
        new_ob.matrix_world = ob.matrix_world

        end_time = time.time()
        print('Tissue: Polyhedra wireframe in {:.4f} sec'.format(end_time-start_time))
        return {'FINISHED'}
diff --git a/mesh_tissue/tessellate_numpy.py b/mesh_tissue/tessellate_numpy.py
index 555bb879..d3c1fc21 100644
--- a/mesh_tissue/tessellate_numpy.py
+++ b/mesh_tissue/tessellate_numpy.py
@@ -28,869 +28,657 @@ from bpy.props import (
StringProperty,
PointerProperty
)
-from mathutils import Vector
+from mathutils import Vector, Quaternion, Matrix
import numpy as np
-from math import sqrt
-import random, time
+from math import *
+import random, time, copy
import bmesh
from .utils import *
-
-def anim_tessellate_active(self, context):
- ob = context.object
- props = ob.tissue_tessellate
- if not props.bool_hold:
- try:
- props.generator.name
- props.component.name
- bpy.ops.object.update_tessellate()
- except: pass
-
-def anim_tessellate_object(ob):
- try:
- #bpy.context.view_layer.objects.active = ob
- bpy.ops.object.update_tessellate()
- except:
- return None
-
-#from bpy.app.handlers import persistent
-
-#@persistent
-def anim_tessellate(scene):
- # store selected objects
- #scene = context.scene
- try: active_object = bpy.context.object
- except: active_object = None
- try: selected_objects = bpy.context.selected_objects
- except: selected_objects = []
- if bpy.context.mode in ('OBJECT', 'PAINT_WEIGHT'):
- old_mode = bpy.context.mode
- if old_mode == 'PAINT_WEIGHT': old_mode = 'WEIGHT_PAINT'
- for ob in scene.objects:
- if ob.tissue_tessellate.bool_run:
- hidden = ob.hide_viewport
- ob.hide_viewport = False
- for o in scene.objects:
- if not o.hide_viewport: ob.select_set(False)
- bpy.context.view_layer.objects.active = ob
- ob.select_set(True)
- try:
- bpy.ops.object.update_tessellate()
- except: pass
- ob.hide_viewport = hidden
- # restore selected objects
- for o in scene.objects:
- if not o.hide_viewport: o.select_set(False)
- for o in selected_objects:
- if not o.hide_viewport: o.select_set(True)
- bpy.context.view_layer.objects.active = active_object
- try: bpy.ops.object.mode_set(mode=old_mode)
- except: pass
+from .weight_tools import *
+from .numba_functions import *
+from .tissue_properties import *
+import os, mathutils
+from pathlib import Path
+
+from . import config
+
+def allowed_objects():
+ return ('MESH', 'CURVE', 'SURFACE', 'FONT', 'META')
+
+def remove_temp_objects():
+ # clean objects
+ for o in bpy.data.objects:
+ if "_tissue_tmp" in o.name:
+ bpy.data.objects.remove(o)
return
-def set_tessellate_handler(self, context):
- old_handlers = []
- blender_handlers = bpy.app.handlers.frame_change_post
- for h in blender_handlers:
- if "anim_tessellate" in str(h):
- old_handlers.append(h)
- for h in old_handlers: blender_handlers.remove(h)
- for o in context.scene.objects:
- if o.tissue_tessellate.bool_run:
- blender_handlers.append(anim_tessellate)
- break
- return
+def tessellated(ob):
+ tess_props = ob.tissue_tessellate
+ if tess_props.generator not in list(bpy.data.objects):
+ return False
+ elif tess_props.component_mode == 'OBJECT':
+ return tess_props.component in list(bpy.data.objects)
+ elif tess_props.component_mode == 'COLLECTION':
+ if tess_props.component_coll in list(bpy.data.collections):
+ for o in list(tess_props.component_coll.objects):
+ if o.type in allowed_objects():
+ return True
+ else:
+ for mat in tess_props.generator.material_slots.keys():
+ if mat in bpy.data.objects.keys():
+ if bpy.data.objects[mat].type in allowed_objects():
+ return True
+ return False
+
+def tessellate_patch(props):
+ tt = time.time()
+
+ ob = props['self']
+ _ob0 = props['generator']
+ components = props['component']
+ offset = props['offset']
+ zscale = props['zscale']
+ gen_modifiers = props['gen_modifiers']
+ com_modifiers = props['com_modifiers']
+ mode = props['mode']
+ fill_mode = props['fill_mode']
+ scale_mode = props['scale_mode']
+ rotation_mode = props['rotation_mode']
+ rotation_shift = props['rotation_shift']
+ rand_seed = props['rand_seed']
+ rand_step = props['rand_step']
+ bool_vertex_group = props['bool_vertex_group']
+ bool_selection = props['bool_selection']
+ bool_shapekeys = props['bool_shapekeys']
+ bool_material_id = props['bool_material_id']
+ material_id = props['material_id']
+ normals_mode = props['normals_mode']
+ bounds_x = props['bounds_x']
+ bounds_y = props['bounds_y']
+ use_origin_offset = props['use_origin_offset']
+ vertex_group_thickness = props['vertex_group_thickness']
+ invert_vertex_group_thickness = props['invert_vertex_group_thickness']
+ vertex_group_thickness_factor = props['vertex_group_thickness_factor']
+ vertex_group_distribution = props['vertex_group_distribution']
+ invert_vertex_group_distribution = props['invert_vertex_group_distribution']
+ vertex_group_distribution_factor = props['vertex_group_distribution_factor']
+ vertex_group_cap_owner = props['vertex_group_cap_owner']
+ vertex_group_cap = props['vertex_group_cap']
+ invert_vertex_group_cap = props['invert_vertex_group_cap']
+ vertex_group_bridge_owner = props['vertex_group_bridge_owner']
+ vertex_group_bridge = props['vertex_group_bridge']
+ invert_vertex_group_bridge = props['invert_vertex_group_bridge']
+ vertex_group_rotation = props['vertex_group_rotation']
+ invert_vertex_group_rotation = props['invert_vertex_group_rotation']
+ rotation_direction = props['rotation_direction']
+ target = props['target']
+ even_thickness = props['even_thickness']
+ even_thickness_iter = props['even_thickness_iter']
+ smooth_normals = props['smooth_normals']
+ smooth_normals_iter = props['smooth_normals_iter']
+ smooth_normals_uv = props['smooth_normals_uv']
+ vertex_group_smooth_normals = props['vertex_group_smooth_normals']
+ invert_vertex_group_smooth_normals = props['invert_vertex_group_smooth_normals']
+ #bool_multi_components = props['bool_multi_components']
+ component_mode = props['component_mode']
+ coll_rand_seed = props['coll_rand_seed']
+ consistent_wedges = props['consistent_wedges']
+ vertex_group_scale_normals = props['vertex_group_scale_normals']
+ invert_vertex_group_scale_normals = props['invert_vertex_group_scale_normals']
+ boundary_mat_offset = props['boundary_mat_offset']
+
+ _props = props.copy()
+
+ # reset messages
+ ob.tissue_tessellate.warning_message_thickness = ''
+
+ if normals_mode == 'SHAPEKEYS':
+ if _ob0.data.shape_keys != None:
+ target = _ob0
+ else:
+ normals_mode = 'VERTS'
+ message = "Base mesh doesn't have Shape Keys"
+ ob.tissue_tessellate.warning_message_thickness = message
+ print("Tissue: " + message)
+ if normals_mode == 'OBJECT' and target == None:
+ normals_mode = 'VERTS'
+ message = "Please select a target object"
+ ob.tissue_tessellate.warning_message_thickness = message
+ print("Tissue: " + message)
-class tissue_tessellate_prop(PropertyGroup):
- bool_hold : BoolProperty(
- name="Hold Update",
- description="Prevent automatic update while other properties are changed",
- default=False
- )
- bool_run : BoolProperty(
- name="Animatable Tessellation",
- description="Automatically recompute the tessellation when the frame is changed. Currently is not working during Render Animation",
- default = False,
- update = set_tessellate_handler
- )
- zscale : FloatProperty(
- name="Scale", default=1, soft_min=0, soft_max=10,
- description="Scale factor for the component thickness",
- update = anim_tessellate_active
- )
- scale_mode : EnumProperty(
- items=(
- ('CONSTANT', "Constant", "Uniform thinkness"),
- ('ADAPTIVE', "Proportional", "Preserve component's proportions")
- ),
- default='ADAPTIVE',
- name="Z-Scale according to faces size",
- update = anim_tessellate_active
- )
- offset : FloatProperty(
- name="Surface Offset",
- default=1,
- min=-1,
- max=1,
- soft_min=-1,
- soft_max=1,
- description="Surface offset",
- update = anim_tessellate_active
- )
- mode : EnumProperty(
- items=(
- ('BOUNDS', "Bounds", "The component fits automatically the size of the target face"),
- ('LOCAL', "Local", "Based on Local coordinates, from 0 to 1"),
- ('GLOBAL', 'Global', "Based on Global coordinates, from 0 to 1")),
- default='BOUNDS',
- name="Component Mode",
- update = anim_tessellate_active
- )
- rotation_mode : EnumProperty(
- items=(('RANDOM', "Random", "Random faces rotation"),
- ('UV', "Active UV", "Rotate according to UV coordinates"),
- ('DEFAULT', "Default", "Default rotation")),
- default='DEFAULT',
- name="Component Rotation",
- update = anim_tessellate_active
- )
- fill_mode : EnumProperty(
- items=(
- ('QUAD', 'Quad', 'Regular quad tessellation. Uses only 3 or 4 vertices'),
- ('FAN', 'Fan', 'Radial tessellation for polygonal faces'),
- ('PATCH', 'Patch', 'Curved tessellation according to the last ' +
- 'Subsurf\n(or Multires) modifiers. Works only with 4 sides ' +
- 'patches.\nAfter the last Subsurf (or Multires) only ' +
- 'deformation\nmodifiers can be used')),
- default='QUAD',
- name="Fill Mode",
- update = anim_tessellate_active
- )
- combine_mode : EnumProperty(
- items=(
- ('LAST', 'Last', 'Show only the last iteration'),
- ('UNUSED', 'Unused', 'Combine each iteration with the unused faces of the previous iteration. Used for branching systems'),
- ('ALL', 'All', 'Combine the result of all iterations')),
- default='LAST',
- name="Combine Mode",
- update = anim_tessellate_active
- )
- gen_modifiers : BoolProperty(
- name="Generator Modifiers",
- default=False,
- description="Apply Modifiers and Shape Keys to the base object",
- update = anim_tessellate_active
- )
- com_modifiers : BoolProperty(
- name="Component Modifiers",
- default=False,
- description="Apply Modifiers and Shape Keys to the component object",
- update = anim_tessellate_active
- )
- merge : BoolProperty(
- name="Merge",
- default=False,
- description="Merge vertices in adjacent duplicates",
- update = anim_tessellate_active
- )
- merge_thres : FloatProperty(
- name="Distance",
- default=0.001,
- soft_min=0,
- soft_max=10,
- description="Limit below which to merge vertices",
- update = anim_tessellate_active
- )
- generator : PointerProperty(
- type=bpy.types.Object,
- name="",
- description="Base object for the tessellation",
- update = anim_tessellate_active
- )
- component : PointerProperty(
- type=bpy.types.Object,
- name="",
- description="Component object for the tessellation",
- #default="",
- update = anim_tessellate_active
- )
- bool_random : BoolProperty(
- name="Randomize",
- default=False,
- description="Randomize component rotation",
- update = anim_tessellate_active
- )
- random_seed : IntProperty(
- name="Seed",
- default=0,
- soft_min=0,
- soft_max=10,
- description="Random seed",
- update = anim_tessellate_active
- )
- bool_vertex_group : BoolProperty(
- name="Map Vertex Group",
- default=False,
- description="Transfer all Vertex Groups from Base object",
- update = anim_tessellate_active
- )
- bool_selection : BoolProperty(
- name="On selected Faces",
- default=False,
- description="Create Tessellation only on selected faces",
- update = anim_tessellate_active
- )
- bool_shapekeys : BoolProperty(
- name="Use Shape Keys",
- default=False,
- description="Transfer Component's Shape Keys. If the name of Vertex "
- "Groups and Shape Keys are the same, they will be "
- "automatically combined",
- update = anim_tessellate_active
- )
- bool_smooth : BoolProperty(
- name="Smooth Shading",
- default=False,
- description="Output faces with smooth shading rather than flat shaded",
- update = anim_tessellate_active
- )
- bool_materials : BoolProperty(
- name="Transfer Materials",
- default=False,
- description="Preserve component's materials",
- update = anim_tessellate_active
- )
- bool_material_id : BoolProperty(
- name="Tessellation on Material ID",
- default=False,
- description="Apply the component only on the selected Material",
- update = anim_tessellate_active
- )
- material_id : IntProperty(
- name="Material ID",
- default=0,
- min=0,
- description="Material ID",
- update = anim_tessellate_active
- )
- bool_dissolve_seams : BoolProperty(
- name="Dissolve Seams",
- default=False,
- description="Dissolve all seam edges",
- update = anim_tessellate_active
- )
- iterations : IntProperty(
- name="Iterations",
- default=1,
- min=1,
- soft_max=5,
- description="Automatically repeat the Tessellation using the "
- + "generated geometry as new base object.\nUseful for "
- + "for branching systems. Dangerous!",
- update = anim_tessellate_active
- )
- bool_combine : BoolProperty(
- name="Combine unused",
- default=False,
- description="Combine the generated geometry with unused faces",
- update = anim_tessellate_active
- )
- bool_advanced : BoolProperty(
- name="Advanced Settings",
- default=False,
- description="Show more settings"
- )
- normals_mode : EnumProperty(
- items=(
- ('VERTS', 'Along Normals', 'Consistent direction based on vertices normal'),
- ('FACES', 'Individual Faces', 'Based on individual faces normal')),
- default='VERTS',
- name="Direction",
- update = anim_tessellate_active
- )
- bool_multi_components : BoolProperty(
- name="Multi Components",
- default=False,
- description="Combine different components according to materials name",
- update = anim_tessellate_active
- )
- error_message : StringProperty(
- name="Error Message",
- default=""
- )
- warning_message : StringProperty(
- name="Warning Message",
- default=""
- )
- bounds_x : EnumProperty(
- items=(
- ('EXTEND', 'Extend', 'Default X coordinates'),
- ('CLIP', 'Clip', 'Trim out of bounds in X direction'),
- ('CYCLIC', 'Cyclic', 'Cyclic components in X direction')),
- default='EXTEND',
- name="Bounds X",
- update = anim_tessellate_active
- )
- bounds_y : EnumProperty(
- items=(
- ('EXTEND', 'Extend', 'Default Y coordinates'),
- ('CLIP', 'Clip', 'Trim out of bounds in Y direction'),
- ('CYCLIC', 'Cyclic', 'Cyclic components in Y direction')),
- default='EXTEND',
- name="Bounds Y",
- update = anim_tessellate_active
- )
- cap_faces : BoolProperty(
- name="Cap Holes",
- default=False,
- description="Cap open edges loops",
- update = anim_tessellate_active
- )
- open_edges_crease : FloatProperty(
- name="Open Edges Crease",
- default=0,
- min=0,
- max=1,
- description="Automatically set crease for open edges",
- update = anim_tessellate_active
- )
-
-def store_parameters(operator, ob):
- ob.tissue_tessellate.bool_hold = True
- ob.tissue_tessellate.generator = bpy.data.objects[operator.generator]
- ob.tissue_tessellate.component = bpy.data.objects[operator.component]
- ob.tissue_tessellate.zscale = operator.zscale
- ob.tissue_tessellate.offset = operator.offset
- ob.tissue_tessellate.gen_modifiers = operator.gen_modifiers
- ob.tissue_tessellate.com_modifiers = operator.com_modifiers
- ob.tissue_tessellate.mode = operator.mode
- ob.tissue_tessellate.rotation_mode = operator.rotation_mode
- ob.tissue_tessellate.merge = operator.merge
- ob.tissue_tessellate.merge_thres = operator.merge_thres
- ob.tissue_tessellate.scale_mode = operator.scale_mode
- ob.tissue_tessellate.bool_random = operator.bool_random
- ob.tissue_tessellate.random_seed = operator.random_seed
- ob.tissue_tessellate.fill_mode = operator.fill_mode
- ob.tissue_tessellate.bool_vertex_group = operator.bool_vertex_group
- ob.tissue_tessellate.bool_selection = operator.bool_selection
- ob.tissue_tessellate.bool_shapekeys = operator.bool_shapekeys
- ob.tissue_tessellate.bool_smooth = operator.bool_smooth
- ob.tissue_tessellate.bool_materials = operator.bool_materials
- ob.tissue_tessellate.bool_material_id = operator.bool_material_id
- ob.tissue_tessellate.material_id = operator.material_id
- ob.tissue_tessellate.bool_dissolve_seams = operator.bool_dissolve_seams
- ob.tissue_tessellate.iterations = operator.iterations
- ob.tissue_tessellate.bool_advanced = operator.bool_advanced
- ob.tissue_tessellate.normals_mode = operator.normals_mode
- ob.tissue_tessellate.bool_combine = operator.bool_combine
- ob.tissue_tessellate.bool_multi_components = operator.bool_multi_components
- ob.tissue_tessellate.combine_mode = operator.combine_mode
- ob.tissue_tessellate.bounds_x = operator.bounds_x
- ob.tissue_tessellate.bounds_y = operator.bounds_y
- ob.tissue_tessellate.cap_faces = operator.cap_faces
- ob.tissue_tessellate.bool_hold = False
- return ob
-
-def tessellate_patch(_ob0, _ob1, offset, zscale, com_modifiers, mode,
- scale_mode, rotation_mode, rand_seed, bool_vertex_group,
- bool_selection, bool_shapekeys, bool_material_id, material_id,
- bounds_x, bounds_y):
random.seed(rand_seed)
+ if len(_ob0.modifiers) == 0: gen_modifiers = False
- ob0 = convert_object_to_mesh(_ob0)
- me0 = _ob0.data
-
- # Check if zero faces are selected
- if _ob0.type == 'MESH':
- bool_cancel = True
- for p in me0.polygons:
- check_sel = check_mat = False
- if not bool_selection or p.select: check_sel = True
- if not bool_material_id or p.material_index == material_id: check_mat = True
- if check_sel and check_mat:
- bool_cancel = False
- break
- if bool_cancel:
- return 0
+ # Target mesh used for normals
+ if normals_mode in ('SHAPEKEYS', 'OBJECT'):
+ if fill_mode == 'PATCH':
+ ob0_sk = convert_object_to_mesh(target, True, True)
+ else:
+ use_modifiers = gen_modifiers
+ if normals_mode == 'SHAPEKEYS' and not gen_modifiers:
+ target = _ob0
+ for m in target.modifiers:
+ m.show_viewport = False
+ use_modifiers = True
+ _props['use_modifiers'] = use_modifiers
+ if fill_mode == 'FAN': ob0_sk = convert_to_fan(target, _props, add_id_layer=id_layer)
+ elif fill_mode == 'FRAME': ob0_sk = convert_to_frame(target, _props)
+ elif fill_mode == 'TRI': ob0_sk = convert_to_triangles(target, _props)
+ elif fill_mode == 'QUAD': ob0_sk = reduce_to_quads(target, _props)
+ me0_sk = ob0_sk.data
+ normals_target = get_vertices_numpy(me0_sk)
+ bpy.data.objects.remove(ob0_sk)
+ if normals_mode == 'SHAPEKEYS':
+ key_values0 = [sk.value for sk in _ob0.data.shape_keys.key_blocks]
+ for sk in _ob0.data.shape_keys.key_blocks: sk.value = 0
+ # Base mesh
+ if fill_mode == 'PATCH':
+ ob0 = convert_object_to_mesh(_ob0)
+
+ if boundary_mat_offset != 0:
+ bm=bmesh.new()
+ bm.from_mesh(ob0.data)
+ bm = offset_boundary_materials(
+ bm,
+ boundary_mat_offset = _props['boundary_mat_offset'],
+ boundary_variable_offset = _props['boundary_variable_offset'],
+ auto_rotate_boundary = _props['auto_rotate_boundary'])
+ bm.to_mesh(ob0.data)
+ bm.free()
+ ob0.data.update()
+
+ else:
+ if fill_mode == 'FAN':
+ id_layer = component_mode == 'COLLECTION' and consistent_wedges
+ ob0 = convert_to_fan(_ob0, _props, add_id_layer=id_layer)
+ elif fill_mode == 'FRAME': ob0 = convert_to_frame(_ob0, _props)
+ elif fill_mode == 'TRI': ob0 = convert_to_triangles(_ob0, _props)
+ elif fill_mode == 'QUAD': ob0 = reduce_to_quads(_ob0, _props)
+ ob0.name = "_tissue_tmp_ob0"
+ me0 = ob0.data
+ n_verts0 = len(me0.vertices)
+
+ # read vertices coordinates
+ verts0_co = get_vertices_numpy(me0)
+
+ # base normals
+ if normals_mode in ('SHAPEKEYS','OBJECT'):
+ if len(normals_target) != len(me0.vertices):
+ normals_mode = 'VERTS'
+ message = "Base mesh and Target mesh don't match"
+ ob.tissue_tessellate.warning_message_thickness = message
+ print("Tissue: " + message)
+ else:
+ if normals_mode == 'SHAPEKEYS':
+ for sk, val in zip(_ob0.data.shape_keys.key_blocks, key_values0): sk.value = val
+ verts0_normal = normals_target - verts0_co
+ '''
+ While in Relative thickness method the components are built
+ between the two surfaces, in Constant mode the thickness is uniform.
+ '''
+ if scale_mode == 'CONSTANT':
+ # Normalize vectors
+ verts0_normal /= np.linalg.norm(verts0_normal, axis=1).reshape((-1,1))
+ if not even_thickness:
+ pass
+ #original_normals = get_normals_numpy(me0)
+ #verts0_normal /= np.multiply(verts0_normal, original_normals).sum(1)[:,None]
+ else:
+ # Evaluate maximum components thickness
+ first_component = True
+ for com in components:
+ if com:
+ com = convert_object_to_mesh(com, com_modifiers, False)
+ com, com_area = tessellate_prepare_component(com, props)
+ com_verts = get_vertices_numpy(com.data)
+ bpy.data.objects.remove(com)
+ if first_component:
+ all_com_verts = com_verts
+ first_component = False
+ else:
+ all_com_verts = np.concatenate((all_com_verts, com_verts), axis=0)
+ pos_step_dist = abs(np.max(all_com_verts[:,2]))
+ neg_step_dist = abs(np.min(all_com_verts[:,2]))
+
+ # Rescale normalized vectors according to the angle with the normals
+ original_normals = get_normals_numpy(me0)
+ kd = mathutils.kdtree.KDTree(len(verts0_co))
+ for i, v in enumerate(verts0_co):
+ kd.insert(v, i)
+ kd.balance()
+ step_dist = [neg_step_dist, pos_step_dist]
+ mult = 1
+ sign = [-1,1]
+ for sgn, stp in zip(sign, step_dist):
+ if stp == 0:
+ if sgn == 1: verts0_normal_pos = verts0_normal
+ if sgn == -1: verts0_normal_neg = verts0_normal
+ continue
+ for i in range(even_thickness_iter):
+ test_dist = stp * mult
+ test_pts = verts0_co + verts0_normal * test_dist * sgn
+ # Find the closest point to the sample point
+ closest_dist = []
+ closest_co = []
+ closest_nor = []
+ closest_index = []
+ for find in test_pts:
+ co, index, dist = kd.find(find)
+ closest_co.append(co) # co, index, dist
+ closest_index.append(index) # co, index, dist
+ closest_co = np.array(closest_co)#[:,3,None]
+ closest_index = np.array(closest_index)
+ closest_nor = original_normals[closest_index]
+ closest_vec = test_pts - closest_co
+ projected_vectors = np.multiply(closest_vec, closest_nor).sum(1)[:,None]
+ closest_dist = np.linalg.norm(projected_vectors, axis=1)[:,None]
+ mult = mult*0.2 + test_dist/closest_dist*0.8 # Reduces bouncing effect
+ if sgn == 1: verts0_normal_pos = verts0_normal * mult
+ if sgn == -1: verts0_normal_neg = verts0_normal * mult
+
+ if normals_mode in ('VERTS','FACES'):
+ verts0_normal = get_normals_numpy(me0)
levels = 0
- sculpt_levels = 0
- render_levels = 0
- bool_multires = False
- multires_name = ""
not_allowed = ['FLUID_SIMULATION', 'ARRAY', 'BEVEL', 'BOOLEAN', 'BUILD',
'DECIMATE', 'EDGE_SPLIT', 'MASK', 'MIRROR', 'REMESH',
'SCREW', 'SOLIDIFY', 'TRIANGULATE', 'WIREFRAME', 'SKIN',
'EXPLODE', 'PARTICLE_INSTANCE', 'PARTICLE_SYSTEM', 'SMOKE']
- modifiers0 = list(_ob0.modifiers)#[m for m in ob0.modifiers]
- show_modifiers = [m.show_viewport for m in _ob0.modifiers]
- show_modifiers.reverse()
- modifiers0.reverse()
- for m in modifiers0:
- visible = m.show_viewport
- #m.show_viewport = False
- if m.type in ('SUBSURF', 'MULTIRES') and visible:
- levels = m.levels
- multires_name = m.name
- if m.type == 'MULTIRES':
- bool_multires = True
- multires_name = m.name
- sculpt_levels = m.sculpt_levels
- render_levels = m.render_levels
- else: bool_multires = False
- break
- elif m.type in not_allowed:
- #ob0.data = old_me0
- #bpy.data.meshes.remove(me0)
- return "modifiers_error"
-
- before = _ob0.copy()
- #if ob0.type == 'MESH': before.data = me0
- before_mod = list(before.modifiers)
- before_mod.reverse()
- for m in before_mod:
- if m.type in ('SUBSURF', 'MULTIRES') and m.show_viewport:
- before.modifiers.remove(m)
- break
- else: before.modifiers.remove(m)
-
- before_subsurf = simple_to_mesh(before)
-
- before_bm = bmesh.new()
- before_bm.from_mesh(before_subsurf)
- before_bm.faces.ensure_lookup_table()
- for f in before_bm.faces:
- if len(f.loops) != 4:
- return "topology_error"
- before_bm.edges.ensure_lookup_table()
- for e in before_bm.edges:
- if len(e.link_faces) == 0:
- return "wires_error"
- before_bm.verts.ensure_lookup_table()
- for v in before_bm.verts:
- if len(v.link_faces) == 0:
- return "verts_error"
-
- me0 = ob0.data
- verts0 = me0.vertices # Collect generator vertices
-
- if com_modifiers or _ob1.type != 'MESH': bool_shapekeys = False
-
- # set Shape Keys to zero
- if bool_shapekeys:
- try:
- original_key_values = []
- for sk in _ob1.data.shape_keys.key_blocks:
- original_key_values.append(sk.value)
- sk.value = 0
- except:
- bool_shapekeys = False
-
- if not com_modifiers and not bool_shapekeys:
- mod_visibility = []
- for m in _ob1.modifiers:
- mod_visibility.append(m.show_viewport)
- m.show_viewport = False
- com_modifiers = True
-
- ob1 = convert_object_to_mesh(_ob1, com_modifiers, False)
- me1 = ob1.data
-
- if mode != 'BOUNDS':
- bpy.context.object.active_shape_key_index = 0
- # Bound X
- if bounds_x != 'EXTEND':
- if mode == 'GLOBAL':
- planes_co = ((0,0,0),(1,1,1))
- plane_no = (1,0,0)
- if mode == 'LOCAL':
- planes_co = (ob1.matrix_world @ Vector((0,0,0)), ob1.matrix_world @ Vector((1,0,0)))
- plane_no = planes_co[0]-planes_co[1]
- bpy.ops.object.mode_set(mode='EDIT')
- for co in planes_co:
- bpy.ops.mesh.select_all(action='SELECT')
- bpy.ops.mesh.bisect(plane_co=co, plane_no=plane_no)
- bpy.ops.mesh.mark_seam()
- bpy.ops.object.mode_set(mode='OBJECT')
- _faces = ob1.data.polygons
- if mode == 'GLOBAL':
- for f in [f for f in _faces if (ob1.matrix_world @ f.center).x > 1]:
- f.select = True
- for f in [f for f in _faces if (ob1.matrix_world @ f.center).x < 0]:
- f.select = True
- else:
- for f in [f for f in _faces if f.center.x > 1]:
- f.select = True
- for f in [f for f in _faces if f.center.x < 0]:
- f.select = True
- bpy.ops.object.mode_set(mode='EDIT')
- bpy.ops.mesh.select_mode(type='FACE')
- if bounds_x == 'CLIP':
- bpy.ops.mesh.delete(type='FACE')
- bpy.ops.object.mode_set(mode='OBJECT')
- if bounds_x == 'CYCLIC':
- bpy.ops.mesh.split()
- bpy.ops.object.mode_set(mode='OBJECT')
- # Bound Y
- if bounds_y != 'EXTEND':
- if mode == 'GLOBAL':
- planes_co = ((0,0,0),(1,1,1))
- plane_no = (0,1,0)
- if mode == 'LOCAL':
- planes_co = (ob1.matrix_world @ Vector((0,0,0)), ob1.matrix_world @ Vector((0,1,0)))
- plane_no = planes_co[0]-planes_co[1]
- bpy.ops.object.mode_set(mode='EDIT')
- for co in planes_co:
- bpy.ops.mesh.select_all(action='SELECT')
- bpy.ops.mesh.bisect(plane_co=co, plane_no=plane_no)
- bpy.ops.mesh.mark_seam()
- bpy.ops.object.mode_set(mode='OBJECT')
- _faces = ob1.data.polygons
- if mode == 'GLOBAL':
- for f in [f for f in _faces if (ob1.matrix_world @ f.center).y > 1]:
- f.select = True
- for f in [f for f in _faces if (ob1.matrix_world @ f.center).y < 0]:
- f.select = True
- else:
- for f in [f for f in _faces if f.center.y > 1]:
- f.select = True
- for f in [f for f in _faces if f.center.y < 0]:
- f.select = True
-
- bpy.ops.object.mode_set(mode='EDIT')
- bpy.ops.mesh.select_mode(type='FACE')
- if bounds_y == 'CLIP':
- bpy.ops.mesh.delete(type='FACE')
- bpy.ops.object.mode_set(mode='OBJECT')
- if bounds_y == 'CYCLIC':
- bpy.ops.mesh.split()
- bpy.ops.object.mode_set(mode='OBJECT')
- bpy.ops.object.mode_set(mode='OBJECT')
-
- # Component statistics
- n_verts = len(me1.vertices)
-
- # Create empty lists
- new_verts = []
- new_edges = []
- new_faces = []
- new_verts_np = np.array(())
-
- # Component bounding box
- min_c = Vector((0, 0, 0))
- max_c = Vector((0, 0, 0))
- first = True
- for v in me1.vertices:
- vert = v.co
- if vert[0] < min_c[0] or first:
- min_c[0] = vert[0]
- if vert[1] < min_c[1] or first:
- min_c[1] = vert[1]
- if vert[2] < min_c[2] or first:
- min_c[2] = vert[2]
- if vert[0] > max_c[0] or first:
- max_c[0] = vert[0]
- if vert[1] > max_c[1] or first:
- max_c[1] = vert[1]
- if vert[2] > max_c[2] or first:
- max_c[2] = vert[2]
- first = False
- bb = max_c - min_c
-
- # adaptive XY
- verts1 = []
- for v in me1.vertices:
- if mode == 'BOUNDS':
- vert = v.co - min_c # (ob1.matrix_world * v.co) - min_c
- vert[0] = (vert[0] / bb[0] if bb[0] != 0 else 0.5)
- vert[1] = (vert[1] / bb[1] if bb[1] != 0 else 0.5)
- vert[2] = (vert[2] + (-0.5 + offset * 0.5) * bb[2]) * zscale
- elif mode == 'LOCAL':
- vert = v.co.xyz
- vert[2] *= zscale
- #vert[2] = (vert[2] - min_c[2] + (-0.5 + offset * 0.5) * bb[2]) * zscale
- elif mode == 'GLOBAL':
- vert = ob1.matrix_world @ v.co
- vert[2] *= zscale
- try:
- for sk in me1.shape_keys.key_blocks:
- sk.data[v.index].co = ob1.matrix_world @ sk.data[v.index].co
- except: pass
- #verts1.append(vert)
- v.co = vert
-
- # Bounds X, Y
- if mode != 'BOUNDS':
- if bounds_x == 'CYCLIC':
- move_verts = []
- for f in [f for f in me1.polygons if (f.center).x > 1]:
- for v in f.vertices:
- if v not in move_verts: move_verts.append(v)
- for v in move_verts:
- me1.vertices[v].co.x -= 1
- try:
- _ob1.active_shape_key_index = 0
- for sk in me1.shape_keys.key_blocks:
- sk.data[v].co.x -= 1
- except: pass
- move_verts = []
- for f in [f for f in me1.polygons if (f.center).x < 0]:
- for v in f.vertices:
- if v not in move_verts: move_verts.append(v)
- for v in move_verts:
- me1.vertices[v].co.x += 1
- try:
- _ob1.active_shape_key_index = 0
- for sk in me1.shape_keys.key_blocks:
- sk.data[v].co.x += 1
- except: pass
- if bounds_y == 'CYCLIC':
- move_verts = []
- for f in [f for f in me1.polygons if (f.center).y > 1]:
- for v in f.vertices:
- if v not in move_verts: move_verts.append(v)
- for v in move_verts:
- me1.vertices[v].co.y -= 1
- try:
- _ob1.active_shape_key_index = 0
- for sk in me1.shape_keys.key_blocks:
- sk.data[v].co.y -= 1
- except: pass
- move_verts = []
- for f in [f for f in me1.polygons if (f.center).y < 0]:
- for v in f.vertices:
- if v not in move_verts: move_verts.append(v)
- for v in move_verts:
- me1.vertices[v].co.y += 1
- try:
- _ob1.active_shape_key_index = 0
- for sk in me1.shape_keys.key_blocks:
- sk.data[v].co.y += 1
- except: pass
- verts1 = [v.co for v in me1.vertices]
+ modifiers0 = list(_ob0.modifiers)
+ if len(modifiers0) == 0 or fill_mode != 'PATCH':
+ before_subsurf = me0
+ if fill_mode == 'PATCH':
+ fill_mode = 'QUAD'
+ else:
+ show_modifiers = [m.show_viewport for m in _ob0.modifiers]
+ show_modifiers.reverse()
+ modifiers0.reverse()
+ for m in modifiers0:
+ visible = m.show_viewport
+ if not visible: continue
+ #m.show_viewport = False
+ if m.type in ('SUBSURF', 'MULTIRES') and visible:
+ levels = m.levels
+ break
+ elif m.type in not_allowed:
+ bpy.data.meshes.remove(ob0.data)
+ #bpy.data.meshes.remove(me0)
+ return "modifiers_error"
+
+ before = _ob0.copy()
+ before.name = _ob0.name + "_before_subs"
+ bpy.context.collection.objects.link(before)
+ #if ob0.type == 'MESH': before.data = me0
+ before_mod = list(before.modifiers)
+ before_mod.reverse()
+ for m in before_mod:
+ if m.type in ('SUBSURF', 'MULTIRES') and m.show_viewport:
+ before.modifiers.remove(m)
+ break
+ else: before.modifiers.remove(m)
+
+ before_subsurf = simple_to_mesh(before)
+
+ if boundary_mat_offset != 0:
+ bm=bmesh.new()
+ bm.from_mesh(before_subsurf)
+ bm = offset_boundary_materials(
+ bm,
+ boundary_mat_offset = _props['boundary_mat_offset'],
+ boundary_variable_offset = _props['boundary_variable_offset'],
+ auto_rotate_boundary = _props['auto_rotate_boundary'])
+ bm.to_mesh(before_subsurf)
+ bm.free()
+ before_subsurf.update()
+
+ bpy.data.objects.remove(before)
+
+ tt = tissue_time(tt, "Meshes preparation", levels=2)
+
+ ### PATCHES ###
patch_faces = 4**levels
sides = int(sqrt(patch_faces))
+ step = 1/sides
sides0 = sides-2
patch_faces0 = int((sides-2)**2)
- n_patches = int(len(me0.polygons)/patch_faces)
- if len(me0.polygons)%patch_faces != 0:
- #ob0.data = old_me0
- return "topology_error"
-
- new_verts = []
- new_edges = []
- new_faces = []
- for o in bpy.context.view_layer.objects: o.select_set(False)
- new_patch = None
+ if fill_mode == 'PATCH':
+ all_verts, mask, materials = get_patches(before_subsurf, me0, 4, levels, bool_selection)
+ else:
+ all_verts, mask, materials = get_quads(me0, bool_selection)
+ n_patches = len(all_verts)
- # All vertex group
- if bool_vertex_group:
- try:
- weight = []
- for vg in ob0.vertex_groups:
- _weight = []
- for v in me0.vertices:
- try:
- _weight.append(vg.weight(v.index))
- except:
- _weight.append(0)
- weight.append(_weight)
- except:
- bool_vertex_group = False
+ tt = tissue_time(tt, "Indexing", levels=2)
- # Adaptive Z
- if scale_mode == 'ADAPTIVE':
- if mode == 'BOUNDS': com_area = (bb[0]*bb[1])
- else: com_area = 1
- mult = 1/com_area*patch_faces
- verts_area = []
- bm = bmesh.new()
- bm.from_mesh(me0)
- bm.verts.ensure_lookup_table()
- for v in bm.verts:
- area = 0
- faces = v.link_faces
- for f in faces:
- area += f.calc_area()
- area/=len(faces)
- area*=mult
- verts_area.append(sqrt(area))
+ ### WEIGHT ###
- random.seed(rand_seed)
- bool_correct = False
+ # Check if possible to use Weight Rotation
+ if rotation_mode == 'WEIGHT':
+ if not vertex_group_rotation in ob0.vertex_groups.keys():
+ rotation_mode = 'DEFAULT'
- _faces = [[[0] for ii in range(sides)] for jj in range(sides)]
- _verts = [[[0] for ii in range(sides+1)] for jj in range(sides+1)]
+ bool_weight_smooth_normals = vertex_group_smooth_normals in ob0.vertex_groups.keys()
+ bool_weight_thickness = vertex_group_thickness in ob0.vertex_groups.keys()
+ bool_weight_distribution = vertex_group_distribution in ob0.vertex_groups.keys()
+ bool_weight_cap = vertex_group_cap_owner == 'BASE' and vertex_group_cap in ob0.vertex_groups.keys()
+ bool_weight_bridge = vertex_group_bridge_owner == 'BASE' and vertex_group_bridge in ob0.vertex_groups.keys()
+ bool_weight_normals = vertex_group_scale_normals in ob0.vertex_groups.keys()
- for i in range(n_patches):
- poly = me0.polygons[i*patch_faces]
- if bool_selection and not poly.select: continue
- if bool_material_id and not poly.material_index == material_id: continue
+ read_vertex_groups = bool_vertex_group or rotation_mode == 'WEIGHT' or bool_weight_thickness or bool_weight_cap or bool_weight_bridge or bool_weight_smooth_normals or bool_weight_distribution or bool_weight_normals
+ weight = weight_thickness = weight_rotation = None
+ if read_vertex_groups:
+ if bool_vertex_group:
+ weight = [get_weight(vg, n_verts0) for vg in ob0.vertex_groups]
+ weight = np.array(weight)
+ n_vg = len(ob0.vertex_groups)
+ if rotation_mode == 'WEIGHT':
+ vg_id = ob0.vertex_groups[vertex_group_rotation].index
+ weight_rotation = weight[vg_id]
+ if bool_weight_smooth_normals:
+ vg_id = ob0.vertex_groups[bool_weight_smooth_normals].index
+ weight_rotation = weight[vg_id]
+ if bool_weight_distribution:
+ vg_id = ob0.vertex_groups[vertex_group_distribution].index
+ weight_distribution = weight[vg_id]
+ if bool_weight_normals:
+ vg_id = ob0.vertex_groups[vertex_group_scale_normals].index
+ weight_normals = weight[vg_id]
+ else:
+ if rotation_mode == 'WEIGHT':
+ vg = ob0.vertex_groups[vertex_group_rotation]
+ weight_rotation = get_weight_numpy(vg, n_verts0)
+ if bool_weight_smooth_normals:
+ vg = ob0.vertex_groups[vertex_group_smooth_normals]
+ weight_smooth_normals = get_weight_numpy(vg, n_verts0)
+ if bool_weight_distribution:
+ vg = ob0.vertex_groups[vertex_group_distribution]
+ weight_distribution = get_weight_numpy(vg, n_verts0)
+ if bool_weight_normals:
+ vg = ob0.vertex_groups[vertex_group_scale_normals]
+ weight_normals = get_weight_numpy(vg, n_verts0)
+
+ if component_mode == 'COLLECTION':
+ np.random.seed(coll_rand_seed)
+ if fill_mode == 'FAN' and consistent_wedges:
+ bm0 = bmesh.new()
+ bm0.from_mesh(me0)
+ bm0.faces.ensure_lookup_table()
+ lay_id = bm0.faces.layers.int["id"]
+ faces_id = np.array([f[lay_id] for f in bm0.faces])
+ bm0.clear()
+ n_original_faces = faces_id[-1]+1
+ coll_materials = np.random.randint(len(components),size=n_original_faces)
+ coll_materials = coll_materials[faces_id]
+ else:
+ coll_materials = np.random.randint(len(components),size=n_patches)
+ gradient_distribution = []
+ if bool_weight_distribution:
+ if invert_vertex_group_distribution:
+ weight_distribution = 1-weight_distribution
+ v00 = all_verts[:,0,0]
+ v01 = all_verts[:,0,-1]
+ v10 = all_verts[:,-1,0]
+ v11 = all_verts[:,-1,-1]
+ face_weight = (weight_distribution[v00] + weight_distribution[v01] + weight_distribution[v10] + weight_distribution[v11])/4 * len(components)
+ if fill_mode == 'FAN' and consistent_wedges:
+ for i in range(n_original_faces):
+ face_mask = faces_id == i
+ face_weight[face_mask] = np.average(face_weight[face_mask])
+ face_weight = face_weight.clip(max=len(components)-1)
+ coll_materials = coll_materials.astype('float')
+ coll_materials = face_weight + (coll_materials - face_weight)*vertex_group_distribution_factor
+ coll_materials = coll_materials.astype('int')
- bool_correct = True
- new_patch = bpy.data.objects.new("patch", me1.copy())
- bpy.context.collection.objects.link(new_patch)
+ random.seed(rand_seed)
+ bool_correct = False
- new_patch.select_set(True)
- bpy.context.view_layer.objects.active = new_patch
+ tt = tissue_time(tt, "Reading Vertex Groups", levels=2)
- for area in bpy.context.screen.areas:
- for space in area.spaces:
- try: new_patch.local_view_set(space, True)
- except: pass
+ ### SMOOTH NORMALS
+ if smooth_normals:
+ weight_smooth_normals = 0.2
+ weight_smooth_normals0 = 0.2
+ if vertex_group_smooth_normals in ob0.vertex_groups.keys():
+ vg = ob0.vertex_groups[vertex_group_smooth_normals]
+ weight_smooth_normals0 = get_weight_numpy(vg, n_verts0)
+ if invert_vertex_group_smooth_normals:
+ weight_smooth_normals0 = 1-weight_smooth_normals0
+ weight_smooth_normals0 *= 0.2
- # Vertex Group
- if bool_vertex_group:
- for vg in ob0.vertex_groups:
- new_patch.vertex_groups.new(name=vg.name)
-
- # find patch faces
- faces = _faces.copy()
- verts = _verts.copy()
- shift1 = sides
- shift2 = sides*2-1
- shift3 = sides*3-2
- for j in range(patch_faces):
- if j < patch_faces0:
- if levels == 0:
- u = j%sides0
- v = j//sides0
- else:
- u = j%sides0+1
- v = j//sides0+1
- elif j < patch_faces0 + shift1:
- u = j-patch_faces0
- v = 0
- elif j < patch_faces0 + shift2:
- u = sides-1
- v = j-(patch_faces0 + sides)+1
- elif j < patch_faces0 + shift3:
- jj = j-(patch_faces0 + shift2)
- u = sides-jj-2
- v = sides-1
+ verts0_normal = mesh_diffusion_vector(me0, verts0_normal, smooth_normals_iter, weight_smooth_normals0, smooth_normals_uv)
+ '''
+ While in Relative thickness method the components are built
+ between the two surfaces, in Constant mode the thickness is uniform.
+ '''
+ if scale_mode == 'CONSTANT':
+ # Normalize vectors
+ verts0_normal /= np.linalg.norm(verts0_normal, axis=1).reshape((-1,1))
+ # Compare to the original normals direction
+ original_normals = get_normals_numpy(me0)
+ verts0_normal /= np.multiply(verts0_normal, original_normals).sum(1)[:,None]
+
+ tt = tissue_time(tt, "Smooth Normals", levels=2)
+
+ if normals_mode in ('FACES', 'VERTS'):
+ normals_x = props['normals_x']
+ normals_y = props['normals_y']
+ normals_z = props['normals_z']
+ if bool_weight_normals:
+ if invert_vertex_group_scale_normals:
+ weight_normals = 1-weight_normals
+ w_normals_x = 1 - weight_normals * (1 - normals_x)
+ w_normals_y = 1 - weight_normals * (1 - normals_y)
+ w_normals_z = 1 - weight_normals * (1 - normals_z)
+ else:
+ w_normals_x = normals_x
+ w_normals_y = normals_y
+ w_normals_z = normals_z
+ if normals_x < 1: verts0_normal[:,0] *= w_normals_x
+ if normals_y < 1: verts0_normal[:,1] *= w_normals_y
+ if normals_z < 1: verts0_normal[:,2] *= w_normals_z
+ div_value = np.linalg.norm(verts0_normal, axis=1).reshape((-1,1))
+ div_value[div_value == 0] = 0.00001
+ verts0_normal /= div_value
+
+ ### ROTATE PATCHES ###
+
+ if rotation_mode != 'DEFAULT' or rotation_shift != 0:
+
+ # Weight rotation
+ weight_shift = 0
+ if rotation_mode == 'WEIGHT':
+ corners_id = np.array(((0,0,-1,-1),(0,-1,-1,0)))
+ corners = all_verts[:,corners_id[0],corners_id[1]]
+ corners_weight = weight_rotation[corners]
+ if invert_vertex_group_rotation:
+ corners_weight = 1-corners_weight
+ ids4 = np.arange(4)
+ if rotation_direction == 'DIAG':
+ c0 = corners_weight[:,ids4]
+ c3 = corners_weight[:,(ids4+2)%4]
+ differential = c3 - c0
else:
- jj = j-(patch_faces0 + shift3)
- u = 0
- v = sides-jj-2
- face = me0.polygons[j+i*patch_faces]
- faces[u][v] = face
- verts[u][v] = verts0[face.vertices[0]]
- if u == sides-1:
- verts[sides][v] = verts0[face.vertices[1]]
- if v == sides-1:
- verts[u][sides] = verts0[face.vertices[3]]
- if u == v == sides-1:
- verts[sides][sides] = verts0[face.vertices[2]]
+ c0 = corners_weight[:,ids4]
+ c1 = corners_weight[:,(ids4+1)%4]
+ c2 = corners_weight[:,(ids4+2)%4]
+ c3 = corners_weight[:,(ids4+3)%4]
+ differential = - c0 + c1 + c2 - c3
+ weight_shift = np.argmax(differential, axis=1)
# Random rotation
+ random_shift = 0
if rotation_mode == 'RANDOM':
- rand = random.randint(0, 3)
- if rand == 1:
- verts = [[verts[k][w] for w in range(sides,-1,-1)] for k in range(sides,-1,-1)]
- elif rand == 2:
- verts = [[verts[w][k] for w in range(sides,-1,-1)] for k in range(sides+1)]
- elif rand == 3:
- verts = [[verts[w][k] for w in range(sides+1)] for k in range(sides,-1,-1)]
+ np.random.seed(rand_seed)
+ random_shift = np.random.randint(0,4,size=n_patches)*rand_step
# UV rotation
- elif rotation_mode == 'UV' and ob0.type == 'MESH':
- if len(ob0.data.uv_layers) > 0:
- uv0 = me0.uv_layers.active.data[faces[0][0].index*4].uv
- uv1 = me0.uv_layers.active.data[faces[0][-1].index*4 + 3].uv
- uv2 = me0.uv_layers.active.data[faces[-1][-1].index*4 + 2].uv
- uv3 = me0.uv_layers.active.data[faces[-1][0].index*4 + 1].uv
- v01 = (uv0 + uv1)
- v32 = (uv3 + uv2)
- v0132 = v32 - v01
- v0132.normalize()
- v12 = (uv1 + uv2)
- v03 = (uv0 + uv3)
- v1203 = v03 - v12
- v1203.normalize()
-
- vertUV = []
- dot1203 = v1203.x
- dot0132 = v0132.x
- if(abs(dot1203) < abs(dot0132)):
- if (dot0132 > 0):
- pass
- else:
- verts = [[verts[k][w] for w in range(sides,-1,-1)] for k in range(sides,-1,-1)]
- else:
- if(dot1203 < 0):
- verts = [[verts[w][k] for w in range(sides,-1,-1)] for k in range(sides+1)]
- else:
- verts = [[verts[w][k] for w in range(sides+1)] for k in range(sides,-1,-1)]
-
- step = 1/sides
- for vert, patch_vert in zip(verts1, new_patch.data.vertices):
- # grid coordinates
- u = int(vert[0]//step)
- v = int(vert[1]//step)
- u1 = min(u+1, sides)
- v1 = min(v+1, sides)
- if mode != 'BOUNDS':
- if u > sides-1:
- u = sides-1
- u1 = sides
- if u < 0:
- u = 0
- u1 = 1
- if v > sides-1:
- v = sides-1
- v1 = sides
- if v < 0:
- v = 0
- v1 = 1
- v00 = verts[u][v]
- v10 = verts[u1][v]
- v01 = verts[u][v1]
- v11 = verts[u1][v1]
- # factor coordinates
- fu = (vert[0]-u*step)/step
- fv = (vert[1]-v*step)/step
- fw = vert.z
- # interpolate Z scaling factor
- fvec2d = Vector((fu,fv,0))
- if scale_mode == 'ADAPTIVE':
- a00 = verts_area[v00.index]
- a10 = verts_area[v10.index]
- a01 = verts_area[v01.index]
- a11 = verts_area[v11.index]
- fw*=lerp2(a00,a10,a01,a11,fvec2d)
- # build factor vector
- fvec = Vector((fu,fv,fw))
- # interpolate vertex on patch
- patch_vert.co = lerp3(v00, v10, v01, v11, fvec)
-
- # Vertex Group
- if bool_vertex_group:
- for _weight, vg in zip(weight, new_patch.vertex_groups):
- w00 = _weight[v00.index]
- w10 = _weight[v10.index]
- w01 = _weight[v01.index]
- w11 = _weight[v11.index]
- wuv = lerp2(w00,w10,w01,w11, fvec2d)
- vg.add([patch_vert.index], wuv, "ADD")
+ UV_shift = 0
+ if rotation_mode == 'UV' and ob0.type == 'MESH':
+ bm = bmesh.new()
+ bm.from_mesh(before_subsurf)
+ uv_lay = bm.loops.layers.uv.active
+ UV_shift = [0]*len(mask)
+ for f in bm.faces:
+ ll = f.loops
+ if len(ll) == 4:
+ uv0 = ll[0][uv_lay].uv
+ uv1 = ll[3][uv_lay].uv
+ uv2 = ll[2][uv_lay].uv
+ uv3 = ll[1][uv_lay].uv
+
+ v01 = (uv0 + uv1) # not necessary to divide by 2
+ v32 = (uv3 + uv2)
+ v0132 = v32 - v01 # axis vector 1
+ v0132.normalize() # based on the rotation not on the size
+ v12 = (uv1 + uv2)
+ v03 = (uv0 + uv3)
+ v1203 = v03 - v12 # axis vector 2
+ v1203.normalize() # based on the rotation not on the size
+
+ dot1203 = v1203.x
+ dot0132 = v0132.x
+ if(abs(dot1203) < abs(dot0132)): # already vertical
+ if (dot0132 > 0): shift = 0
+ else: shift = 2 # rotate 180°
+ else: # horizontal
+ if(dot1203 < 0): shift = 3
+ else: shift = 1
+ #UV_shift.append(shift)
+ UV_shift[f.index] = shift
+
+ UV_shift = np.array(UV_shift)[mask]
+ bm.free()
+
+ # Rotate Patch
+ rotation_shift = np.zeros((n_patches))+rotation_shift
+ rot = weight_shift + random_shift + UV_shift + rotation_shift
+ rot = rot%4
+ flip_u = np.logical_or(rot==2,rot==3)
+ flip_v = np.logical_or(rot==1,rot==2)
+ flip_uv = np.logical_or(rot==1,rot==3)
+ all_verts[flip_u] = all_verts[flip_u,::-1,:]
+ all_verts[flip_v] = all_verts[flip_v,:,::-1]
+ all_verts[flip_uv] = np.transpose(all_verts[flip_uv],(0,2,1))
+
+ tt = tissue_time(tt, "Rotations", levels=2)
+
+ #for o in bpy.context.view_layer.objects: o.select_set(False)
+ new_patch = None
+
+ ### COMPONENT ###
+ new_objects = []
+
+ # Store original values
+ _com_modifiers = com_modifiers
+ _bool_shapekeys = bool_shapekeys
+
+ for mat_id, _ob1 in enumerate(components):
+ if _ob1 == None: continue
+
+ # Set original values (for next commponents)
+ com_modifiers = _com_modifiers
+ bool_shapekeys = _bool_shapekeys
+
+ if component_mode != 'OBJECT':
+ if component_mode == 'COLLECTION':
+ mat_mask = coll_materials == mat_id
+ else:
+ mat_mask = materials == mat_id
+ if bool_material_id:
+ mat_mask = np.logical_and(mat_mask, materials == material_id)
+ masked_verts = all_verts[mat_mask]
+ masked_faces = mat_mask
+ elif bool_material_id:
+ masked_verts = all_verts[materials == material_id]
+ masked_faces = np.logical_and(mask, materials == material_id)
+ else:
+ masked_verts = all_verts
+ masked_faces = mask
+ n_patches = len(masked_verts)
+ if n_patches == 0: continue
+
+ if com_modifiers or _ob1.type != 'MESH': bool_shapekeys = False
+
+ # set Shape Keys to zero
+ original_key_values = None
+ if (bool_shapekeys or not com_modifiers) and _ob1.type == 'MESH':
+ if _ob1.data.shape_keys:
+ original_key_values = []
+ for sk in _ob1.data.shape_keys.key_blocks:
+ original_key_values.append(sk.value)
+ sk.value = 0
+ else:
+ bool_shapekeys = False
+ else: bool_shapekeys = False
+
+ if not com_modifiers and not bool_shapekeys:
+ mod_visibility = []
+ for m in _ob1.modifiers:
+ mod_visibility.append(m.show_viewport)
+ m.show_viewport = False
+ com_modifiers = True
+ ob1 = convert_object_to_mesh(_ob1, com_modifiers, False)
+ ob1, com_area = tessellate_prepare_component(ob1, props)
+ ob1.name = "_tissue_tmp_ob1"
+
+ # restore original modifiers visibility for component object
+ try:
+ for m, vis in zip(_ob1.modifiers, mod_visibility):
+ m.show_viewport = vis
+ except: pass
+
+ me1 = ob1.data
+ verts1 = [v.co for v in me1.vertices]
+ n_verts1 = len(verts1)
+ if n_verts1 == 0:
+ bpy.data.objects.remove(ob1)
+ continue
+
+ ### COMPONENT GRID COORDINATES ###
+
+ # find relative UV component's vertices
+ if fill_mode == 'PATCH':
+ verts1_uv_quads = [0]*n_verts1
+ verts1_uv = [0]*n_verts1
+ for i, vert in enumerate(verts1):
+ # grid coordinates
+ u = int(vert[0]//step)
+ v = int(vert[1]//step)
+ u1 = min(u+1, sides)
+ v1 = min(v+1, sides)
+ if mode != 'BOUNDS':
+ if u > sides-1:
+ u = sides-1
+ u1 = sides
+ if u < 0:
+ u = 0
+ u1 = 1
+ if v > sides-1:
+ v = sides-1
+ v1 = sides
+ if v < 0:
+ v = 0
+ v1 = 1
+ verts1_uv_quads[i] = (u,v,u1,v1)
+ # factor coordinates
+ fu = (vert[0]-u*step)/step
+ fv = (vert[1]-v*step)/step
+ fw = vert.z
+ # interpolate Z scaling factor
+ verts1_uv[i] = Vector((fu,fv,fw))
+ else:
+ verts1_uv = verts1
if bool_shapekeys:
- for sk in ob1.data.shape_keys.key_blocks:
+ sk_uv_quads = []
+ sk_uv = []
+ for sk in ob1.data.shape_keys.key_blocks[1:]:
source = sk.data
- for sk_v, _v in zip(source, me1.vertices):
- if mode == 'BOUNDS':
- sk_vert = sk_v.co - min_c # (ob1.matrix_world * v.co) - min_c
- sk_vert[0] = (sk_vert[0] / bb[0] if bb[0] != 0 else 0.5)
- sk_vert[1] = (sk_vert[1] / bb[1] if bb[1] != 0 else 0.5)
- sk_vert[2] = (sk_vert[2] + (-0.5 + offset * 0.5) * bb[2]) * zscale
- elif mode == 'LOCAL':
- sk_vert = sk_v.co#.xyzco
- #sk_vert[2] *= zscale
- #sk_vert[2] = (sk_vert[2] - min_c[2] + (-0.5 + offset * 0.5) * bb[2]) * zscale
- elif mode == 'GLOBAL':
- #sk_vert = ob1.matrix_world @ sk_v.co
- sk_vert = sk_v.co
- #sk_vert[2] *= zscale
+ _sk_uv_quads = [0]*n_verts1
+ _sk_uv = [0]*n_verts1
+ for i, sk_v in enumerate(source):
+ sk_vert = sk_v.co
# grid coordinates
u = int(sk_vert[0]//step)
@@ -910,794 +698,340 @@ def tessellate_patch(_ob0, _ob1, offset, zscale, com_modifiers, mode,
if v < 0:
v = 0
v1 = 1
- v00 = verts[u][v]
- v10 = verts[u1][v]
- v01 = verts[u][v1]
- v11 = verts[u1][v1]
+ _sk_uv_quads[i] = (u,v,u1,v1)
# factor coordinates
fu = (sk_vert[0]-u*step)/step
fv = (sk_vert[1]-v*step)/step
fw = sk_vert.z
-
- if scale_mode == 'ADAPTIVE':
- a00 = verts_area[v00.index]
- a10 = verts_area[v10.index]
- a01 = verts_area[v01.index]
- a11 = verts_area[v11.index]
- fw*=lerp2(a00,a10,a01,a11,Vector((fu,fv,0)))
-
- fvec = Vector((fu,fv,fw))
- sk_co = lerp3(v00, v10, v01, v11, fvec)
-
- new_patch.data.shape_keys.key_blocks[sk.name].data[_v.index].co = sk_co
-
- #if ob0.type == 'MESH': ob0.data = old_me0
- if not bool_correct: return 0
-
- bpy.ops.object.join()
-
- if bool_shapekeys:
- # set original values and combine Shape Keys and Vertex Groups
- for sk, val in zip(_ob1.data.shape_keys.key_blocks, original_key_values):
- sk.value = val
- new_patch.data.shape_keys.key_blocks[sk.name].value = val
+ _sk_uv[i] = Vector((fu,fv,fw))
+ sk_uv_quads.append(_sk_uv_quads)
+ sk_uv.append(_sk_uv)
+ store_sk_coordinates = [[] for t in ob1.data.shape_keys.key_blocks[1:]]
+ sk_uv_quads = np.array(sk_uv_quads)
+ sk_uv = np.array(sk_uv)
+
+ np_verts1_uv = np.array(verts1_uv)
+ if fill_mode == 'PATCH':
+ verts1_uv_quads = np.array(verts1_uv_quads)
+ np_u = verts1_uv_quads[:,0]
+ np_v = verts1_uv_quads[:,1]
+ np_u1 = verts1_uv_quads[:,2]
+ np_v1 = verts1_uv_quads[:,3]
+ else:
+ np_u = 0
+ np_v = 0
+ np_u1 = 1
+ np_v1 = 1
+
+ tt = tissue_time(tt, "Component preparation", levels=2)
+
+ ### DEFORM PATCHES ###
+
+ verts_xyz = verts0_co[masked_verts]
+ v00 = verts_xyz[:, np_u, np_v].reshape((n_patches,-1,3))
+ v10 = verts_xyz[:, np_u1, np_v].reshape((n_patches,-1,3))
+ v01 = verts_xyz[:, np_u, np_v1].reshape((n_patches,-1,3))
+ v11 = verts_xyz[:, np_u1, np_v1].reshape((n_patches,-1,3))
+ vx = np_verts1_uv[:,0].reshape((1,n_verts1,1))
+ vy = np_verts1_uv[:,1].reshape((1,n_verts1,1))
+ vz = np_verts1_uv[:,2].reshape((1,n_verts1,1))
+ co2 = np_lerp2(v00, v10, v01, v11, vx, vy, 'verts')
+
+ ### PATCHES WEIGHT ###
+ weight_thickness = None
if bool_vertex_group:
- for sk in new_patch.data.shape_keys.key_blocks:
- for vg in new_patch.vertex_groups:
- if sk.name == vg.name:
- sk.vertex_group = vg.name
-
- new_name = ob0.name + "_" + ob1.name
- new_patch.name = "tessellate_temp"
-
- if bool_multires:
- for m in ob0.modifiers:
- if m.type == 'MULTIRES' and m.name == multires_name:
- m.levels = levels
- m.sculpt_levels = sculpt_levels
- m.render_levels = render_levels
- # restore original modifiers visibility for component object
- try:
- for m, vis in zip(_ob1.modifiers, mod_visibility):
- m.show_viewport = vis
- except: pass
-
- bpy.data.objects.remove(before)
- bpy.data.objects.remove(ob0)
- bpy.data.objects.remove(ob1)
- return new_patch
-
-def tessellate_original(_ob0, _ob1, offset, zscale, gen_modifiers, com_modifiers, mode,
- scale_mode, rotation_mode, rand_seed, fill_mode,
- bool_vertex_group, bool_selection, bool_shapekeys,
- bool_material_id, material_id, normals_mode, bounds_x, bounds_y):
-
- if com_modifiers or _ob1.type != 'MESH': bool_shapekeys = False
- random.seed(rand_seed)
-
- if bool_shapekeys:
- try:
- original_key_values = []
- for sk in _ob1.data.shape_keys.key_blocks:
- original_key_values.append(sk.value)
- sk.value = 0
- except:
- bool_shapekeys = False
-
- ob0 = convert_object_to_mesh(_ob0, gen_modifiers, True)
- me0 = ob0.data
- ob1 = convert_object_to_mesh(_ob1, com_modifiers, True)
- me1 = ob1.data
-
- base_polygons = []
- base_face_normals = []
-
- n_faces0 = len(me0.polygons)
-
- # Check if zero faces are selected
- if (bool_selection and ob0.type == 'MESH') or bool_material_id:
- for p in me0.polygons:
- if (bool_selection and ob0.type == 'MESH'):
- is_sel = p.select
- else: is_sel = True
- if bool_material_id:
- is_mat = p.material_index == material_id
- else: is_mat = True
- if is_sel and is_mat:
- base_polygons.append(p)
- base_face_normals.append(p.normal)
- else:
- base_polygons = me0.polygons
- base_face_normals = [p.normal for p in me0.polygons]
-
- # numpy test: slower
- #base_face_normals = np.zeros(n_faces0*3)
- #me0.polygons.foreach_get("normal", base_face_normals)
- #base_face_normals = base_face_normals.reshape((n_faces0,3))
-
- if len(base_polygons) == 0:
- return 0
-
- if mode != 'BOUNDS':
-
- bpy.ops.object.select_all(action='DESELECT')
- for o in bpy.context.view_layer.objects: o.select_set(False)
- bpy.context.view_layer.objects.active = ob1
- ob1.select_set(True)
- bpy.context.object.active_shape_key_index = 0
- # Bound X
- if bounds_x != 'EXTEND':
- if mode == 'GLOBAL':
- planes_co = ((0,0,0),(1,1,1))
- plane_no = (1,0,0)
- if mode == 'LOCAL':
- planes_co = (ob1.matrix_world @ Vector((0,0,0)), ob1.matrix_world @ Vector((1,0,0)))
- plane_no = planes_co[0]-planes_co[1]
- bpy.ops.object.mode_set(mode='EDIT')
- for co in planes_co:
- bpy.ops.mesh.select_all(action='SELECT')
- bpy.ops.mesh.bisect(plane_co=co, plane_no=plane_no)
- bpy.ops.mesh.mark_seam()
- bpy.ops.object.mode_set(mode='OBJECT')
- _faces = ob1.data.polygons
- if mode == 'GLOBAL':
- for f in [f for f in _faces if (ob1.matrix_world @ f.center).x > 1]:
- f.select = True
- for f in [f for f in _faces if (ob1.matrix_world @ f.center).x < 0]:
- f.select = True
- else:
- for f in [f for f in _faces if f.center.x > 1]:
- f.select = True
- for f in [f for f in _faces if f.center.x < 0]:
- f.select = True
- bpy.ops.object.mode_set(mode='EDIT')
- bpy.ops.mesh.select_mode(type='FACE')
- if bounds_x == 'CLIP':
- bpy.ops.mesh.delete(type='FACE')
- bpy.ops.object.mode_set(mode='OBJECT')
- if bounds_x == 'CYCLIC':
- bpy.ops.mesh.split()
- bpy.ops.object.mode_set(mode='OBJECT')
- # Bound Y
- if bounds_y != 'EXTEND':
- if mode == 'GLOBAL':
- planes_co = ((0,0,0),(1,1,1))
- plane_no = (0,1,0)
- if mode == 'LOCAL':
- planes_co = (ob1.matrix_world @ Vector((0,0,0)), ob1.matrix_world @ Vector((0,1,0)))
- plane_no = planes_co[0]-planes_co[1]
- bpy.ops.object.mode_set(mode='EDIT')
- for co in planes_co:
- bpy.ops.mesh.select_all(action='SELECT')
- bpy.ops.mesh.bisect(plane_co=co, plane_no=plane_no)
- bpy.ops.mesh.mark_seam()
- bpy.ops.object.mode_set(mode='OBJECT')
- _faces = ob1.data.polygons
- if mode == 'GLOBAL':
- for f in [f for f in _faces if (ob1.matrix_world @ f.center).y > 1]:
- f.select = True
- for f in [f for f in _faces if (ob1.matrix_world @ f.center).y < 0]:
- f.select = True
- else:
- for f in [f for f in _faces if f.center.y > 1]:
- f.select = True
- for f in [f for f in _faces if f.center.y < 0]:
- f.select = True
-
- bpy.ops.object.mode_set(mode='EDIT')
- bpy.ops.mesh.select_mode(type='FACE')
- if bounds_y == 'CLIP':
- bpy.ops.mesh.delete(type='FACE')
- bpy.ops.object.mode_set(mode='OBJECT')
- if bounds_y == 'CYCLIC':
- bpy.ops.mesh.split()
- bpy.ops.object.mode_set(mode='OBJECT')
- bpy.ops.object.mode_set(mode='OBJECT')
- #ob1 = new_ob1
-
- me1 = ob1.data
-
- verts0 = me0.vertices # Collect generator vertices
-
- # Component statistics
- n_verts1 = len(me1.vertices)
- n_edges1 = len(me1.edges)
- n_faces1 = len(me1.polygons)
-
- # Create empty lists
- new_verts = []
- new_edges = []
- new_faces = []
- new_verts_np = np.array(())
-
- # Component Coordinates
- co1 = [0]*n_verts1*3
-
- if mode == 'GLOBAL':
- for v in me1.vertices:
- v.co = ob1.matrix_world @ v.co
- try:
- for sk in me1.shape_keys.key_blocks:
- sk.data[v.index].co = ob1.matrix_world @ sk.data[v.index].co
- except: pass
- if mode != 'BOUNDS':
- if bounds_x == 'CYCLIC':
- move_verts = []
- for f in [f for f in me1.polygons if (f.center).x > 1]:
- for v in f.vertices:
- if v not in move_verts: move_verts.append(v)
- for v in move_verts:
- me1.vertices[v].co.x -= 1
- try:
- _ob1.active_shape_key_index = 0
- for sk in me1.shape_keys.key_blocks:
- sk.data[v].co.x -= 1
- except: pass
- move_verts = []
- for f in [f for f in me1.polygons if (f.center).x < 0]:
- for v in f.vertices:
- if v not in move_verts: move_verts.append(v)
- for v in move_verts:
- me1.vertices[v].co.x += 1
- try:
- _ob1.active_shape_key_index = 0
- for sk in me1.shape_keys.key_blocks:
- sk.data[v].co.x += 1
- except: pass
- if bounds_y == 'CYCLIC':
- move_verts = []
- for f in [f for f in me1.polygons if (f.center).y > 1]:
- for v in f.vertices:
- if v not in move_verts: move_verts.append(v)
- for v in move_verts:
- me1.vertices[v].co.y -= 1
+ n_vg = len(weight)
+ patches_weight = weight[:, masked_verts]
+ w00 = patches_weight[:, :, np_u, np_v].reshape((n_vg, n_patches,-1,1))
+ w10 = patches_weight[:, :, np_u1, np_v].reshape((n_vg, n_patches,-1,1))
+ w01 = patches_weight[:, :, np_u, np_v1].reshape((n_vg, n_patches,-1,1))
+ w11 = patches_weight[:, :, np_u1, np_v1].reshape((n_vg, n_patches,-1,1))
+ store_weight = np_lerp2(w00,w10,w01,w11,vx[None,:,:,:],vy[None,:,:,:],'weight')
+
+ if vertex_group_thickness in ob0.vertex_groups.keys():
+ vg_id = ob0.vertex_groups[vertex_group_thickness].index
+ weight_thickness = store_weight[vg_id,:,:]
+ if vertex_group_smooth_normals in ob0.vertex_groups.keys():
+ vg_id = ob0.vertex_groups[vertex_group_smooth_normals].index
+ weight_smooth_normals = store_weight[vg_id,:,:]
+ else:
+ # Read vertex group Thickness
+ if vertex_group_thickness in ob0.vertex_groups.keys():
+ vg = ob0.vertex_groups[vertex_group_thickness]
+ weight_thickness = get_weight_numpy(vg, n_verts0)
+ wt = weight_thickness[masked_verts]
+ wt = wt[:,:,:,np.newaxis]
+ w00 = wt[:, np_u, np_v].reshape((n_patches, -1, 1))
+ w10 = wt[:, np_u1, np_v].reshape((n_patches, -1, 1))
+ w01 = wt[:, np_u, np_v1].reshape((n_patches, -1, 1))
+ w11 = wt[:, np_u1, np_v1].reshape((n_patches, -1, 1))
+ weight_thickness = np_lerp2(w00,w10,w01,w11,vx,vy,'verts')
try:
- #new_ob1.active_shape_key_index = 0
- for sk in me1.shape_keys.key_blocks:
- sk.data[v].co.y -= 1
+ weight_thickness.shape
+ if invert_vertex_group_thickness:
+ weight_thickness = 1-weight_thickness
+ fact = vertex_group_thickness_factor
+ if fact > 0:
+ weight_thickness = weight_thickness*(1-fact) + fact
except: pass
- move_verts = []
- for f in [f for f in me1.polygons if (f.center).y < 0]:
- for v in f.vertices:
- if v not in move_verts: move_verts.append(v)
- for v in move_verts:
- me1.vertices[v].co.y += 1
+
+ # Read vertex group smooth normals
+ if vertex_group_smooth_normals in ob0.vertex_groups.keys():
+ vg = ob0.vertex_groups[vertex_group_smooth_normals]
+ weight_smooth_normals = get_weight_numpy(vg, n_verts0)
+ wt = weight_smooth_normals[masked_verts]
+ wt = wt[:,:,:,None]
+ w00 = wt[:, np_u, np_v].reshape((n_patches, -1, 1))
+ w10 = wt[:, np_u1, np_v].reshape((n_patches, -1, 1))
+ w01 = wt[:, np_u, np_v1].reshape((n_patches, -1, 1))
+ w11 = wt[:, np_u1, np_v1].reshape((n_patches, -1, 1))
+ weight_smooth_normals = np_lerp2(w00,w10,w01,w11,vx,vy,'verts')
try:
- #new_ob1.active_shape_key_index = 0
- for sk in me1.shape_keys.key_blocks:
- sk.data[v].co.y += 1
+ weight_smooth_normals.shape
+ if invert_vertex_group_smooth_normals:
+ weight_smooth_normals = 1-weight_smooth_normals
+ #fact = vertex_group_thickness_factor
+ #if fact > 0:
+ # weight_thickness = weight_thickness*(1-fact) + fact
except: pass
+ if normals_mode == 'FACES':
+ n2 = get_attribute_numpy(before_subsurf.polygons,'normal',3)
+ n2 = n2[masked_faces][:,None,:]
+ else:
+ if normals_mode == 'CUSTOM':
+ me0.calc_normals_split()
+ normals_split = [0]*len(me0.loops)*3
+ vertex_indexes = [0]*len(me0.loops)
+ me0.loops.foreach_get('normal', normals_split)
+ me0.loops.foreach_get('vertex_index', vertex_indexes)
+ normals_split = np.array(normals_split).reshape(-1,3)
+ vertex_indexes = np.array(vertex_indexes)
+ verts0_normal = np.zeros((len(me0.vertices),3))
+ np.add.at(verts0_normal, vertex_indexes, normals_split)
+ indexes, counts = np.unique(vertex_indexes,return_counts=True)
+ verts0_normal[indexes] /= counts[:,np.newaxis]
+
+ if 'Eval_Normals' in me1.uv_layers.keys():
+ bm1 = bmesh.new()
+ bm1.from_mesh(me1)
+ uv_co = np.array(uv_from_bmesh(bm1, 'Eval_Normals'))
+ vx_nor = uv_co[:,0]#.reshape((1,n_verts1,1))
+ #vy_nor = uv_co[:,1]#.reshape((1,n_verts1,1))
+
+ # grid coordinates
+ np_u = np.clip(vx_nor//step, 0, sides).astype('int')
+ #np_v = np.maximum(vy_nor//step, 0).astype('int')
+ np_u1 = np.clip(np_u+1, 0, sides).astype('int')
+ #np_v1 = np.minimum(np_v+1, sides).astype('int')
+
+ vx_nor = (vx_nor - np_u * step)/step
+ #vy_nor = (vy_nor - np_v * step)/step
+ vx_nor = vx_nor.reshape((1,n_verts1,1))
+ #vy_nor = vy_nor.reshape((1,n_verts1,1))
+ vy_nor = vy
+ bm1.free()
+ else:
+ vx_nor = vx
+ vy_nor = vy
+
+ if normals_mode in ('SHAPEKEYS','OBJECT') and scale_mode == 'CONSTANT' and even_thickness:
+ verts_norm_pos = verts0_normal_pos[masked_verts]
+ verts_norm_neg = verts0_normal_neg[masked_verts]
+ nor_mask = (vz<0).reshape((-1))
+ n00 = verts_norm_pos[:, np_u, np_v].reshape((n_patches,-1,3))
+ n10 = verts_norm_pos[:, np_u1, np_v].reshape((n_patches,-1,3))
+ n01 = verts_norm_pos[:, np_u, np_v1].reshape((n_patches,-1,3))
+ n11 = verts_norm_pos[:, np_u1, np_v1].reshape((n_patches,-1,3))
+ n00_neg = verts_norm_neg[:, np_u, np_v].reshape((n_patches,-1,3))
+ n10_neg = verts_norm_neg[:, np_u1, np_v].reshape((n_patches,-1,3))
+ n01_neg = verts_norm_neg[:, np_u, np_v1].reshape((n_patches,-1,3))
+ n11_neg = verts_norm_neg[:, np_u1, np_v1].reshape((n_patches,-1,3))
+ n00[:,nor_mask] = n00_neg[:,nor_mask]
+ n10[:,nor_mask] = n10_neg[:,nor_mask]
+ n01[:,nor_mask] = n01_neg[:,nor_mask]
+ n11[:,nor_mask] = n11_neg[:,nor_mask]
+ else:
+ verts_norm = verts0_normal[masked_verts]
+ n00 = verts_norm[:, np_u, np_v].reshape((n_patches,-1,3))
+ n10 = verts_norm[:, np_u1, np_v].reshape((n_patches,-1,3))
+ n01 = verts_norm[:, np_u, np_v1].reshape((n_patches,-1,3))
+ n11 = verts_norm[:, np_u1, np_v1].reshape((n_patches,-1,3))
+ n2 = np_lerp2(n00, n10, n01, n11, vx_nor, vy_nor, 'verts')
+
+ # thickness variation
+ mean_area = []
+ a2 = None
+ if scale_mode == 'ADAPTIVE' and normals_mode not in ('SHAPEKEYS','OBJECT'):
+ #com_area = bb[0]*bb[1]
+ if mode != 'BOUNDS' or com_area == 0: com_area = 1
+ if normals_mode == 'FACES':
+ if levels == 0 and True:
+ areas = [0]*len(mask)
+ before_subsurf.polygons.foreach_get('area',areas)
+ areas = np.sqrt(np.array(areas)/com_area)[masked_faces]
+ a2 = areas[:,None,None]
+ else:
+ areas = calc_verts_area_bmesh(me0)
+ verts_area = np.sqrt(areas*patch_faces/com_area)
+ verts_area = verts_area[masked_verts]
+ verts_area = verts_area.mean(axis=(1,2)).reshape((n_patches,1,1))
+ a2 = verts_area
+ else:
+ areas = calc_verts_area_bmesh(me0)
+ verts_area = np.sqrt(areas*patch_faces/com_area)
+ verts_area = verts_area[masked_verts]
+ a00 = verts_area[:, np_u, np_v].reshape((n_patches,-1,1))
+ a10 = verts_area[:, np_u1, np_v].reshape((n_patches,-1,1))
+ a01 = verts_area[:, np_u, np_v1].reshape((n_patches,-1,1))
+ a11 = verts_area[:, np_u1, np_v1].reshape((n_patches,-1,1))
+ # remapped z scale
+ a2 = np_lerp2(a00,a10,a01,a11,vx,vy,'verts')
- me1.vertices.foreach_get("co", co1)
- co1 = np.array(co1)
- vx = co1[0::3].reshape((n_verts1,1))
- vy = co1[1::3].reshape((n_verts1,1))
- vz = co1[2::3].reshape((n_verts1,1))
- min_c = Vector((vx.min(), vy.min(), vz.min())) # Min BB Corner
- max_c = Vector((vx.max(), vy.max(), vz.max())) # Max BB Corner
- bb = max_c - min_c # Bounding Box
-
- # Component Coordinates
- if mode == 'BOUNDS':
- vx = (vx - min_c[0]) / bb[0] if bb[0] != 0 else 0.5
- vy = (vy - min_c[1]) / bb[1] if bb[1] != 0 else 0.5
- vz = ((vz - min_c[2]) + (-0.5 + offset * 0.5) * bb[2]) * zscale
- else:
- vz *= zscale
-
- # Component polygons
- fs1 = [[i for i in p.vertices] for p in me1.polygons]
- new_faces = fs1[:]
-
- # Component edges
- es1 = np.array([[i for i in e.vertices] for e in me1.edges])
- #es1 = [[i for i in e.vertices] for e in me1.edges if e.is_loose]
- new_edges = es1[:]
-
- # SHAPE KEYS
- if bool_shapekeys:
- basis = True #com_modifiers
- vx_key = []
- vy_key = []
- vz_key = []
- sk_np = []
- for sk in ob1.data.shape_keys.key_blocks:
- do_shapekeys = True
- # set all keys to 0
- for _sk in ob1.data.shape_keys.key_blocks: _sk.value = 0
- sk.value = 1
-
- if basis:
- basis = False
- continue
+ store_coordinates = calc_thickness(co2,n2,vz,a2,weight_thickness)
+ co2 = n2 = vz = a2 = None
- # Apply component modifiers
- if com_modifiers:
- sk_ob = convert_object_to_mesh(_ob1)
- sk_data = sk_ob.data
- source = sk_data.vertices
+ if bool_shapekeys:
+ tt_sk = time.time()
+ n_sk = len(sk_uv_quads)
+ # ids of face corners for each vertex (n_sk, n_verts1, 4)
+ np_u = np.clip(sk_uv_quads[:,:,0], 0, sides).astype('int')[:,None,:]
+ np_v = np.clip(sk_uv_quads[:,:,1], 0, sides).astype('int')[:,None,:]
+ np_u1 = np.clip(sk_uv_quads[:,:,2], 0, sides).astype('int')[:,None,:]
+ np_v1 = np.clip(sk_uv_quads[:,:,3], 0, sides).astype('int')[:,None,:]
+ print(np_v1)
+ # face corners for each vertex (n_patches, n_sk, n_verts1, 4)
+ v00 = verts_xyz[:,np_u,np_v].reshape((n_patches,n_sk,n_verts1,3))#.swapaxes(0,1)
+ v10 = verts_xyz[:,np_u1,np_v].reshape((n_patches,n_sk,n_verts1,3))#.swapaxes(0,1)
+ v01 = verts_xyz[:,np_u,np_v1].reshape((n_patches,n_sk,n_verts1,3))#.swapaxes(0,1)
+ v11 = verts_xyz[:,np_u1,np_v1].reshape((n_patches,n_sk,n_verts1,3))#.swapaxes(0,1)
+ vx = sk_uv[:,:,0].reshape((1,n_sk,n_verts1,1))
+ vy = sk_uv[:,:,1].reshape((1,n_sk,n_verts1,1))
+ vz = sk_uv[:,:,2].reshape((1,n_sk,n_verts1,1))
+ co2 = np_lerp2(v00,v10,v01,v11,vx,vy,mode='shapekeys')
+
+ if normals_mode == 'FACES':
+ n2 = n2[None,:,:,:]
else:
- source = sk.data
- shapekeys = []
- for v in source:
- if mode == 'BOUNDS':
- vert = v.co - min_c
- vert[0] = vert[0] / bb[0]
- vert[1] = vert[1] / bb[1]
- vert[2] = (vert[2] + (-0.5 + offset * 0.5) * bb[2]) * zscale
- elif mode == 'LOCAL':
- vert = v.co.xyz
- vert[2] *= zscale
- #vert[2] = (vert[2] - min_c[2] + (-0.5 + offset * 0.5) * bb[2]) * \
- # zscale
- elif mode == 'GLOBAL':
- vert = v.co.xyz
- #vert = ob1.matrix_world @ v.co
- vert[2] *= zscale
- shapekeys.append(vert)
-
- # Component vertices
- key1 = np.array([v for v in shapekeys]).reshape(len(shapekeys), 3, 1)
- vx_key.append(key1[:, 0])
- vy_key.append(key1[:, 1])
- vz_key.append(key1[:, 2])
- #sk_np.append([])
-
- # All vertex group
- if bool_vertex_group:
- try:
- weight = []
- vertex_groups = ob0.vertex_groups
- for vg in vertex_groups:
- _weight = []
- for v in me0.vertices:
- try:
- _weight.append(vg.weight(v.index))
- except:
- _weight.append(0)
- weight.append(_weight)
- except:
- bool_vertex_group = False
-
- # Adaptive Z
- if scale_mode == 'ADAPTIVE':
- if mode == 'BOUNDS': com_area = (bb[0]*bb[1])
- else: com_area = 1
- if com_area == 0: mult = 1
- else: mult = 1/com_area
- verts_area = []
- bm = bmesh.new()
- bm.from_mesh(me0)
- bm.verts.ensure_lookup_table()
- for v in bm.verts:
- area = 0
- faces = v.link_faces
- for f in faces:
- area += f.calc_area()
- try:
- area/=len(faces)
- area*=mult
- verts_area.append(sqrt(area))
- except:
- verts_area.append(1)
-
- # FAN tessellation mode
- if fill_mode == 'FAN':
- fan_verts = [v.co.to_tuple() for v in me0.vertices]
- fan_polygons = []
- fan_select = []
- fan_material = []
- fan_normals = []
- # selected_faces = []
- for p in base_polygons:
- fan_center = Vector((0, 0, 0))
- center_area = 0
- for v in p.vertices:
- fan_center += me0.vertices[v].co
- if scale_mode == 'ADAPTIVE':
- center_area += verts_area[v]
- fan_center /= len(p.vertices)
- center_area /= len(p.vertices)
-
- last_vert = len(fan_verts)
- fan_verts.append(fan_center.to_tuple())
- #fan_verts.append(fan_center)
- if scale_mode == 'ADAPTIVE':
- verts_area.append(center_area)
-
- # Vertex Group
- if bool_vertex_group:
- for w in weight:
- center_weight = sum([w[i] for i in p.vertices]) / len(p.vertices)
- w.append(center_weight)
+ if normals_mode in ('SHAPEKEYS','OBJECT') and scale_mode == 'CONSTANT' and even_thickness:
+ verts_norm_pos = verts0_normal_pos[masked_verts]
+ verts_norm_neg = verts0_normal_neg[masked_verts]
+ nor_mask = (vz<0).reshape((-1))
+ n00 = verts_norm_pos[:, np_u, np_v].reshape((n_patches,n_sk,n_verts1,3))
+ n10 = verts_norm_pos[:, np_u1, np_v].reshape((n_patches,n_sk,n_verts1,3))
+ n01 = verts_norm_pos[:, np_u, np_v1].reshape((n_patches,n_sk,n_verts1,3))
+ n11 = verts_norm_pos[:, np_u1, np_v1].reshape((n_patches,n_sk,n_verts1,3))
+ n00_neg = verts_norm_neg[:, np_u, np_v].reshape((n_patches,n_sk,n_verts1,3))
+ n10_neg = verts_norm_neg[:, np_u1, np_v].reshape((n_patches,n_sk,n_verts1,3))
+ n01_neg = verts_norm_neg[:, np_u, np_v1].reshape((n_patches,n_sk,n_verts1,3))
+ n11_neg = verts_norm_neg[:, np_u1, np_v1].reshape((n_patches,n_sk,n_verts1,3))
+ n00[:,:,nor_mask] = n00_neg[:,:,nor_mask]
+ n10[:,:,nor_mask] = n10_neg[:,:,nor_mask]
+ n01[:,:,nor_mask] = n01_neg[:,:,nor_mask]
+ n11[:,:,nor_mask] = n11_neg[:,:,nor_mask]
+ else:
+ n00 = verts_norm[:, np_u, np_v].reshape((n_patches,n_sk,n_verts1,3))
+ n10 = verts_norm[:, np_u1, np_v].reshape((n_patches,n_sk,n_verts1,3))
+ n01 = verts_norm[:, np_u, np_v1].reshape((n_patches,n_sk,n_verts1,3))
+ n11 = verts_norm[:, np_u1, np_v1].reshape((n_patches,n_sk,n_verts1,3))
+ n2 = np_lerp2(n00,n10,n01,n11,vx,vy,'shapekeys')
- for i in range(len(p.vertices)):
- fan_polygons.append((p.vertices[i],
- p.vertices[(i + 1) % len(p.vertices)],
- last_vert, last_vert))
+ # NOTE: weight thickness is based on the base position of the
+ # vertices, not on the coordinates of the shape keys
- if bool_material_id: fan_material.append(p.material_index)
- if bool_selection: fan_select.append(p.select)
+ if scale_mode == 'ADAPTIVE':# and normals_mode not in ('OBJECT', 'SHAPEKEYS'): ### not sure
if normals_mode == 'FACES':
- fan_normals.append(p.normal)
-
- fan_me = bpy.data.meshes.new('Fan.Mesh')
- fan_me.from_pydata(tuple(fan_verts), [], tuple(fan_polygons))
- me0 = fan_me.copy()
- bpy.data.meshes.remove(fan_me)
- verts0 = me0.vertices
- base_polygons = me0.polygons
- if normals_mode == 'FACES': base_face_normals = fan_normals
-
- count = 0 # necessary for UV calculation
-
- # TESSELLATION
- j = 0
- jj = -1
- bool_correct = False
-
- # optimization test
- n_faces = len(base_polygons)
- _vs0 = [0]*n_faces
- _nvs0 = [0]*n_faces
- _sz = [0]*n_faces
- _w0 = [[0]*n_faces]*len(ob0.vertex_groups)
- np_faces = [np.array(p) for p in fs1]
- new_faces = [0]*n_faces*n_faces1
- face1_count = 0
-
- for p in base_polygons:
-
- bool_correct = True
- if rotation_mode == 'UV' and ob0.type != 'MESH':
- rotation_mode = 'DEFAULT'
-
- # Random rotation
- if rotation_mode == 'RANDOM':
- shifted_vertices = []
- n_poly_verts = len(p.vertices)
- rand = random.randint(0, n_poly_verts)
- for i in range(n_poly_verts):
- shifted_vertices.append(p.vertices[(i + rand) % n_poly_verts])
- if scale_mode == 'ADAPTIVE':
- verts_area0 = np.array([verts_area[i] for i in shifted_vertices])
- vs0 = np.array([verts0[i].co for i in shifted_vertices])
- nvs0 = np.array([verts0[i].normal for i in shifted_vertices])
- if normals_mode == 'VERTS':
- nvs0 = np.array([verts0[i].normal for i in shifted_vertices])
- # vertex weight
- if bool_vertex_group:
- ws0 = []
- for w in weight:
- _ws0 = []
- for i in shifted_vertices:
- try:
- _ws0.append(w[i])
- except:
- _ws0.append(0)
- ws0.append(np.array(_ws0))
-
- # UV rotation
- elif rotation_mode == 'UV':
- if len(ob0.data.uv_layers) > 0 and fill_mode != 'FAN':
- i = p.index
- if bool_material_id:
- count = sum([len(p.vertices) for p in me0.polygons[:i]])
- #if i == 0: count = 0
- v01 = (me0.uv_layers.active.data[count].uv +
- me0.uv_layers.active.data[count + 1].uv)
- if len(p.vertices) > 3:
- v32 = (me0.uv_layers.active.data[count + 3].uv +
- me0.uv_layers.active.data[count + 2].uv)
- else:
- v32 = (me0.uv_layers.active.data[count].uv +
- me0.uv_layers.active.data[count + 2].uv)
- v0132 = v32 - v01
- v0132.normalize()
-
- v12 = (me0.uv_layers.active.data[count + 1].uv +
- me0.uv_layers.active.data[count + 2].uv)
- if len(p.vertices) > 3:
- v03 = (me0.uv_layers.active.data[count].uv +
- me0.uv_layers.active.data[count + 3].uv)
- else:
- v03 = (me0.uv_layers.active.data[count].uv +
- me0.uv_layers.active.data[count].uv)
- v1203 = v03 - v12
- v1203.normalize()
-
- vertUV = []
- dot1203 = v1203.x
- dot0132 = v0132.x
- if(abs(dot1203) < abs(dot0132)):
- if (dot0132 > 0):
- vertUV = p.vertices[1:] + p.vertices[:1]
- else:
- vertUV = p.vertices[3:] + p.vertices[:3]
+ a2 = mean_area
else:
- if(dot1203 < 0):
- vertUV = p.vertices[:]
- else:
- vertUV = p.vertices[2:] + p.vertices[:2]
- vs0 = np.array([verts0[i].co for i in vertUV])
- nvs0 = np.array([verts0[i].normal for i in vertUV])
-
- # Vertex weight
- if bool_vertex_group:
- ws0 = []
- for w in weight:
- _ws0 = []
- for i in vertUV:
- try:
- _ws0.append(w[i])
- except:
- _ws0.append(0)
- ws0.append(np.array(_ws0))
-
- count += len(p.vertices)
- else: rotation_mode = 'DEFAULT'
-
- # Default rotation
- if rotation_mode == 'DEFAULT':
- vs0 = np.array([verts0[i].co for i in p.vertices])
- nvs0 = np.array([verts0[i].normal for i in p.vertices])
- # Vertex weight
- if bool_vertex_group:
- ws0 = []
- for w in weight:
- _ws0 = []
- for i in p.vertices:
- try:
- _ws0.append(w[i])
- except:
- _ws0.append(0)
- ws0.append(np.array(_ws0))
-
- # optimization test
- _vs0[j] = (vs0[0], vs0[1], vs0[2], vs0[-1])
- if normals_mode == 'VERTS':
- _nvs0[j] = (nvs0[0], nvs0[1], nvs0[2], nvs0[-1])
- #else:
- # _nvs0[j] = base_face_normals[j]
-
-
- # vertex z to normal
- if scale_mode == 'ADAPTIVE':
- poly_faces = (p.vertices[0], p.vertices[1], p.vertices[2], p.vertices[-1])
- if rotation_mode == 'RANDOM': sz = verts_area0
- else: sz = np.array([verts_area[i] for i in poly_faces])
-
- _sz[j] = sz
+ a00 = verts_area[:, np_u, np_v].reshape((n_patches,n_sk,n_verts1,1))
+ a10 = verts_area[:, np_u1, np_v].reshape((n_patches,n_sk,n_verts1,1))
+ a01 = verts_area[:, np_u, np_v1].reshape((n_patches,n_sk,n_verts1,1))
+ a11 = verts_area[:, np_u1, np_v1].reshape((n_patches,n_sk,n_verts1,1))
+ # remapped z scale
+ a2 = np_lerp2(a00,a10,a01,a11,vx,vy,'shapekeys')
- if bool_vertex_group:
- vg_count = 0
- for _ws0 in ws0:
- _w0[vg_count][j] = (_ws0[0], _ws0[1], _ws0[2], _ws0[-1])
- vg_count += 1
-
- for p in fs1:
- new_faces[face1_count] = [i + n_verts1 * j for i in p]
- face1_count += 1
-
- j += 1
-
- # build edges list
- n_edges1 = new_edges.shape[0]
- new_edges = new_edges.reshape((1, n_edges1, 2))
- new_edges = new_edges.repeat(n_faces,axis=0)
- new_edges = new_edges.reshape((n_edges1*n_faces, 2))
- increment = np.arange(n_faces)*n_verts1
- increment = increment.repeat(n_edges1, axis=0)
- increment = increment.reshape((n_faces*n_edges1,1))
- new_edges = new_edges + increment
-
- # optimization test
- _vs0 = np.array(_vs0)
- _sz = np.array(_sz)
-
- _vs0_0 = _vs0[:,0].reshape((n_faces,1,3))
- _vs0_1 = _vs0[:,1].reshape((n_faces,1,3))
- _vs0_2 = _vs0[:,2].reshape((n_faces,1,3))
- _vs0_3 = _vs0[:,3].reshape((n_faces,1,3))
-
- # remapped vertex coordinates
- v0 = _vs0_0 + (_vs0_1 - _vs0_0) * vx
- v1 = _vs0_3 + (_vs0_2 - _vs0_3) * vx
- v2 = v0 + (v1 - v0) * vy
-
- # remapped vertex normal
- if normals_mode == 'VERTS':
- _nvs0 = np.array(_nvs0)
- _nvs0_0 = _nvs0[:,0].reshape((n_faces,1,3))
- _nvs0_1 = _nvs0[:,1].reshape((n_faces,1,3))
- _nvs0_2 = _nvs0[:,2].reshape((n_faces,1,3))
- _nvs0_3 = _nvs0[:,3].reshape((n_faces,1,3))
- nv0 = _nvs0_0 + (_nvs0_1 - _nvs0_0) * vx
- nv1 = _nvs0_3 + (_nvs0_2 - _nvs0_3) * vx
- nv2 = nv0 + (nv1 - nv0) * vy
- else:
- nv2 = np.array(base_face_normals).reshape((n_faces,1,3))
-
- if bool_vertex_group:
- n_vg = len(_w0)
- w = np.array(_w0)
- #for w in _w0:
- #w = np.array(w)
- w_0 = w[:,:,0].reshape((n_vg, n_faces,1,1))
- w_1 = w[:,:,1].reshape((n_vg, n_faces,1,1))
- w_2 = w[:,:,2].reshape((n_vg, n_faces,1,1))
- w_3 = w[:,:,3].reshape((n_vg, n_faces,1,1))
- # remapped weight
- w0 = w_0 + (w_1 - w_0) * vx
- w1 = w_3 + (w_2 - w_3) * vx
- w = w0 + (w1 - w0) * vy
- w = w.reshape((n_vg, n_faces*n_verts1))
- #w = w2.tolist()
-
- if scale_mode == 'ADAPTIVE':
- _sz_0 = _sz[:,0].reshape((n_faces,1,1))
- _sz_1 = _sz[:,1].reshape((n_faces,1,1))
- _sz_2 = _sz[:,2].reshape((n_faces,1,1))
- _sz_3 = _sz[:,3].reshape((n_faces,1,1))
- # remapped z scale
- sz0 = _sz_0 + (_sz_1 - _sz_0) * vx
- sz1 = _sz_3 + (_sz_2 - _sz_3) * vx
- sz2 = sz0 + (sz1 - sz0) * vy
- v3 = v2 + nv2 * vz * sz2
- else:
- v3 = v2 + nv2 * vz
-
- new_verts_np = v3.reshape((n_faces*n_verts1,3))
-
- if bool_shapekeys:
- n_sk = len(vx_key)
- sk_np = [0]*n_sk
- for i in range(n_sk):
- vx = np.array(vx_key)
- vy = np.array(vy_key)
- vz = np.array(vz_key)
-
- # remapped vertex coordinates
- v0 = _vs0_0 + (_vs0_1 - _vs0_0) * vx
- v1 = _vs0_3 + (_vs0_2 - _vs0_3) * vx
- v2 = v0 + (v1 - v0) * vy
-
- # remapped vertex normal
- if normals_mode == 'VERTS':
- nv0 = _nvs0_0 + (_nvs0_1 - _nvs0_0) * vx
- nv1 = _nvs0_3 + (_nvs0_2 - _nvs0_3) * vx
- nv2 = nv0 + (nv1 - nv0) * vy
- else:
- nv2 = np.array(base_face_normals).reshape((n_faces,1,3))
+ store_sk_coordinates = calc_thickness(co2,n2,vz,a2,weight_thickness)
+ co2 = n2 = vz = a2 = weight_thickness = None
+ tissue_time(tt_sk, "Compute ShapeKeys", levels=3)
- if scale_mode == 'ADAPTIVE':
- # remapped z scale
- sz0 = _sz_0 + (_sz_1 - _sz_0) * vx
- sz1 = _sz_3 + (_sz_2 - _sz_3) * vx
- sz2 = sz0 + (sz1 - sz0) * vy
- v3 = v2 + nv2 * vz * sz2
- else:
- v3 = v2 + nv2 * vz
-
- sk_np[i] = v3.reshape((n_faces*n_verts1,3))
-
- #if ob0.type == 'MESH': ob0.data = old_me0
-
- if not bool_correct: return 0
-
- new_verts = new_verts_np.tolist()
- new_name = ob0.name + "_" + ob1.name
- new_me = bpy.data.meshes.new(new_name)
- new_me.from_pydata(new_verts, new_edges.tolist(), new_faces)
- new_me.update(calc_edges=True)
- new_ob = bpy.data.objects.new("tessellate_temp", new_me)
-
- # vertex group
- if bool_vertex_group and False:
- for vg in ob0.vertex_groups:
- new_ob.vertex_groups.new(name=vg.name)
- for i in range(len(vg_np[vg.index])):
- new_ob.vertex_groups[vg.name].add([i], vg_np[vg.index][i],"ADD")
- # vertex group
- if bool_vertex_group:
- for vg in ob0.vertex_groups:
- new_ob.vertex_groups.new(name=vg.name)
- for i in range(len(w[vg.index])):
- new_ob.vertex_groups[vg.name].add([i], w[vg.index,i],"ADD")
-
- if bool_shapekeys:
- basis = com_modifiers
- sk_count = 0
- for sk, val in zip(_ob1.data.shape_keys.key_blocks, original_key_values):
- sk.value = val
- new_ob.shape_key_add(name=sk.name)
- new_ob.data.shape_keys.key_blocks[sk.name].value = val
- # set shape keys vertices
- sk_data = new_ob.data.shape_keys.key_blocks[sk.name].data
- if sk_count == 0:
- sk_count += 1
- continue
- for id in range(len(sk_data)):
- sk_data[id].co = sk_np[sk_count-1][id]
- sk_count += 1
- if bool_vertex_group:
- for sk in new_ob.data.shape_keys.key_blocks:
- for vg in new_ob.vertex_groups:
- if sk.name == vg.name:
- sk.vertex_group = vg.name
-
- # EDGES SEAMS
- edge_data = [0]*n_edges1
- me1.edges.foreach_get("use_seam",edge_data)
- if any(edge_data):
- edge_data = edge_data*n_faces
- new_ob.data.edges.foreach_set("use_seam",edge_data)
-
- # EDGES SHARP
- edge_data = [0]*n_edges1
- me1.edges.foreach_get("use_edge_sharp",edge_data)
- if any(edge_data):
- edge_data = edge_data*n_faces
- new_ob.data.edges.foreach_set("use_edge_sharp",edge_data)
-
- bpy.ops.object.select_all(action='DESELECT')
- bpy.context.collection.objects.link(new_ob)
- new_ob.select_set(True)
- bpy.context.view_layer.objects.active = new_ob
-
- # EDGES BEVEL
- edge_data = [0]*n_edges1
- me1.edges.foreach_get("bevel_weight",edge_data)
- if any(edge_data):
- bpy.ops.object.mode_set(mode='EDIT')
- bpy.ops.mesh.select_all(action='SELECT')
- bpy.ops.transform.edge_bevelweight(value=1)
- bpy.ops.object.mode_set(mode='OBJECT')
- edge_data = edge_data*n_faces
- new_ob.data.edges.foreach_set("bevel_weight",edge_data)
+ tt = tissue_time(tt, "Compute Coordinates", levels=2)
- # EDGE CREASES
- edge_data = [0]*n_edges1
- me1.edges.foreach_get("crease",edge_data)
- if any(edge_data):
- bpy.ops.object.mode_set(mode='EDIT')
- bpy.ops.mesh.select_all(action='SELECT')
- bpy.ops.transform.edge_crease(value=1)
- bpy.ops.object.mode_set(mode='OBJECT')
- edge_data = edge_data*n_faces
- new_ob.data.edges.foreach_set('crease', edge_data)
+ new_me = array_mesh(ob1, len(masked_verts))
+ tt = tissue_time(tt, "Repeat component", levels=2)
- # MATERIALS
- for slot in ob1.material_slots: new_ob.data.materials.append(slot.material)
+ new_patch = bpy.data.objects.new("_tissue_tmp_patch", new_me)
+ bpy.context.collection.objects.link(new_patch)
+ store_coordinates = np.concatenate(store_coordinates, axis=0).reshape((-1)).tolist()
+ new_me.vertices.foreach_set('co',store_coordinates)
- polygon_materials = [0]*n_faces1
- me1.polygons.foreach_get("material_index", polygon_materials)
- polygon_materials *= n_faces
- new_ob.data.polygons.foreach_set("material_index", polygon_materials)
- new_ob.data.update() ###
+ for area in bpy.context.screen.areas:
+ for space in area.spaces:
+ try: new_patch.local_view_set(space, True)
+ except: pass
+ tt = tissue_time(tt, "Inject coordinates", levels=2)
- try:
- bpy.data.objects.remove(new_ob1)
- except: pass
+ # Vertex Group
+ for vg in ob1.vertex_groups:
+ vg_name = vg.name
+ if vg_name in ob0.vertex_groups.keys():
+ vg_name = '_{}_'.format(vg_name)
+ new_patch.vertex_groups.new(name=vg_name)
+ if bool_vertex_group:
+ new_groups = []
+ for vg in ob0.vertex_groups:
+ new_groups.append(new_patch.vertex_groups.new(name=vg.name))
+ for vg, w in zip(new_groups, store_weight):
+ set_weight_numpy(vg, w.reshape(-1))
+ tt = tissue_time(tt, "Write Vertex Groups", levels=2)
+ if bool_shapekeys:
+ for sk, val in zip(_ob1.data.shape_keys.key_blocks, original_key_values):
+ sk.value = val
+ new_patch.shape_key_add(name=sk.name, from_mix=False)
+ new_patch.data.shape_keys.key_blocks[sk.name].value = val
+ for i in range(n_sk):
+ coordinates = np.concatenate(store_sk_coordinates[:,i], axis=0)
+ coordinates = coordinates.flatten().tolist()
+ new_patch.data.shape_keys.key_blocks[i+1].data.foreach_set('co', coordinates)
+
+ # set original values and combine Shape Keys and Vertex Groups
+ for sk, val in zip(_ob1.data.shape_keys.key_blocks, original_key_values):
+ sk.value = val
+ new_patch.data.shape_keys.key_blocks[sk.name].value = val
+ if bool_vertex_group:
+ vg_keys = new_patch.vertex_groups.keys()
+ for sk in new_patch.data.shape_keys.key_blocks:
+ if sk.name in vg_keys:
+ sk.vertex_group = sk.name
+ tt = tissue_time(tt, "Shape Keys", levels=2)
+ elif original_key_values:
+ for sk, val in zip(_ob1.data.shape_keys.key_blocks, original_key_values):
+ sk.value = val
+
+ new_name = ob0.name + "_" + ob1.name
+ new_patch.name = "_tissue_tmp_patch"
+ new_patch.data.update() # needed for updating the normals
+ new_objects.append(new_patch)
+ bpy.data.objects.remove(ob1)
bpy.data.objects.remove(ob0)
- bpy.data.meshes.remove(me0)
- bpy.data.objects.remove(ob1)
- bpy.data.meshes.remove(me1)
- return new_ob
-
+ tt = tissue_time(tt, "Closing Tessellate Iteration", levels=2)
+ return new_objects
-class tessellate(Operator):
- bl_idname = "object.tessellate"
- bl_label = "Tessellate"
+class tissue_tessellate(Operator):
+ bl_idname = "object.tissue_tessellate"
+ bl_label = "Tissue Tessellate"
bl_description = ("Create a copy of selected object on the active object's "
"faces, adapting the shape to the different faces")
bl_options = {'REGISTER', 'UNDO'}
+ bool_hold : BoolProperty(
+ name="Hold",
+ description="Wait...",
+ default=False
+ )
object_name : StringProperty(
name="",
description="Name of the generated object"
@@ -1712,7 +1046,7 @@ class tessellate(Operator):
scale_mode : EnumProperty(
items=(
('CONSTANT', "Constant", "Uniform thickness"),
- ('ADAPTIVE', "Proportional", "Preserve component's proportions")
+ ('ADAPTIVE', "Relative", "Preserve component's proportions")
),
default='ADAPTIVE',
name="Z-Scale according to faces size"
@@ -1725,6 +1059,15 @@ class tessellate(Operator):
soft_max=1,
description="Surface offset"
)
+ component_mode : EnumProperty(
+ items=(
+ ('OBJECT', "Object", "Use the same component object for all the faces"),
+ ('COLLECTION', "Collection", "Use multiple components from Collection"),
+ ('MATERIALS', "Materials", "Use multiple components by materials name")
+ ),
+ default='OBJECT',
+ name="Component Mode"
+ )
mode : EnumProperty(
items=(
('BOUNDS', "Bounds", "The component fits automatically the size of the target face"),
@@ -1736,18 +1079,34 @@ class tessellate(Operator):
rotation_mode : EnumProperty(
items=(('RANDOM', "Random", "Random faces rotation"),
('UV', "Active UV", "Face rotation is based on UV coordinates"),
+ ('WEIGHT', "Weight Gradient", "Rotate according to Vertex Group gradient"),
('DEFAULT', "Default", "Default rotation")),
default='DEFAULT',
name="Component Rotation"
)
+ rotation_direction : EnumProperty(
+ items=(('ORTHO', "Orthogonal", "Component main directions in XY"),
+ ('DIAG', "Diagonal", "Component main direction aligned with diagonal")),
+ default='ORTHO',
+ name="Direction"
+ )
+ rotation_shift : IntProperty(
+ name="Shift",
+ default=0,
+ soft_min=0,
+ soft_max=3,
+ description="Shift components rotation"
+ )
fill_mode : EnumProperty(
items=(
+ ('TRI', 'Tri', 'Triangulate the base mesh'),
('QUAD', 'Quad', 'Regular quad tessellation. Uses only 3 or 4 vertices'),
('FAN', 'Fan', 'Radial tessellation for polygonal faces'),
('PATCH', 'Patch', 'Curved tessellation according to the last ' +
'Subsurf\n(or Multires) modifiers. Works only with 4 sides ' +
'patches.\nAfter the last Subsurf (or Multires) only ' +
- 'deformation\nmodifiers can be used')),
+ 'deformation\nmodifiers can be used'),
+ ('FRAME', 'Frame', 'Tessellation along the edges of each face')),
default='QUAD',
name="Fill Mode"
)
@@ -1761,12 +1120,12 @@ class tessellate(Operator):
)
gen_modifiers : BoolProperty(
name="Generator Modifiers",
- default=False,
+ default=True,
description="Apply Modifiers and Shape Keys to the base object"
)
com_modifiers : BoolProperty(
name="Component Modifiers",
- default=False,
+ default=True,
description="Apply Modifiers and Shape Keys to the component object"
)
merge : BoolProperty(
@@ -1774,9 +1133,14 @@ class tessellate(Operator):
default=False,
description="Merge vertices in adjacent duplicates"
)
+ merge_open_edges_only : BoolProperty(
+ name="Open edges only",
+ default=True,
+ description="Merge only open edges"
+ )
merge_thres : FloatProperty(
name="Distance",
- default=0.001,
+ default=0.0001,
soft_min=0,
soft_max=10,
description="Limit below which to merge vertices"
@@ -1786,13 +1150,27 @@ class tessellate(Operator):
default=False,
description="Randomize component rotation"
)
- random_seed : IntProperty(
+ rand_seed : IntProperty(
name="Seed",
default=0,
soft_min=0,
soft_max=10,
description="Random seed"
)
+ coll_rand_seed : IntProperty(
+ name="Seed",
+ default=0,
+ soft_min=0,
+ soft_max=10,
+ description="Random seed"
+ )
+ rand_step : IntProperty(
+ name="Step",
+ default=1,
+ min=1,
+ soft_max=2,
+ description="Random step"
+ )
bool_vertex_group : BoolProperty(
name="Map Vertex Groups",
default=False,
@@ -1830,6 +1208,28 @@ class tessellate(Operator):
description="Component object for the tessellation",
default = ""
)
+ component_coll : StringProperty(
+ name="",
+ description="Components collection for the tessellation",
+ default = ""
+ )
+ target : StringProperty(
+ name="",
+ description="Target object for custom direction",
+ default = ""
+ )
+ even_thickness : BoolProperty(
+ name="Even Thickness",
+ default=False,
+ description="Iterative sampling method for determine the correct length of the vectors (Experimental)"
+ )
+ even_thickness_iter : IntProperty(
+ name="Even Thickness Iterations",
+ default=3,
+ min = 1,
+ soft_max = 20,
+ description="More iterations produces more accurate results but make the tessellation slower"
+ )
bool_material_id : BoolProperty(
name="Tessellation on Material ID",
default=False,
@@ -1852,7 +1252,7 @@ class tessellate(Operator):
min=1,
soft_max=5,
description="Automatically repeat the Tessellation using the "
- + "generated geometry as new base object.\nUseful for "
+ + "generated geometry as new base object.\nUsefull for "
+ "for branching systems. Dangerous!"
)
bool_combine : BoolProperty(
@@ -1867,16 +1267,13 @@ class tessellate(Operator):
)
normals_mode : EnumProperty(
items=(
- ('VERTS', 'Along Normals', 'Consistent direction based on vertices normal'),
- ('FACES', 'Individual Faces', 'Based on individual faces normal')),
+ ('VERTS', 'Normals', 'Consistent direction based on vertices normal'),
+ ('FACES', 'Faces', 'Based on individual faces normal'),
+ ('SHAPEKEYS', 'Keys', "According to base object's shape keys"),
+ ('OBJECT', 'Object', "According to a target object")),
default='VERTS',
name="Direction"
)
- bool_multi_components : BoolProperty(
- name="Multi Components",
- default=False,
- description="Combine different components according to materials name"
- )
bounds_x : EnumProperty(
items=(
('EXTEND', 'Extend', 'Default X coordinates'),
@@ -1893,11 +1290,40 @@ class tessellate(Operator):
default='EXTEND',
name="Bounds Y",
)
+ close_mesh : EnumProperty(
+ items=(
+ ('NONE', 'None', 'Keep the mesh open'),
+ ('CAP', 'Cap Holes', 'Automatically cap open loops'),
+ ('BRIDGE', 'Bridge Open Loops', 'Automatically bridge loop pairs'),
+ ('BRIDGE_CAP', 'Custom', 'Bridge loop pairs and cap holes according to vertex groups')),
+ default='NONE',
+ name="Close Mesh"
+ )
cap_faces : BoolProperty(
name="Cap Holes",
default=False,
description="Cap open edges loops"
)
+ frame_boundary : BoolProperty(
+ name="Frame Boundary",
+ default=False,
+ description="Support face boundaries"
+ )
+ fill_frame : BoolProperty(
+ name="Fill Frame",
+ default=False,
+ description="Fill inner faces with Fan tessellation"
+ )
+ boundary_mat_offset : IntProperty(
+ name="Material Offset",
+ default=0,
+ description="Material Offset for boundaries (with Multi Components or Material ID)"
+ )
+ fill_frame_mat : IntProperty(
+ name="Material Offset",
+ default=0,
+ description="Material Offset for inner faces (with Multi Components or Material ID)"
+ )
open_edges_crease : FloatProperty(
name="Open Edges Crease",
default=0,
@@ -1905,11 +1331,199 @@ class tessellate(Operator):
max=1,
description="Automatically set crease for open edges"
)
- # NOTE: this was made into an annotation for 2.8x, but should be a class variable.
- # working_on = ""
+ bridge_edges_crease : FloatProperty(
+ name="Bridge Edges Crease",
+ default=0,
+ min=0,
+ max=1,
+ description="Automatically set crease for bridge edges"
+ )
+ bridge_smoothness : FloatProperty(
+ name="Smoothness",
+ default=1,
+ min=0,
+ max=1,
+ description="Bridge Smoothness"
+ )
+ frame_thickness : FloatProperty(
+ name="Frame Thickness",
+ default=0.2,
+ min=0,
+ soft_max=2,
+ description="Frame Thickness"
+ )
+ frame_mode : EnumProperty(
+ items=(
+ ('CONSTANT', 'Constant', 'Even thickness'),
+ ('RELATIVE', 'Relative', 'Frame offset depends on face areas')),
+ default='CONSTANT',
+ name="Offset"
+ )
+ bridge_cuts : IntProperty(
+ name="Cuts",
+ default=0,
+ min=0,
+ max=20,
+ description="Bridge Cuts"
+ )
+ cap_material_offset : IntProperty(
+ name="Material Offset",
+ default=0,
+ min=0,
+ description="Material index offset for the cap faces"
+ )
+ bridge_material_offset : IntProperty(
+ name="Material Offset",
+ default=0,
+ min=0,
+ description="Material index offset for the bridge faces"
+ )
+ patch_subs : IntProperty(
+ name="Patch Subdivisions",
+ default=1,
+ min=0,
+ description="Subdivisions levels for Patch tessellation after the first iteration"
+ )
+ use_origin_offset : BoolProperty(
+ name="Align to Origins",
+ default=False,
+ description="Define offset according to components origin and local Z coordinate"
+ )
+
+ vertex_group_thickness : StringProperty(
+ name="Thickness weight", default='',
+ description="Vertex Group used for thickness"
+ )
+ invert_vertex_group_thickness : BoolProperty(
+ name="Invert", default=False,
+ description="Invert the vertex group influence"
+ )
+ vertex_group_thickness_factor : FloatProperty(
+ name="Factor",
+ default=0,
+ min=0,
+ max=1,
+ description="Thickness factor to use for zero vertex group influence"
+ )
+
+ vertex_group_distribution : StringProperty(
+ name="Distribution weight", default='',
+ description="Vertex Group used for gradient distribution"
+ )
+ invert_vertex_group_distribution : BoolProperty(
+ name="Invert", default=False,
+ description="Invert the vertex group influence"
+ )
+ vertex_group_distribution_factor : FloatProperty(
+ name="Factor",
+ default=0,
+ min=0,
+ max=1,
+ description="Randomness factor to use for zero vertex group influence"
+ )
+
+ vertex_group_cap_owner : EnumProperty(
+ items=(
+ ('BASE', 'Base', 'Use base vertex group'),
+ ('COMP', 'Component', 'Use component vertex group')),
+ default='COMP',
+ name="Source"
+ )
+ vertex_group_cap : StringProperty(
+ name="Cap Vertex Group", default='',
+ description="Vertex Group used for cap open edges"
+ )
+ invert_vertex_group_cap : BoolProperty(
+ name="Invert", default=False,
+ description="Invert the vertex group influence"
+ )
+
+ vertex_group_bridge_owner : EnumProperty(
+ items=(
+ ('BASE', 'Base', 'Use base vertex group'),
+ ('COMP', 'Component', 'Use component vertex group')),
+ default='COMP',
+ name="Source"
+ )
+ vertex_group_bridge : StringProperty(
+ name="Thickness weight", default='',
+ description="Vertex Group used for bridge open edges"
+ )
+ invert_vertex_group_bridge : BoolProperty(
+ name="Invert", default=False,
+ description="Invert the vertex group influence"
+ )
+
+ vertex_group_rotation : StringProperty(
+ name="Rotation weight", default='',
+ description="Vertex Group used for rotation"
+ )
+ invert_vertex_group_rotation : BoolProperty(
+ name="Invert", default=False,
+ description="Invert the vertex group influence"
+ )
+ normals_x : FloatProperty(
+ name="X", default=1, min=0, max=1,
+ description="Scale X component of the normals"
+ )
+ normals_y : FloatProperty(
+ name="Y", default=1, min=0, max=1,
+ description="Scale Y component of the normals"
+ )
+ normals_z : FloatProperty(
+ name="Z", default=1, min=0, max=1,
+ description="Scale Z component of the normals"
+ )
+ vertex_group_scale_normals : StringProperty(
+ name="Scale normals weight", default='',
+ description="Vertex Group used for editing the normals directions"
+ )
+ invert_vertex_group_scale_normals : BoolProperty(
+ name="Invert", default=False,
+ description="Invert the vertex group influence"
+ )
+ smooth_normals : BoolProperty(
+ name="Smooth Normals", default=False,
+ description="Smooth normals of the surface in order to reduce intersections"
+ )
+ smooth_normals_iter : IntProperty(
+ name="Iterations",
+ default=5,
+ min=0,
+ description="Smooth iterations"
+ )
+ smooth_normals_uv : FloatProperty(
+ name="UV Anisotropy",
+ default=0,
+ min=-1,
+ max=1,
+ description="0 means no anisotropy, -1 represent the U direction, while 1 represent the V direction"
+ )
+ vertex_group_smooth_normals : StringProperty(
+ name="Smooth Normals weight", default='',
+ description="Vertex Group used for smoothing normals"
+ )
+ invert_vertex_group_smooth_normals : BoolProperty(
+ name="Invert", default=False,
+ description="Invert the vertex group influence"
+ )
+ consistent_wedges : BoolProperty(
+ name="Consistent Wedges", default=True,
+ description="Use same component for the wedges generated by the Fan tessellation"
+ )
+ boundary_variable_offset : BoolProperty(
+ name="Boundary Variable Offset", default=False,
+ description="Additional material offset based on the number of boundary vertices"
+ )
+ auto_rotate_boundary : BoolProperty(
+ name="Automatic Rotation", default=False,
+ description="Automatically rotate the boundary faces"
+ )
+
+ working_on = ""
def draw(self, context):
- allowed_obj = ('MESH', 'CURVE', 'SURFACE', 'FONT', 'META')
+
'''
try:
bool_working = self.working_on == self.object_name and \
@@ -1923,38 +1537,56 @@ class tessellate(Operator):
ob0 = None
ob1 = None
- sel = bpy.context.selected_objects
- if len(sel) == 1:
- try:
- ob0 = sel[0].tissue_tessellate.generator
- ob1 = sel[0].tissue_tessellate.component
- self.generator = ob0.name
- self.component = ob1.name
- bool_working = True
- bool_allowed = True
- except:
- pass
+ ob = context.object
+ sel = context.selected_objects
if len(sel) == 2:
bool_allowed = True
for o in sel:
- if o.type not in allowed_obj:
+ if o.type not in allowed_objects():
bool_allowed = False
- if len(sel) != 2 and not bool_working:
- layout = self.layout
- layout.label(icon='INFO')
- layout.label(text="Please, select two different objects")
- layout.label(text="Select first the Component object, then select")
- layout.label(text="the Base object.")
- elif not bool_allowed and not bool_working:
- layout = self.layout
- layout.label(icon='INFO')
- layout.label(text="Only Mesh, Curve, Surface or Text objects are allowed")
- else:
- if ob0 == ob1 == None:
- ob0 = bpy.context.active_object
- self.generator = ob0.name
+ if self.component_mode == 'OBJECT':
+ if len(sel) != 2 and not bool_working:
+ layout = self.layout
+ layout.label(icon='OBJECT_DATA', text='Single Object Component')
+ layout.label(icon='INFO', text="Please, select two different objects. Select first the")
+ layout.label(text="Component object, then select the Base object.")
+ return
+ elif not bool_allowed and not bool_working:
+ layout = self.layout
+ layout.label(icon='OBJECT_DATA', text='Single Object Component')
+ layout.label(icon='ERROR', text="Please, select Mesh, Curve, Surface, Meta or Text")
+ return
+ elif self.component_mode == 'COLLECTION':
+ no_components = True
+ for o in bpy.data.collections[self.component_coll].objects:
+ if o.type in ('MESH', 'CURVE', 'META', 'SURFACE', 'FONT') and o is not ob0:
+ no_components = False
+ break
+ if no_components:
+ layout = self.layout
+ layout.label(icon='OUTLINER_COLLECTION', text='Components from Active Collection')
+ layout.label(icon='INFO', text="The Active Collection does not containt any Mesh,")
+ layout.label(text="Curve, Surface, Meta or Text object.")
+ return
+ elif self.component_mode == 'MATERIALS':
+ no_components = True
+ for mat in ob.material_slots.keys():
+ if mat in bpy.data.objects.keys():
+ if bpy.data.objects[mat].type in allowed_objects():
+ no_components = False
+ break
+ if no_components:
+ layout = self.layout
+ layout.label(icon='INFO', text='Components from Materials')
+ layout.label(text="Can't find any object according to the materials name.")
+ return
+
+ if ob0 == ob1 == None:
+ ob0 = context.object
+ self.generator = ob0.name
+ if self.component_mode == 'OBJECT':
for o in sel:
if o != ob0:
ob1 = o
@@ -1962,250 +1594,121 @@ class tessellate(Operator):
self.no_component = False
break
- # new object name
- if self.object_name == "":
- if self.generator == "":
- self.object_name = "Tessellation"
- else:
- #self.object_name = self.generator + "_Tessellation"
- self.object_name = "Tessellation"
-
- layout = self.layout
- # Base and Component
- col = layout.column(align=True)
- row = col.row(align=True)
- row.label(text="BASE : " + self.generator)
- row.label(text="COMPONENT : " + self.component)
-
- # Base Modifiers
- row = col.row(align=True)
- col2 = row.column(align=True)
- col2.prop(self, "gen_modifiers", text="Use Modifiers", icon='MODIFIER')
- base = bpy.data.objects[self.generator]
- try:
- if not (base.modifiers or base.data.shape_keys):
- col2.enabled = False
- self.gen_modifiers = False
- except:
- col2.enabled = False
- self.gen_modifiers = False
+ # new object name
+ if self.object_name == "":
+ if self.generator == "":
+ self.object_name = "Tessellation"
+ else:
+ #self.object_name = self.generator + "_Tessellation"
+ self.object_name = "Tessellation"
- # Component Modifiers
- row.separator()
- col3 = row.column(align=True)
- col3.prop(self, "com_modifiers", text="Use Modifiers", icon='MODIFIER')
+ layout = self.layout
+ # Base and Component
+ col = layout.column(align=True)
+ #col.prop(self, "copy_settings")
+ row = col.row(align=True)
+ row.label(text="Base : " + self.generator, icon='OBJECT_DATA')
+ if self.component_mode == 'OBJECT':
+ row.label(text="Component : " + self.component, icon='OBJECT_DATA')
+ elif self.component_mode == 'COLLECTION':
+ row.label(text="Collection : " + self.component_coll, icon='OUTLINER_COLLECTION')
+ elif self.component_mode == 'MATERIALS':
+ row.label(text="Multiple Components", icon='MATERIAL')
+
+ # Base Modifiers
+ row = col.row(align=True)
+ col2 = row.column(align=True)
+ col2.prop(self, "gen_modifiers", text="Use Modifiers", icon='MODIFIER')
+ base = bpy.data.objects[self.generator]
+
+ # Component Modifiers
+ row.separator()
+ col3 = row.column(align=True)
+ col3.prop(self, "com_modifiers", text="Use Modifiers", icon='MODIFIER')
+ if self.component_mode == 'OBJECT':
component = bpy.data.objects[self.component]
- try:
- if not (component.modifiers or component.data.shape_keys):
- col3.enabled = False
- self.com_modifiers = False
- except:
- col3.enabled = False
- self.com_modifiers = False
+ col.separator()
+ # Fill and Rotation
+ row = col.row(align=True)
+ row.label(text="Fill Mode:")
+ row = col.row(align=True)
+ row.prop(
+ self, "fill_mode", icon='NONE', expand=True,
+ slider=True, toggle=False, icon_only=False, event=False,
+ full_event=False, emboss=True, index=-1)
+ row = col.row(align=True)
+ # merge settings
+ row.prop(self, "merge")
+ row.prop(self, "bool_smooth")
+
+ # frame settings
+ if self.fill_mode == 'FRAME':
col.separator()
- # Fill and Rotation
+ col.label(text="Frame Settings:")
row = col.row(align=True)
- row.label(text="Fill Mode:")
- row.label(text="Rotation:")
+ row.prop(self, "frame_mode", expand=True)
+ col.prop(self, "frame_thickness", text='Thickness', icon='NONE')
+ col.separator()
row = col.row(align=True)
- #col2 = row.column(align=True)
- row.prop(
- self, "fill_mode", text="", icon='NONE', expand=False,
- slider=True, toggle=False, icon_only=False, event=False,
- full_event=False, emboss=True, index=-1)
-
- # Rotation
- row.separator()
+ row.prop(self, "fill_frame", icon='NONE')
+ show_frame_mat = self.component_mode == 'MATERIALS' or self.bool_material_id
col2 = row.column(align=True)
- col2.prop(
- self, "rotation_mode", text="", icon='NONE', expand=False,
- slider=True, toggle=False, icon_only=False, event=False,
- full_event=False, emboss=True, index=-1)
- if self.rotation_mode == 'RANDOM':
- col2.prop(self, "random_seed")
-
- if self.rotation_mode == 'UV':
- uv_error = False
- if self.fill_mode == 'FAN':
- row = col.row(align=True)
- row.label(text="UV rotation doesn't work in FAN mode",
- icon='ERROR')
- uv_error = True
+ col2.prop(self, "fill_frame_mat", icon='NONE')
+ col2.enabled = self.fill_frame and show_frame_mat
+ row = col.row(align=True)
+ row.prop(self, "frame_boundary", text='Boundary', icon='NONE')
+ col2 = row.column(align=True)
+ col2.prop(self, "boundary_mat_offset", icon='NONE')
+ col2.enabled = self.frame_boundary and show_frame_mat
- if ob0.type != 'MESH':
+ if self.rotation_mode == 'UV':
+ uv_error = False
+ if ob0.type != 'MESH':
+ row = col.row(align=True)
+ row.label(
+ text="UV rotation supported only for Mesh objects",
+ icon='ERROR')
+ uv_error = True
+ else:
+ if len(ob0.data.uv_layers) == 0:
row = col.row(align=True)
- row.label(
- text="UV rotation supported only for Mesh objects",
- icon='ERROR')
+ check_name = self.generator
+ row.label(text="'" + check_name +
+ "' doesn't have UV Maps", icon='ERROR')
uv_error = True
- else:
- if len(ob0.data.uv_layers) == 0:
- row = col.row(align=True)
- check_name = self.generator
- row.label(text="'" + check_name +
- "' doesn't have UV Maps", icon='ERROR')
- uv_error = True
- if uv_error:
- row = col.row(align=True)
- row.label(text="Default rotation will be used instead",
- icon='INFO')
-
- # Component XY
- row = col.row(align=True)
- row.label(text="Component Coordinates:")
- row = col.row(align=True)
- row.prop(
- self, "mode", text="Component XY", icon='NONE', expand=True,
- slider=False, toggle=False, icon_only=False, event=False,
- full_event=False, emboss=True, index=-1)
-
- if self.mode != 'BOUNDS':
- col.separator()
+ if uv_error:
row = col.row(align=True)
- row.label(text="X:")
- row.prop(
- self, "bounds_x", text="Bounds X", icon='NONE', expand=True,
- slider=False, toggle=False, icon_only=False, event=False,
- full_event=False, emboss=True, index=-1)
+ row.label(text="Default rotation will be used instead",
+ icon='INFO')
- row = col.row(align=True)
- row.label(text="Y:")
- row.prop(
- self, "bounds_y", text="Bounds X", icon='NONE', expand=True,
- slider=False, toggle=False, icon_only=False, event=False,
- full_event=False, emboss=True, index=-1)
-
- # Component Z
- col.label(text="Thickness:")
+ # Component Z
+ col.separator()
+ col.label(text="Thickness:")
+ row = col.row(align=True)
+ row.prop(
+ self, "scale_mode", text="Scale Mode", icon='NONE', expand=True,
+ slider=False, toggle=False, icon_only=False, event=False,
+ full_event=False, emboss=True, index=-1)
+ col.prop(
+ self, "zscale", text="Scale", icon='NONE', expand=False,
+ slider=True, toggle=False, icon_only=False, event=False,
+ full_event=False, emboss=True, index=-1)
+ if self.mode == 'BOUNDS':
row = col.row(align=True)
row.prop(
- self, "scale_mode", text="Scale Mode", icon='NONE', expand=True,
- slider=False, toggle=False, icon_only=False, event=False,
- full_event=False, emboss=True, index=-1)
- col.prop(
- self, "zscale", text="Scale", icon='NONE', expand=False,
+ self, "offset", text="Offset", icon='NONE', expand=False,
slider=True, toggle=False, icon_only=False, event=False,
full_event=False, emboss=True, index=-1)
- if self.mode == 'BOUNDS':
- col.prop(
- self, "offset", text="Offset", icon='NONE', expand=False,
- slider=True, toggle=False, icon_only=False, event=False,
- full_event=False, emboss=True, index=-1)
-
- # Direction
- row = col.row(align=True)
- row.label(text="Direction:")
- row = col.row(align=True)
- row.prop(
- self, "normals_mode", text="Direction", icon='NONE', expand=True,
- slider=False, toggle=False, icon_only=False, event=False,
- full_event=False, emboss=True, index=-1)
- row.enabled = self.fill_mode != 'PATCH'
-
- # Merge
- col = layout.column(align=True)
- row = col.row(align=True)
- row.prop(self, "merge")
- if self.merge:
- row.prop(self, "merge_thres")
- row = col.row(align=True)
-
- row = col.row(align=True)
- row.prop(self, "bool_smooth")
- if self.merge:
- col2 = row.column(align=True)
- col2.prop(self, "bool_dissolve_seams")
- #if ob1.type != 'MESH': col2.enabled = False
-
- row = col.row(align=True)
- row.prop(self, "cap_faces")
- if self.cap_faces:
- col2 = row.column(align=True)
- col2.prop(self, "open_edges_crease", text="Crease")
-
- # Advanced Settings
- col = layout.column(align=True)
- col.separator()
- col.separator()
- row = col.row(align=True)
- row.prop(self, "bool_advanced", icon='SETTINGS')
- if self.bool_advanced:
- allow_multi = False
- allow_shapekeys = not self.com_modifiers
- for m in ob0.data.materials:
- try:
- o = bpy.data.objects[m.name]
- allow_multi = True
- try:
- if o.data.shape_keys is None: continue
- elif len(o.data.shape_keys.key_blocks) < 2: continue
- else: allow_shapekeys = not self.com_modifiers
- except: pass
- except: pass
- # DATA #
- col = layout.column(align=True)
- col.label(text="Morphing:")
- # vertex group + shape keys
- row = col.row(align=True)
- col2 = row.column(align=True)
- col2.prop(self, "bool_vertex_group", icon='GROUP_VERTEX')
- #col2.prop_search(props, "vertex_group", props.generator, "vertex_groups")
- try:
- if len(ob0.vertex_groups) == 0:
- col2.enabled = False
- except:
- col2.enabled = False
- row.separator()
- col2 = row.column(align=True)
- row2 = col2.row(align=True)
- row2.prop(self, "bool_shapekeys", text="Use Shape Keys", icon='SHAPEKEY_DATA')
- row2.enabled = allow_shapekeys
-
- # LIMITED TESSELLATION
- col = layout.column(align=True)
- col.label(text="Limited Tessellation:")
- row = col.row(align=True)
- col2 = row.column(align=True)
- col2.prop(self, "bool_multi_components", icon='MOD_TINT')
- if not allow_multi:
- col2.enabled = False
- self.bool_multi_components = False
- col.separator()
- row = col.row(align=True)
- col2 = row.column(align=True)
- col2.prop(self, "bool_selection", text="On selected Faces", icon='RESTRICT_SELECT_OFF')
- #if self.bool_material_id or self.bool_selection or self.bool_multi_components:
- #col2 = row.column(align=True)
- # col2.prop(self, "bool_combine")
- row.separator()
- if ob0.type != 'MESH':
- col2.enabled = False
- col2 = row.column(align=True)
- col2.prop(self, "bool_material_id", icon='MATERIAL_DATA', text="Material ID")
- if self.bool_material_id and not self.bool_multi_components:
- #col2 = row.column(align=True)
- col2.prop(self, "material_id")
- col2.enabled = not self.bool_multi_components
+ row.enabled = not self.use_origin_offset
+ col.separator()
+ col.label(text="More settings in the Object Data Properties panel...", icon='PROPERTIES')
- col.separator()
- row = col.row(align=True)
- row.label(text='Reiterate Tessellation:', icon='FILE_REFRESH')
- row.prop(self, 'iterations', text='Repeat', icon='SETTINGS')
-
- col.separator()
- row = col.row(align=True)
- row.label(text='Combine Iterations:')
- row = col.row(align=True)
- row.prop(
- self, "combine_mode", icon='NONE', expand=True,
- slider=False, toggle=False, icon_only=False, event=False,
- full_event=False, emboss=True, index=-1)
def execute(self, context):
- allowed_obj = ('MESH', 'CURVE', 'META', 'SURFACE', 'FONT')
try:
ob0 = bpy.data.objects[self.generator]
- ob1 = bpy.data.objects[self.component]
+ if self.component_mode == 'OBJECT':
+ ob1 = bpy.data.objects[self.component]
except:
return {'CANCELLED'}
@@ -2220,100 +1723,160 @@ class tessellate(Operator):
self.object_name = test_name
break
count_name += 1
+ if self.component_mode == 'OBJECT':
+ if ob1.type not in allowed_objects():
+ message = "Component must be Mesh, Curve, Surface, Text or Meta object!"
+ self.report({'ERROR'}, message)
+ self.component = None
- if ob1.type not in allowed_obj:
- message = "Component must be Mesh, Curve, Surface, Text or Meta object!"
- self.report({'ERROR'}, message)
- self.component = None
-
- if ob0.type not in allowed_obj:
+ if ob0.type not in allowed_objects():
message = "Generator must be Mesh, Curve, Surface, Text or Meta object!"
self.report({'ERROR'}, message)
self.generator = ""
- if True:#self.component not in ("",None) and self.generator not in ("",None):
- if bpy.ops.object.select_all.poll():
- bpy.ops.object.select_all(action='TOGGLE')
- bpy.ops.object.mode_set(mode='OBJECT')
+ if bpy.ops.object.select_all.poll():
+ bpy.ops.object.select_all(action='TOGGLE')
+ bpy.ops.object.mode_set(mode='OBJECT')
- #data0 = ob0.to_mesh(False)
- #data0 = ob0.data.copy()
- bool_update = False
- if bpy.context.object == ob0:
- auto_layer_collection()
- #new_ob = bpy.data.objects.new(self.object_name, data0)
- new_ob = convert_object_to_mesh(ob0,False,False)
- new_ob.data.name = self.object_name
- #bpy.context.collection.objects.link(new_ob)
- #bpy.context.view_layer.objects.active = new_ob
- new_ob.name = self.object_name
- #new_ob.select_set(True)
- else:
- new_ob = bpy.context.object
- bool_update = True
- new_ob = store_parameters(self, new_ob)
- try: bpy.ops.object.update_tessellate()
- except RuntimeError as e:
- bpy.data.objects.remove(new_ob)
- self.report({'ERROR'}, str(e))
- return {'CANCELLED'}
- if not bool_update:
- self.object_name = new_ob.name
- #self.working_on = self.object_name
- new_ob.location = ob0.location
- new_ob.matrix_world = ob0.matrix_world
+ bool_update = False
+ if context.object == ob0:
+ auto_layer_collection()
+ new_ob = convert_object_to_mesh(ob0,False,False)
+ new_ob.data.name = self.object_name
+ new_ob.name = self.object_name
+ else:
+ new_ob = context.object
+ bool_update = True
+ new_ob = store_parameters(self, new_ob)
+ new_ob.tissue.tissue_type = 'TESSELLATE'
+ try: bpy.ops.object.tissue_update_tessellate()
+ except RuntimeError as e:
+ bpy.data.objects.remove(new_ob)
+ remove_temp_objects()
+ self.report({'ERROR'}, str(e))
+ return {'CANCELLED'}
+ if not bool_update:
+ self.object_name = new_ob.name
+ #self.working_on = self.object_name
+ new_ob.location = ob0.location
+ new_ob.matrix_world = ob0.matrix_world
+
+ # Assign collection of the base object
+ old_coll = new_ob.users_collection
+ if old_coll != ob0.users_collection:
+ for c in old_coll:
+ c.objects.unlink(new_ob)
+ for c in ob0.users_collection:
+ c.objects.link(new_ob)
+ context.view_layer.objects.active = new_ob
- return {'FINISHED'}
+ return {'FINISHED'}
def invoke(self, context, event):
return context.window_manager.invoke_props_dialog(self)
-class update_tessellate(Operator):
- bl_idname = "object.update_tessellate"
- bl_label = "Refresh"
+class tissue_update_tessellate_deps(Operator):
+ bl_idname = "object.tissue_update_tessellate_deps"
+ bl_label = "Tissue Refresh"
bl_description = ("Fast update the tessellated mesh according to base and "
- "component changes")
+ "component changes.")
bl_options = {'REGISTER', 'UNDO'}
go = False
@classmethod
def poll(cls, context):
- #try:
- try: #context.object == None: return False
- return context.object.tissue_tessellate.generator != None and \
- context.object.tissue_tessellate.component != None
+ try:
+ return context.object.tissue.tissue_type != 'NONE'
except:
return False
- @staticmethod
- def check_gen_comp(checking):
+ #@staticmethod
+ #def check_gen_comp(checking):
# note pass the stored name key in here to check it out
- return checking in bpy.data.objects.keys()
+ # return checking in bpy.data.objects.keys()
def execute(self, context):
+
+ active_ob = context.object
+ selected_objects = context.selected_objects
+
+ ### TO-DO: sorting according to dependencies
+ update_objects = [o for o in selected_objects if o.tissue.tissue_type != 'NONE']
+ for ob in selected_objects:
+ update_objects = list(reversed(update_dependencies(ob, update_objects)))
+ #update_objects = list(reversed(update_dependencies(ob, [ob])))
+ for o in update_objects:
+ override = {
+ 'object': o,
+ 'selected_objects' : [o]
+ }
+ if o.type == 'MESH':
+ try:
+ bpy.ops.object.tissue_update_tessellate(override)
+ except:
+ self.report({'ERROR'}, "Can't Tessellate :-(")
+ else:
+ try:
+ bpy.ops.object.tissue_convert_to_curve_update(override)
+ except:
+ self.report({'ERROR'}, "Can't compute Curve :-(")
+
+ context.view_layer.objects.active = active_ob
+ for o in context.view_layer.objects:
+ o.select_set(o in selected_objects)
+
+ return {'FINISHED'}
+
+
+class tissue_update_tessellate(Operator):
+ bl_idname = "object.tissue_update_tessellate"
+ bl_label = "Tissue Refresh Simple"
+ bl_description = ("Fast update the tessellated mesh according to base and "
+ "component changes. Does not update dependencies")
+ bl_options = {'REGISTER', 'UNDO'}
+
+ go = False
+
+ @classmethod
+ def poll(cls, context):
+ try:
+ ob = context.object
+ return ob.tissue.tissue_type == 'TESSELLATE'
+ except:
+ return False
+
+ def execute(self, context):
+
+ tissue_time(None,'Tissue: Tessellating...', levels=0)
start_time = time.time()
- ob = bpy.context.object
+
+ ob = context.object
+ tess_props = props_to_dict(ob)
if not self.go:
generator = ob.tissue_tessellate.generator
component = ob.tissue_tessellate.component
zscale = ob.tissue_tessellate.zscale
scale_mode = ob.tissue_tessellate.scale_mode
rotation_mode = ob.tissue_tessellate.rotation_mode
+ rotation_shift = ob.tissue_tessellate.rotation_shift
+ rotation_direction = ob.tissue_tessellate.rotation_direction
offset = ob.tissue_tessellate.offset
merge = ob.tissue_tessellate.merge
+ merge_open_edges_only = ob.tissue_tessellate.merge_open_edges_only
merge_thres = ob.tissue_tessellate.merge_thres
+ mode = ob.tissue_tessellate.mode
gen_modifiers = ob.tissue_tessellate.gen_modifiers
com_modifiers = ob.tissue_tessellate.com_modifiers
bool_random = ob.tissue_tessellate.bool_random
- random_seed = ob.tissue_tessellate.random_seed
+ rand_seed = ob.tissue_tessellate.rand_seed
+ rand_step = ob.tissue_tessellate.rand_step
fill_mode = ob.tissue_tessellate.fill_mode
bool_vertex_group = ob.tissue_tessellate.bool_vertex_group
bool_selection = ob.tissue_tessellate.bool_selection
bool_shapekeys = ob.tissue_tessellate.bool_shapekeys
- mode = ob.tissue_tessellate.mode
bool_smooth = ob.tissue_tessellate.bool_smooth
bool_materials = ob.tissue_tessellate.bool_materials
bool_dissolve_seams = ob.tissue_tessellate.bool_dissolve_seams
@@ -2323,26 +1886,67 @@ class update_tessellate(Operator):
bool_combine = ob.tissue_tessellate.bool_combine
normals_mode = ob.tissue_tessellate.normals_mode
bool_advanced = ob.tissue_tessellate.bool_advanced
- bool_multi_components = ob.tissue_tessellate.bool_multi_components
+ #bool_multi_components = ob.tissue_tessellate.bool_multi_components
combine_mode = ob.tissue_tessellate.combine_mode
bounds_x = ob.tissue_tessellate.bounds_x
bounds_y = ob.tissue_tessellate.bounds_y
cap_faces = ob.tissue_tessellate.cap_faces
+ close_mesh = ob.tissue_tessellate.close_mesh
open_edges_crease = ob.tissue_tessellate.open_edges_crease
-
+ bridge_edges_crease = ob.tissue_tessellate.bridge_edges_crease
+ bridge_smoothness = ob.tissue_tessellate.bridge_smoothness
+ frame_thickness = ob.tissue_tessellate.frame_thickness
+ frame_mode = ob.tissue_tessellate.frame_mode
+ frame_boundary = ob.tissue_tessellate.frame_boundary
+ fill_frame = ob.tissue_tessellate.fill_frame
+ boundary_mat_offset = ob.tissue_tessellate.boundary_mat_offset
+ fill_frame_mat = ob.tissue_tessellate.fill_frame_mat
+ bridge_cuts = ob.tissue_tessellate.bridge_cuts
+ cap_material_offset = ob.tissue_tessellate.cap_material_offset
+ bridge_material_offset = ob.tissue_tessellate.bridge_material_offset
+ patch_subs = ob.tissue_tessellate.patch_subs
+ use_origin_offset = ob.tissue_tessellate.use_origin_offset
+ vertex_group_thickness = ob.tissue_tessellate.vertex_group_thickness
+ invert_vertex_group_thickness = ob.tissue_tessellate.invert_vertex_group_thickness
+ vertex_group_thickness_factor = ob.tissue_tessellate.vertex_group_thickness_factor
+ vertex_group_distribution = ob.tissue_tessellate.vertex_group_distribution
+ invert_vertex_group_distribution = ob.tissue_tessellate.invert_vertex_group_distribution
+ vertex_group_distribution_factor = ob.tissue_tessellate.vertex_group_distribution_factor
+ vertex_group_cap_owner = ob.tissue_tessellate.vertex_group_cap_owner
+ vertex_group_cap = ob.tissue_tessellate.vertex_group_cap
+ invert_vertex_group_cap = ob.tissue_tessellate.invert_vertex_group_cap
+ vertex_group_bridge_owner = ob.tissue_tessellate.vertex_group_bridge_owner
+ vertex_group_bridge = ob.tissue_tessellate.vertex_group_bridge
+ invert_vertex_group_bridge = ob.tissue_tessellate.invert_vertex_group_bridge
+ vertex_group_rotation = ob.tissue_tessellate.vertex_group_rotation
+ invert_vertex_group_rotation = ob.tissue_tessellate.invert_vertex_group_rotation
+ vertex_group_smooth_normals = ob.tissue_tessellate.vertex_group_smooth_normals
+ invert_vertex_group_smooth_normals = ob.tissue_tessellate.invert_vertex_group_smooth_normals
+ target = ob.tissue_tessellate.target
+ even_thickness = ob.tissue_tessellate.even_thickness
+ even_thickness_iter = ob.tissue_tessellate.even_thickness_iter
+ component_mode = ob.tissue_tessellate.component_mode
+ component_coll = ob.tissue_tessellate.component_coll
+ coll_rand_seed = ob.tissue_tessellate.coll_rand_seed
try:
generator.name
- component.name
+ if component_mode == 'OBJECT':
+ component.name
except:
self.report({'ERROR'},
- "Active object must be Tessellate before Update")
+ "Active object must be Tessellated before Update")
return {'CANCELLED'}
+ # reset messages
+ ob.tissue_tessellate.warning_message_merge = ''
+
+ tess_props = props_to_dict(ob)
+
# Solve Local View issues
local_spaces = []
local_ob0 = []
local_ob1 = []
- for area in bpy.context.screen.areas:
+ for area in context.screen.areas:
for space in area.spaces:
try:
if ob.local_view_get(space):
@@ -2354,196 +1958,258 @@ class update_tessellate(Operator):
except:
pass
- starting_mode = bpy.context.object.mode
+ starting_mode = context.object.mode
+
#if starting_mode == 'PAINT_WEIGHT': starting_mode = 'WEIGHT_PAINT'
- bpy.ops.object.mode_set(mode='OBJECT')
+ if bpy.ops.object.mode_set.poll():
+ bpy.ops.object.mode_set(mode='OBJECT')
ob0 = generator
ob1 = component
- auto_layer_collection()
+ ##### auto_layer_collection()
ob0_hide = ob0.hide_get()
ob0_hidev = ob0.hide_viewport
ob0_hider = ob0.hide_render
- ob1_hide = ob1.hide_get()
- ob1_hidev = ob1.hide_viewport
- ob1_hider = ob1.hide_render
ob0.hide_set(False)
ob0.hide_viewport = False
ob0.hide_render = False
- ob1.hide_set(False)
- ob1.hide_viewport = False
- ob1.hide_render = False
+ if component_mode == 'OBJECT':
+ ob1_hide = ob1.hide_get()
+ ob1_hidev = ob1.hide_viewport
+ ob1_hider = ob1.hide_render
+ ob1.hide_set(False)
+ ob1.hide_viewport = False
+ ob1.hide_render = False
+
+ components = []
+ if component_mode == 'COLLECTION':
+ dict_components = {}
+ meta_object = True
+ for _ob1 in component_coll.objects:
+ if _ob1 == ob: continue
+ if _ob1.type in ('MESH', 'CURVE','SURFACE','FONT','META'):
+ if _ob1.type == 'META':
+ if meta_object: meta_object = False
+ else: continue
+ dict_components[_ob1.name] = _ob1
+ for k in sorted(dict_components):
+ components.append(dict_components[k])
+ elif component_mode == 'OBJECT':
+ components.append(ob1)
if ob0.type == 'META':
base_ob = convert_object_to_mesh(ob0, False, True)
else:
base_ob = ob0.copy()
- base_ob.data = ob0.data.copy()
- bpy.context.collection.objects.link(base_ob)
+ base_ob.data = ob0.data
+ context.collection.objects.link(base_ob)
+ base_ob.name = '_tissue_tmp_base'
# In Blender 2.80 cache of copied objects is lost, must be re-baked
bool_update_cloth = False
for m in base_ob.modifiers:
if m.type == 'CLOTH':
- m.point_cache.frame_end = bpy.context.scene.frame_current
+ m.point_cache.frame_end = context.scene.frame_current
bool_update_cloth = True
if bool_update_cloth:
- bpy.ops.ptcache.free_bake_all()
- bpy.ops.ptcache.bake_all()
-
- #new_ob.location = ob.location
- #new_ob.matrix_world = ob.matrix_world
+ scene = context.scene
+ for mod in base_ob.modifiers:
+ if mod.type == 'CLOTH':
+ override = {'scene': scene, 'active_object': base_ob, 'point_cache': mod.point_cache}
+ bpy.ops.ptcache.bake(override, bake=True)
+ break
base_ob.modifiers.update()
- bpy.ops.object.select_all(action='DESELECT')
+
+ # clear vertex groups before creating new ones
+ if ob not in components: ob.vertex_groups.clear()
+
+ if bool_selection:
+ faces = base_ob.data.polygons
+ selections = [False]*len(faces)
+ faces.foreach_get('select',selections)
+ selections = np.array(selections)
+ if not selections.any():
+ message = "There are no faces selected."
+ context.view_layer.objects.active = ob
+ ob.select_set(True)
+ bpy.ops.object.mode_set(mode=starting_mode)
+ remove_temp_objects()
+ self.report({'ERROR'}, message)
+ return {'CANCELLED'}
+
iter_objects = [base_ob]
- #base_ob = new_ob#.copy()
+ ob_location = ob.location
+ ob_matrix_world = ob.matrix_world
+
+ #if ob not in components:
+ ob.data.clear_geometry() # Faster with heavy geometries (from previous tessellations)
for iter in range(iterations):
+ tess_props['generator'] = base_ob
+
+ if iter > 0 and len(iter_objects) == 0: break
+ if iter > 0 and normals_mode in ('SHAPEKEYS','OBJECT'):
+ tess_props['normals_mode'] = 'VERTS'
same_iteration = []
matched_materials = []
- if bool_multi_components: mat_iter = len(base_ob.material_slots)
- else: mat_iter = 1
- for m_id in range(mat_iter):
- if bool_multi_components:
- try:
- mat = base_ob.material_slots[m_id].material
- ob1 = bpy.data.objects[mat.name]
- material_id = m_id
- matched_materials.append(m_id)
- bool_material_id = True
- except:
- continue
- if com_modifiers:
- data1 = simple_to_mesh(ob1)
- else: data1 = ob1.data.copy()
- n_edges1 = len(data1.edges)
-
- if iter != 0: gen_modifiers = True
- if fill_mode == 'PATCH':
- new_ob = tessellate_patch(
- base_ob, ob1, offset, zscale, com_modifiers, mode, scale_mode,
- rotation_mode, random_seed, bool_vertex_group,
- bool_selection, bool_shapekeys, bool_material_id, material_id,
- bounds_x, bounds_y
- )
- else:
- new_ob = tessellate_original(
- base_ob, ob1, offset, zscale, gen_modifiers,
- com_modifiers, mode, scale_mode, rotation_mode,
- random_seed, fill_mode, bool_vertex_group,
- bool_selection, bool_shapekeys, bool_material_id,
- material_id, normals_mode, bounds_x, bounds_y
- )
- if type(new_ob) is bpy.types.Object:
- bpy.context.view_layer.objects.active = new_ob
- else:
- continue
- n_components = int(len(new_ob.data.edges) / n_edges1)
- # SELECTION
- if bool_selection:
- try:
- # create selection list
- polygon_selection = [p.select for p in ob1.data.polygons] * int(
- len(new_ob.data.polygons) / len(ob1.data.polygons))
- new_ob.data.polygons.foreach_set("select", polygon_selection)
- except:
- pass
-
- if type(new_ob) == str: break
-
- if bool_multi_components and type(new_ob) not in (int,str):
- same_iteration.append(new_ob)
- new_ob.select_set(True)
- bpy.context.view_layer.objects.active = new_ob
-
- if type(new_ob) == str: break
-
- #bpy.data.objects.remove(base_ob)
- if bool_multi_components:
- bpy.context.view_layer.update()
- bpy.context.view_layer.objects.active.select_set(True)
- for o in bpy.data.objects:
- if o in same_iteration:
- o.select_set(True)
- o.location = ob.location
+
+ if component_mode == 'MATERIALS':
+ components = []
+ objects_keys = bpy.data.objects.keys()
+ for mat_slot in base_ob.material_slots:
+ mat_name = mat_slot.material.name
+ if mat_name in objects_keys:
+ ob1 = bpy.data.objects[mat_name]
+ if ob1.type in ('MESH', 'CURVE','SURFACE','FONT','META'):
+ components.append(bpy.data.objects[mat_name])
+ matched_materials.append(mat_name)
+ else:
+ components.append(None)
else:
- try:
- o.select_set(False)
- except: pass
- bpy.ops.object.join()
- new_ob = bpy.context.view_layer.objects.active
+ components.append(None)
+ tess_props['component'] = components
+ # patch subdivisions for additional iterations
+ if iter > 0 and fill_mode == 'PATCH':
+ temp_mod = base_ob.modifiers.new('Tissue_Subsurf', type='SUBSURF')
+ temp_mod.levels = patch_subs
+
+ # patch tessellation
+ tissue_time(None,"Tessellate iteration...",levels=1)
+ tt = time.time()
+ same_iteration = tessellate_patch(tess_props)
+ tissue_time(tt, "Tessellate iteration",levels=1)
+
+ tt = time.time()
+
+ # if empty or error, continue
+ #if type(same_iteration) != list:#is not bpy.types.Object and :
+ # return {'CANCELLED'}
+
+ for id, new_ob in enumerate(same_iteration):
+ # rename, make active and change transformations
+ new_ob.name = '_tissue_tmp_{}_{}'.format(iter,id)
new_ob.select_set(True)
- new_ob.data.update()
+ context.view_layer.objects.active = new_ob
+ new_ob.location = ob_location
+ new_ob.matrix_world = ob_matrix_world
+
+ base_ob.location = ob_location
+ base_ob.matrix_world = ob_matrix_world
+ # join together multiple components iterations
+ if type(same_iteration) == list:
+ if len(same_iteration) == 0:
+ remove_temp_objects()
+ tissue_time(None,"Can't Tessellate :-(",levels=0)
+ return {'CANCELLED'}
+ if len(same_iteration) > 1:
+ #join_objects(context, same_iteration)
+ new_ob = join_objects(same_iteration)
+
+ if type(same_iteration) in (int,str):
+ new_ob = same_iteration
+ if iter == 0:
+ try:
+ bpy.data.objects.remove(iter_objects[0])
+ iter_objects = []
+ except: continue
+ continue
- #try:
- # combine object
+ # Clean last iteration, needed for combine object
if (bool_selection or bool_material_id) and combine_mode == 'UNUSED':
# remove faces from last mesh
bm = bmesh.new()
-
- last_mesh = iter_objects[-1].data.copy()
-
+ if (fill_mode == 'PATCH' or gen_modifiers) and iter == 0:
+ last_mesh = simple_to_mesh(base_ob)#(ob0)
+ else:
+ last_mesh = iter_objects[-1].data.copy()
bm.from_mesh(last_mesh)
bm.faces.ensure_lookup_table()
- if bool_multi_components:
+ if component_mode == 'MATERIALS':
remove_materials = matched_materials
elif bool_material_id:
remove_materials = [material_id]
else: remove_materials = []
if bool_selection:
- remove_faces = [f for f in bm.faces if f.material_index in remove_materials and f.select]
+ if component_mode == 'MATERIALS' or bool_material_id:
+ remove_faces = [f for f in bm.faces if f.material_index in remove_materials and f.select]
+ else:
+ remove_faces = [f for f in bm.faces if f.select]
else:
remove_faces = [f for f in bm.faces if f.material_index in remove_materials]
bmesh.ops.delete(bm, geom=remove_faces, context='FACES')
bm.to_mesh(last_mesh)
+ bm.free()
last_mesh.update()
+ last_mesh.name = '_tissue_tmp_previous_unused'
+ # delete previous iteration if empty or update it
if len(last_mesh.vertices) > 0:
iter_objects[-1].data = last_mesh.copy()
iter_objects[-1].data.update()
else:
bpy.data.objects.remove(iter_objects[-1])
iter_objects = iter_objects[:-1]
-
+ # set new base object for next iteration
base_ob = convert_object_to_mesh(new_ob,True,True)
- #bpy.context.collection.objects.unlink(base_ob)
if iter < iterations-1: new_ob.data = base_ob.data
-
+ # store new iteration and set transformations
iter_objects.append(new_ob)
- new_ob.location = ob.location
- new_ob.matrix_world = ob.matrix_world
- try:
- bpy.data.objects.remove(bpy.data.objects['_Tessellation_Base'])
- except: pass
- base_ob.name = "_Tessellation_Base"
+ base_ob.name = '_tissue_tmp_base'
elif combine_mode == 'ALL':
base_ob = new_ob.copy()
- iter_objects.append(new_ob)
- new_ob.location = ob.location
- new_ob.matrix_world = ob.matrix_world
+ iter_objects = [new_ob] + iter_objects
else:
if base_ob != new_ob:
bpy.data.objects.remove(base_ob)
base_ob = new_ob
iter_objects = [new_ob]
+ if iter > 0:# and fill_mode == 'PATCH':
+ base_ob.modifiers.clear()#remove(temp_mod)
+
+ # Combine
+ if combine_mode != 'LAST' and len(iter_objects) > 1:
+ if base_ob not in iter_objects and type(base_ob) == bpy.types.Object:
+ bpy.data.objects.remove(base_ob)
+ new_ob = join_objects(iter_objects)
+ new_ob.modifiers.clear()
+ iter_objects = [new_ob]
+
+ tissue_time(tt, "Combine tessellations", levels=1)
+
+ if merge:
+ new_ob.active_shape_key_index = 0
+ use_bmesh = not (bool_shapekeys and fill_mode == 'PATCH' and component_mode != 'OBJECT')
+ merged = merge_components(new_ob, ob.tissue_tessellate, use_bmesh)
+ if merged == 'bridge_error':
+ message = "Can't make the bridge!"
+ ob.tissue_tessellate.warning_message_merge = message
+
+ base_ob = new_ob #context.view_layer.objects.active
+
+ tt = time.time()
+
if new_ob == 0:
- #for m, vis in zip(ob.modifiers, mod_visibility): m.show_viewport = vis
- message = "Zero faces selected in the Base mesh!"
+ #bpy.data.objects.remove(base_ob.data)
+ try: bpy.data.objects.remove(base_ob)
+ except: pass
+ message = "The generated object is an empty geometry!"
+ context.view_layer.objects.active = ob
+ ob.select_set(True)
bpy.ops.object.mode_set(mode=starting_mode)
self.report({'ERROR'}, message)
return {'CANCELLED'}
errors = {}
errors["modifiers_error"] = "Modifiers that change the topology of the mesh \n" \
"after the last Subsurf (or Multires) are not allowed."
- errors["topology_error"] = "Make sure that the topology of the mesh before \n" \
- "the last Subsurf (or Multires) is quads only."
- errors["wires_error"] = "Please remove all wire edges in the base object."
- errors["verts_error"] = "Please remove all floating vertices in the base object"
if new_ob in errors:
- for o in iter_objects: bpy.data.objects.remove(o)
- bpy.context.view_layer.objects.active = ob
+ for o in iter_objects:
+ try: bpy.data.objects.remove(o)
+ except: pass
+ try: bpy.data.meshes.remove(data1)
+ except: pass
+ context.view_layer.objects.active = ob
ob.select_set(True)
message = errors[new_ob]
ob.tissue_tessellate.error_message = message
@@ -2551,18 +2217,6 @@ class update_tessellate(Operator):
self.report({'ERROR'}, message)
return {'CANCELLED'}
- new_ob.location = ob.location
- new_ob.matrix_world = ob.matrix_world
-
- ### REPEAT
- if combine_mode != 'LAST' and len(iter_objects)>0:
- if base_ob not in iter_objects: bpy.data.objects.remove(base_ob)
- for o in iter_objects:
- o.location = ob.location
- o.select_set(True)
- bpy.ops.object.join()
- new_ob.data.update()
-
# update data and preserve name
if ob.type != 'MESH':
loc, matr = ob.location, ob.matrix_world
@@ -2570,73 +2224,36 @@ class update_tessellate(Operator):
ob.location, ob.matrix_world = loc, matr
data_name = ob.data.name
old_data = ob.data
- ob.data = new_ob.data
- bpy.data.meshes.remove(old_data)
+ old_data.name = '_tissue_tmp_old_data'
+ #ob.data = bpy.data.meshes.new_from_object(new_ob)#
+ linked_objects = [o for o in bpy.data.objects if o.data == old_data]
+
+ for o in linked_objects:
+ o.data = new_ob.data
+ if len(linked_objects) > 1:
+ copy_tessellate_props(ob, o)
+
+ #ob.data = new_ob.data
ob.data.name = data_name
+ bpy.data.meshes.remove(old_data)
# copy vertex group
- if bool_vertex_group:
- for vg in new_ob.vertex_groups:
- if not vg.name in ob.vertex_groups.keys():
- ob.vertex_groups.new(name=vg.name)
- new_vg = ob.vertex_groups[vg.name]
- for i in range(len(ob.data.vertices)):
- try:
- weight = vg.weight(i)
- except:
- weight = 0
- new_vg.add([i], weight, 'REPLACE')
+ for vg in new_ob.vertex_groups:
+ if not vg.name in ob.vertex_groups.keys():
+ ob.vertex_groups.new(name=vg.name)
- selected_objects = [o for o in bpy.context.selected_objects]
+ selected_objects = [o for o in context.selected_objects]
for o in selected_objects: o.select_set(False)
ob.select_set(True)
- bpy.context.view_layer.objects.active = ob
- bpy.data.objects.remove(new_ob)
+ context.view_layer.objects.active = ob
- if merge:
- bpy.ops.object.mode_set(mode='EDIT')
- bpy.ops.mesh.select_mode(
- use_extend=False, use_expand=False, type='VERT')
- bpy.ops.mesh.select_non_manifold(
- extend=False, use_wire=False, use_boundary=True,
- use_multi_face=False, use_non_contiguous=False, use_verts=False)
-
- bpy.ops.mesh.remove_doubles(
- threshold=merge_thres, use_unselected=False)
-
- bpy.ops.object.mode_set(mode='OBJECT')
- if bool_dissolve_seams:
- bpy.ops.object.mode_set(mode='EDIT')
- bpy.ops.mesh.select_mode(type='EDGE')
- bpy.ops.mesh.select_all(action='DESELECT')
- bpy.ops.object.mode_set(mode='OBJECT')
- for e in ob.data.edges:
- e.select = e.use_seam
- bpy.ops.object.mode_set(mode='EDIT')
- bpy.ops.mesh.dissolve_edges()
- if cap_faces:
- bpy.ops.object.mode_set(mode='EDIT')
- bpy.ops.mesh.select_mode(
- use_extend=False, use_expand=False, type='EDGE')
- bpy.ops.mesh.select_non_manifold(
- extend=False, use_wire=False, use_boundary=True,
- use_multi_face=False, use_non_contiguous=False, use_verts=False)
- bpy.ops.mesh.edge_face_add()
- if open_edges_crease != 0:
- bpy.ops.transform.edge_crease(value=open_edges_crease)
-
- bpy.ops.object.mode_set(mode='EDIT')
- bpy.ops.object.mode_set(mode='OBJECT')
+ is_multiple = iterations > 1 or combine_mode != 'LAST'# or bool_multi_components
+ if merge and is_multiple:
+ use_bmesh = not (bool_shapekeys and fill_mode == 'PATCH' and component_mode != 'OBJECT')
+ merge_components(new_ob, ob.tissue_tessellate, use_bmesh)
if bool_smooth: bpy.ops.object.shade_smooth()
- ####values = [True] * len(ob.data.polygons)
- ####ob.data.polygons.foreach_set("use_smooth", values)
-
- #for m, vis in zip(ob.modifiers, mod_visibility): m.show_viewport = vis
-
- end_time = time.time()
- print('Tissue: object "{}" tessellated in {:.4f} sec'.format(ob.name, end_time-start_time))
for mesh in bpy.data.meshes:
if not mesh.users: bpy.data.meshes.remove(mesh)
@@ -2645,13 +2262,6 @@ class update_tessellate(Operator):
try: o.select_set(True)
except: pass
- bpy.ops.object.mode_set(mode=starting_mode)
-
- # clean objects
- for o in bpy.data.objects:
- if o.name not in context.view_layer.objects and "temp" in o.name:
- bpy.data.objects.remove(o)
-
ob.tissue_tessellate.error_message = ""
# Restore Base visibility
@@ -2659,14 +2269,22 @@ class update_tessellate(Operator):
ob0.hide_viewport = ob0_hidev
ob0.hide_render = ob0_hider
# Restore Component visibility
- ob1.hide_set(ob1_hide)
- ob1.hide_viewport = ob1_hidev
- ob1.hide_render = ob1_hider
+ if component_mode == 'OBJECT':
+ ob1.hide_set(ob1_hide)
+ ob1.hide_viewport = ob1_hidev
+ ob1.hide_render = ob1_hider
# Restore Local visibility
for space, local0, local1 in zip(local_spaces, local_ob0, local_ob1):
ob0.local_view_set(space, local0)
ob1.local_view_set(space, local1)
+ bpy.data.objects.remove(new_ob)
+
+ remove_temp_objects()
+
+ tissue_time(tt, "Closing tessellation", levels=1)
+
+ tissue_time(start_time,'Tessellation of "{}"'.format(ob.name),levels=0)
return {'FINISHED'}
def check(self, context):
@@ -2674,10 +2292,10 @@ class update_tessellate(Operator):
class TISSUE_PT_tessellate(Panel):
bl_label = "Tissue Tools"
- bl_category = "Edit"
+ bl_category = "Tissue"
bl_space_type = "VIEW_3D"
bl_region_type = "UI"
- bl_options = {'DEFAULT_CLOSED'}
+ #bl_options = {'DEFAULT_OPEN'}
@classmethod
def poll(cls, context):
@@ -2687,50 +2305,75 @@ class TISSUE_PT_tessellate(Panel):
layout = self.layout
col = layout.column(align=True)
- col.label(text="Tessellate:")
- col.operator("object.tessellate")
- col.operator("object.dual_mesh_tessellated")
+ col.label(text="Generate:")
+ row = col.row(align=True)
+ row.operator("object.tissue_tessellate", text='Tessellate', icon='OBJECT_DATA').component_mode = 'OBJECT'
+ tss = row.operator("object.tissue_tessellate", text='', icon='OUTLINER_COLLECTION')
+ tss.component_mode = 'COLLECTION'
+ tss.component_coll = context.collection.name
+ row.operator("object.tissue_tessellate", text='', icon='MATERIAL').component_mode = 'MATERIALS'
+ #col.operator("object.tissue_tessellate_multi", text='Tessellate Multi')
+ col.operator("object.dual_mesh_tessellated", text='Dual Mesh', icon='SEQ_CHROMA_SCOPE')
+ col.separator()
+
+ #col.label(text="Curves:")
+ col.operator("object.tissue_convert_to_curve", icon='OUTLINER_OB_CURVE', text="Convert to Curve")
+ #row.operator("object.tissue_convert_to_curve_update", icon='FILE_REFRESH', text='')
+
col.separator()
- #col = layout.column(align=True)
- #col.label(text="Tessellate Edit:")
- #col.operator("object.settings_tessellate")
- col.operator("object.update_tessellate", icon='FILE_REFRESH')
+ col.operator("object.tissue_update_tessellate_deps", icon='FILE_REFRESH', text='Refresh') #####
- #col = layout.column(align=True)
- col.operator("mesh.rotate_face", icon='NDOF_TURN')
+ col.separator()
+ col.label(text="Rotate Faces:")
+ row = col.row(align=True)
+ row.operator("mesh.tissue_rotate_face_left", text='Left', icon='LOOP_BACK')
+ row.operator("mesh.tissue_rotate_face_flip", text='Flip', icon='UV_SYNC_SELECT')
+ row.operator("mesh.tissue_rotate_face_right", text='Right', icon='LOOP_FORWARDS')
col.separator()
col.label(text="Other:")
- col.operator("object.dual_mesh")
+ col.operator("object.dual_mesh", icon='SEQ_CHROMA_SCOPE')
+ col.operator("object.polyhedra_wireframe", icon='MOD_WIREFRAME', text='Polyhedra Wireframe')
col.operator("object.lattice_along_surface", icon="OUTLINER_OB_LATTICE")
- act = context.active_object
+ act = context.object
if act and act.type == 'MESH':
col.operator("object.uv_to_mesh", icon="UV")
+ if act.mode == 'EDIT':
+ col.separator()
+ col.label(text="Weight:")
+ col.operator("object.tissue_weight_distance", icon="TRACKING")
+ col.operator("object.tissue_weight_streamlines", icon="ANIM")
+
+ col.separator()
+ col.label(text="Materials:")
+ col.operator("object.random_materials", icon='COLOR')
+ col.operator("object.weight_to_materials", icon='GROUP_VERTEX')
+
+ col.separator()
+ col.label(text="Utils:")
+ col.operator("render.tissue_render_animation", icon='RENDER_ANIMATION')
class TISSUE_PT_tessellate_object(Panel):
bl_space_type = 'PROPERTIES'
bl_region_type = 'WINDOW'
bl_context = "data"
- bl_label = "Tissue - Tessellate"
+ bl_label = "Tissue Tessellate"
bl_options = {'DEFAULT_CLOSED'}
@classmethod
def poll(cls, context):
- try: return context.object.type == 'MESH'
+ try:
+ return context.object.type == 'MESH'
except: return False
def draw(self, context):
ob = context.object
props = ob.tissue_tessellate
- allowed_obj = ('MESH','CURVE','SURFACE','FONT', 'META')
+ tissue_props = ob.tissue
- try:
- bool_tessellated = props.generator or props.component != None
- ob0 = props.generator
- ob1 = props.component
- except: bool_tessellated = False
+ bool_tessellated = tissue_props.tissue_type == 'TESSELLATE'
layout = self.layout
if not bool_tessellated:
layout.label(text="The selected object is not a Tessellated object",
@@ -2743,250 +2386,770 @@ class TISSUE_PT_tessellate_object(Panel):
row = col.row(align=True)
set_tessellate_handler(self,context)
- set_animatable_fix_handler(self,context)
- row.prop(props, "bool_run", text="Animatable")
- row.operator("object.update_tessellate", icon='FILE_REFRESH')
-
- col = layout.column(align=True)
- row = col.row(align=True)
- row.label(text="BASE :")
- row.label(text="COMPONENT :")
- row = col.row(align=True)
-
+ ###### set_animatable_fix_handler(self,context)
+ row.operator("object.tissue_update_tessellate_deps", icon='FILE_REFRESH', text='Refresh') ####
+ lock_icon = 'LOCKED' if tissue_props.bool_lock else 'UNLOCKED'
+ #lock_icon = 'PINNED' if props.bool_lock else 'UNPINNED'
+ deps_icon = 'LINKED' if tissue_props.bool_dependencies else 'UNLINKED'
+ row.prop(tissue_props, "bool_dependencies", text="", icon=deps_icon)
+ row.prop(tissue_props, "bool_lock", text="", icon=lock_icon)
col2 = row.column(align=True)
- col2.prop_search(props, "generator", context.scene, "objects")
- row.separator()
- col2 = row.column(align=True)
- col2.prop_search(props, "component", context.scene, "objects")
+ col2.prop(tissue_props, "bool_run", text="",icon='TIME')
+ col2.enabled = not tissue_props.bool_lock
+ #layout.use_property_split = True
+ #layout.use_property_decorate = False # No animation.
+ col = layout.column(align=True)
+ col.label(text='Base object:')
row = col.row(align=True)
+ row.prop_search(props, "generator", context.scene, "objects")
col2 = row.column(align=True)
- col2.prop(props, "gen_modifiers", text="Use Modifiers", icon='MODIFIER')
- row.separator()
- try:
- if not (ob0.modifiers or ob0.data.shape_keys) or props.fill_mode == 'PATCH':
- col2.enabled = False
- except:
- col2.enabled = False
- col2 = row.column(align=True)
- col2.prop(props, "com_modifiers", text="Use Modifiers", icon='MODIFIER')
+ col2.prop(props, "gen_modifiers", text='Use Modifiers',icon='MODIFIER')
+ '''
try:
- if not (props.component.modifiers or props.component.data.shape_keys):
+ if not (props.generator.modifiers or props.generator.data.shape_keys):
col2.enabled = False
except:
col2.enabled = False
- col.separator()
+ '''
+ #col.separator()
- # Fill and Rotation
- row = col.row(align=True)
- row.label(text="Fill Mode:")
- row.separator()
- row.label(text="Rotation:")
- row = col.row(align=True)
+ layout.use_property_split = False
+ # Fill
+ col = layout.column(align=True)
+ col.label(text="Fill Mode:")
# fill
- row.prop(props, "fill_mode", text="", icon='NONE', expand=False,
+ row = col.row(align=True)
+ row.prop(props, "fill_mode", icon='NONE', expand=True,
slider=True, toggle=False, icon_only=False, event=False,
full_event=False, emboss=True, index=-1)
- row.separator()
- # rotation
- col2 = row.column(align=True)
- col2.prop(props, "rotation_mode", text="", icon='NONE', expand=False,
- slider=True, toggle=False, icon_only=False, event=False,
- full_event=False, emboss=True, index=-1)
+ #layout.use_property_split = True
+ col = layout.column(align=True)
+ col.prop(props, "bool_smooth")
- if props.rotation_mode == 'RANDOM':
- #row = col.row(align=True)
- col2.prop(props, "random_seed")
- if props.rotation_mode == 'UV':
- uv_error = False
- if props.fill_mode == 'FAN':
- row = col.row(align=True)
- row.label(text="UV rotation doesn't work in FAN mode",
- icon='ERROR')
- uv_error = True
- if props.generator.type != 'MESH':
- row = col.row(align=True)
- row.label(
- text="UV rotation supported only for Mesh objects",
- icon='ERROR')
- uv_error = True
+class TISSUE_PT_tessellate_frame(Panel):
+ bl_space_type = 'PROPERTIES'
+ bl_region_type = 'WINDOW'
+ bl_context = "data"
+ bl_parent_id = "TISSUE_PT_tessellate_object"
+ bl_label = "Frame Settings"
+ #bl_options = {'DEFAULT_CLOSED'}
+
+ @classmethod
+ def poll(cls, context):
+ try:
+ bool_frame = context.object.tissue_tessellate.fill_mode == 'FRAME'
+ bool_tessellated = context.object.tissue_tessellate.generator != None
+ return context.object.type == 'MESH' and bool_frame and bool_tessellated
+ except:
+ return False
+
+ def draw(self, context):
+ ob = context.object
+ props = ob.tissue_tessellate
+ layout = self.layout
+ col = layout.column(align=True)
+ row = col.row(align=True)
+ row.prop(props, "frame_mode", expand=True)
+ row = col.row(align=True)
+ row.prop(props, "frame_thickness", icon='NONE', expand=True)
+ col.separator()
+ row = col.row(align=True)
+ row.prop(props, "fill_frame", icon='NONE')
+ show_frame_mat = props.component_mode == 'MATERIALS' or props.bool_material_id
+ col2 = row.column(align=True)
+ col2.prop(props, "fill_frame_mat", icon='NONE')
+ col2.enabled = props.fill_frame and show_frame_mat
+ row = col.row(align=True)
+ row.prop(props, "frame_boundary", text='Boundary', icon='NONE')
+ col2 = row.column(align=True)
+ col2.prop(props, "boundary_mat_offset", icon='NONE')
+ col2.enabled = props.frame_boundary and show_frame_mat
+
+
+class TISSUE_PT_tessellate_component(Panel):
+ bl_space_type = 'PROPERTIES'
+ bl_region_type = 'WINDOW'
+ bl_context = "data"
+ bl_parent_id = "TISSUE_PT_tessellate_object"
+ bl_label = "Components"
+ #bl_options = {'DEFAULT_CLOSED'}
+
+ @classmethod
+ def poll(cls, context):
+ try:
+ bool_tessellated = context.object.tissue.tissue_type == 'TESSELLATE'
+ return context.object.type == 'MESH' and bool_tessellated
+ except:
+ return False
+
+ def draw(self, context):
+ ob = context.object
+ props = ob.tissue_tessellate
+
+ layout = self.layout
+ col = layout.column(align=True)
+ col.label(text='Component Mode:')
+ row = col.row(align=True)
+ row.prop(props, "component_mode", icon='NONE', expand=True,
+ slider=True, toggle=False, icon_only=False, event=False,
+ full_event=False, emboss=True, index=-1)
+
+ if props.component_mode == 'OBJECT':
+ col.separator()
+ row = col.row(align=True)
+ row.prop_search(props, "component", context.scene, "objects")
+ col2 = row.column(align=True)
+ col2.prop(props, "com_modifiers", text='Use Modifiers',icon='MODIFIER')
+ '''
+ try:
+ if not (props.component.modifiers or props.component.data.shape_keys):
+ col2.enabled = False
+ except:
+ col2.enabled = False
+ '''
+ elif props.component_mode == 'COLLECTION':
+ col.separator()
+
+ if props.component_coll in list(bpy.data.collections):
+ components = []
+ for o in props.component_coll.objects:
+ if o.type in allowed_objects() and o is not ob:
+ components.append(o.name)
+ n_comp = len(components)
+ if n_comp == 0:
+ col.label(text="Can't find components in the Collection.", icon='ERROR')
else:
- if len(props.generator.data.uv_layers) == 0:
- row = col.row(align=True)
- row.label(text="'" + props.generator.name +
- " doesn't have UV Maps", icon='ERROR')
- uv_error = True
- if uv_error:
+ text = "{} Component{}".format(n_comp,"s" if n_comp>1 else "")
row = col.row(align=True)
- row.label(text="Default rotation will be used instead",
- icon='INFO')
+ row.label(text=text, icon='OBJECT_DATA')
+ row.prop(props, "com_modifiers", text='Use Modifiers',icon='MODIFIER')
+ else:
+ col.label(text="Please, chose one Collection.", icon='ERROR')
- # component XY
+ col.separator()
row = col.row(align=True)
- row.label(text="Component Coordinates:")
+ row.prop_search(props,'component_coll',bpy.data,'collections')
+ col2 = row.column(align=True)
+ col2.prop(props, "coll_rand_seed")
+ col = layout.column(align=True)
row = col.row(align=True)
- row.prop(props, "mode", expand=True)
-
- if props.mode != 'BOUNDS':
+ ob0 = props.generator
+ row.prop_search(props, 'vertex_group_distribution',
+ ob0, "vertex_groups", text='')
+ col2 = row.column(align=True)
+ row2 = col2.row(align=True)
+ row2.prop(props, "invert_vertex_group_distribution", text="",
+ toggle=True, icon='ARROW_LEFTRIGHT')
+ row2.prop(props, "vertex_group_distribution_factor")
+ row2.enabled = props.vertex_group_distribution in ob0.vertex_groups.keys()
+ if props.fill_mode == 'FAN': col.prop(props, "consistent_wedges")
+ else:
+ components = []
+ for mat in props.generator.material_slots.keys():
+ if mat in bpy.data.objects.keys():
+ if bpy.data.objects[mat].type in allowed_objects():
+ components.append(mat)
+ n_comp = len(components)
+ if n_comp == 0:
+ col.label(text="Can't find components from the materials.", icon='ERROR')
+ else:
col.separator()
+ text = "{} Component{}".format(n_comp,"s" if n_comp>1 else "")
row = col.row(align=True)
- row.label(text="X:")
- row.prop(
- props, "bounds_x", text="Bounds X", icon='NONE', expand=True,
- slider=False, toggle=False, icon_only=False, event=False,
- full_event=False, emboss=True, index=-1)
+ row.label(text=text, icon='OBJECT_DATA')
+ row.prop(props, "com_modifiers", text='Use Modifiers',icon='MODIFIER')
+ if props.fill_mode != 'FRAME':
+ col.separator()
+ col.separator()
row = col.row(align=True)
- row.label(text="Y:")
- row.prop(
- props, "bounds_y", text="Bounds X", icon='NONE', expand=True,
- slider=False, toggle=False, icon_only=False, event=False,
- full_event=False, emboss=True, index=-1)
-
- # component Z
- col.label(text="Thickness:")
- row = col.row(align=True)
- row.prop(props, "scale_mode", expand=True)
- col.prop(props, "zscale", text="Scale", icon='NONE', expand=False,
- slider=True, toggle=False, icon_only=False, event=False,
- full_event=False, emboss=True, index=-1)
- if props.mode == 'BOUNDS':
- col.prop(props, "offset", text="Offset", icon='NONE', expand=False,
- slider=True, toggle=False, icon_only=False, event=False,
- full_event=False, emboss=True, index=-1)
+ row.label(text="Boundary Faces:")
+ row.prop(props, "boundary_mat_offset", icon='NONE')
+ row = col.row(align=True)
+ row.prop(props, "boundary_variable_offset", text='Variable Offset', icon='NONE')
+ row.prop(props, "auto_rotate_boundary", icon='NONE')
+ col.separator()
- # Direction
- row = col.row(align=True)
- row.label(text="Direction:")
+class TISSUE_PT_tessellate_coordinates(Panel):
+ bl_space_type = 'PROPERTIES'
+ bl_region_type = 'WINDOW'
+ bl_context = "data"
+ bl_parent_id = "TISSUE_PT_tessellate_object"
+ bl_label = "Components Coordinates"
+ bl_options = {'DEFAULT_CLOSED'}
+
+ @classmethod
+ def poll(cls, context):
+ try:
+ bool_tessellated = context.object.tissue.tissue_type == 'TESSELLATE'
+ return context.object.type == 'MESH' and bool_tessellated
+ except:
+ return False
+
+ def draw(self, context):
+ ob = context.object
+ props = ob.tissue_tessellate
+ layout = self.layout
+
+ col = layout.column(align=True)
+ # component XY
+ row = col.row(align=True)
+ row.prop(props, "mode", expand=True)
+
+ if props.mode != 'BOUNDS':
+ col.separator()
row = col.row(align=True)
+ row.label(text="X:")
row.prop(
- props, "normals_mode", text="Direction", icon='NONE', expand=True,
+ props, "bounds_x", text="Bounds X", icon='NONE', expand=True,
slider=False, toggle=False, icon_only=False, event=False,
full_event=False, emboss=True, index=-1)
- row.enabled = props.fill_mode != 'PATCH'
- # merge
- col = layout.column(align=True)
row = col.row(align=True)
- row.prop(props, "merge")
- if props.merge:
- row.prop(props, "merge_thres")
+ row.label(text="Y:")
+ row.prop(
+ props, "bounds_y", text="Bounds X", icon='NONE', expand=True,
+ slider=False, toggle=False, icon_only=False, event=False,
+ full_event=False, emboss=True, index=-1)
+
+
+class TISSUE_PT_tessellate_rotation(Panel):
+ bl_space_type = 'PROPERTIES'
+ bl_region_type = 'WINDOW'
+ bl_context = "data"
+ bl_parent_id = "TISSUE_PT_tessellate_object"
+ bl_label = "Rotation"
+ bl_options = {'DEFAULT_CLOSED'}
+
+ @classmethod
+ def poll(cls, context):
+ try:
+ bool_tessellated = context.object.tissue.tissue_type == 'TESSELLATE'
+ return context.object.type == 'MESH' and bool_tessellated
+ except:
+ return False
+
+ def draw(self, context):
+ ob = context.object
+ props = ob.tissue_tessellate
+ layout = self.layout
+ # rotation
+ layout.use_property_split = True
+ layout.use_property_decorate = False # No animation.
+ col = layout.column(align=True)
+ col.prop(props, "rotation_mode", text='Rotation', icon='NONE', expand=False,
+ slider=True, toggle=False, icon_only=False, event=False,
+ full_event=False, emboss=True, index=-1)
+ if props.rotation_mode == 'WEIGHT':
+ col.separator()
row = col.row(align=True)
- row.prop(props, "bool_smooth")
- if props.merge:
- col2 = row.column(align=True)
- col2.prop(props, "bool_dissolve_seams")
- #if props.component.type != 'MESH': col2.enabled = False
+ row.separator()
+ row.separator()
+ row.separator()
+ row.prop_search(props, 'vertex_group_rotation',
+ ob0, "vertex_groups", text='Vertex Group')
+ col2 = row.column(align=True)
+ col2.prop(props, "invert_vertex_group_rotation", text="", toggle=True, icon='ARROW_LEFTRIGHT')
+ col2.enabled = props.vertex_group_rotation in ob0.vertex_groups.keys()
+ col.separator()
+ col.prop(props, "rotation_direction", expand=False,
+ slider=True, toggle=False, icon_only=False, event=False,
+ full_event=False, emboss=True, index=-1)
+ if props.rotation_mode == 'RANDOM':
+ col.prop(props, "rand_seed")
+ col.prop(props, "rand_step")
+ else:
+ col.prop(props, "rotation_shift")
+
+ if props.rotation_mode == 'UV':
+ uv_error = False
+ if props.generator.type != 'MESH':
+ row = col.row(align=True)
+ row.label(
+ text="UV rotation supported only for Mesh objects",
+ icon='ERROR')
+ uv_error = True
+ else:
+ if len(props.generator.data.uv_layers) == 0:
+ row = col.row(align=True)
+ row.label(text="'" + props.generator.name +
+ " doesn't have UV Maps", icon='ERROR')
+ uv_error = True
+ if uv_error:
+ row = col.row(align=True)
+ row.label(text="Default rotation will be used instead",
+ icon='INFO')
+
+class TISSUE_PT_tessellate_thickness(Panel):
+ bl_space_type = 'PROPERTIES'
+ bl_region_type = 'WINDOW'
+ bl_context = "data"
+ bl_parent_id = "TISSUE_PT_tessellate_object"
+ bl_label = "Thickness"
+ #bl_options = {'DEFAULT_CLOSED'}
+
+ @classmethod
+ def poll(cls, context):
+ try: return context.object.tissue.tissue_type == 'TESSELLATE'
+ except: return False
+ def draw(self, context):
+ ob = context.object
+ props = ob.tissue_tessellate
+
+ layout = self.layout
+ #layout.use_property_split = True
+ col = layout.column(align=True)
+ # component Z
+ row = col.row(align=True)
+ row.prop(props, "scale_mode", expand=True)
+ col.prop(props, "zscale", text="Scale", icon='NONE', expand=False,
+ slider=True, toggle=False, icon_only=False, event=False,
+ full_event=False, emboss=True, index=-1)
+ if props.mode == 'BOUNDS':
row = col.row(align=True)
- row.prop(props, "cap_faces")
- if props.cap_faces:
- col2 = row.column(align=True)
- col2.prop(props, "open_edges_crease", text="Crease")
+ row.prop(props, "offset", text="Offset", icon='NONE', expand=False,
+ slider=True, toggle=False, icon_only=False, event=False,
+ full_event=False, emboss=True, index=-1)
+ row.enabled = not props.use_origin_offset
+ col.prop(props, 'use_origin_offset')
- # Advanced Settings
- col = layout.column(align=True)
- col.separator()
+ col.separator()
+ row = col.row(align=True)
+ ob0 = props.generator
+ row.prop_search(props, 'vertex_group_thickness',
+ ob0, "vertex_groups", text='')
+ col2 = row.column(align=True)
+ row2 = col2.row(align=True)
+ row2.prop(props, "invert_vertex_group_thickness", text="",
+ toggle=True, icon='ARROW_LEFTRIGHT')
+ row2.prop(props, "vertex_group_thickness_factor")
+ row2.enabled = props.vertex_group_thickness in ob0.vertex_groups.keys()
+
+class TISSUE_PT_tessellate_direction(Panel):
+ bl_space_type = 'PROPERTIES'
+ bl_region_type = 'WINDOW'
+ bl_context = "data"
+ bl_parent_id = "TISSUE_PT_tessellate_object"
+ bl_label = "Thickness Direction"
+ bl_options = {'DEFAULT_CLOSED'}
+
+ @classmethod
+ def poll(cls, context):
+ try:
+ return context.object.tissue.tissue_type == 'TESSELLATE'
+ except:
+ return False
+
+ def draw(self, context):
+ ob = context.object
+ props = ob.tissue_tessellate
+ layout = self.layout
+ ob0 = props.generator
+ #layout.use_property_split = True
+ col = layout.column(align=True)
+ row = col.row(align=True)
+ row.prop(
+ props, "normals_mode", text="Direction", icon='NONE', expand=True,
+ slider=False, toggle=False, icon_only=False, event=False,
+ full_event=False, emboss=True, index=-1)
+ if props.normals_mode == 'OBJECT':
col.separator()
row = col.row(align=True)
- row.prop(props, "bool_advanced", icon='SETTINGS')
- if props.bool_advanced:
- allow_multi = False
- allow_shapekeys = not props.com_modifiers
- for m in ob0.data.materials:
- try:
- o = bpy.data.objects[m.name]
- allow_multi = True
- try:
- if o.data.shape_keys is None: continue
- elif len(o.data.shape_keys.key_blocks) < 2: continue
- else: allow_shapekeys = not props.com_modifiers
- except: pass
- except: pass
- # DATA #
- col = layout.column(align=True)
- col.label(text="Morphing:")
+ row.prop_search(props, "target", context.scene, "objects", text='Target')
+ if props.warning_message_thickness != '':
+ col.separator()
+ col.label(text=props.warning_message_thickness, icon='ERROR')
+ if props.normals_mode != 'FACES':
+ col.separator()
+ col.prop(props, "smooth_normals")
+ if props.smooth_normals:
row = col.row(align=True)
- col2 = row.column(align=True)
- col2.prop(props, "bool_vertex_group", icon='GROUP_VERTEX')
- #col2.prop_search(props, "vertex_group", props.generator, "vertex_groups")
- try:
- if len(props.generator.vertex_groups) == 0:
- col2.enabled = False
- except:
- col2.enabled = False
+ row.prop(props, "smooth_normals_iter")
row.separator()
+ row.prop_search(props, 'vertex_group_smooth_normals',
+ ob0, "vertex_groups", text='')
col2 = row.column(align=True)
- row2 = col2.row(align=True)
- row2.prop(props, "bool_shapekeys", text="Use Shape Keys", icon='SHAPEKEY_DATA')
- row2.enabled = allow_shapekeys
+ col2.prop(props, "invert_vertex_group_smooth_normals", text="", toggle=True, icon='ARROW_LEFTRIGHT')
+ col2.enabled = props.vertex_group_smooth_normals in ob0.vertex_groups.keys()
+ if props.normals_mode == 'VERTS':
+ col.separator()
+ row = col.row(align=True)
+ row.prop(props, "normals_x")
+ row.prop(props, "normals_y")
+ row.prop(props, "normals_z")
+ row = col.row(align=True)
+ row.prop_search(props, 'vertex_group_scale_normals',
+ ob0, "vertex_groups", text='')
+ col2 = row.column(align=True)
+ col2.prop(props, "invert_vertex_group_scale_normals", text="", toggle=True, icon='ARROW_LEFTRIGHT')
+ col2.enabled = props.vertex_group_scale_normals in ob0.vertex_groups.keys()
+ if props.normals_mode in ('OBJECT', 'SHAPEKEYS'):
+ col.separator()
+ row = col.row(align=True)
+ row.prop(props, "even_thickness")
+ if props.even_thickness: row.prop(props, "even_thickness_iter")
- # LIMITED TESSELLATION
- col = layout.column(align=True)
- col.label(text="Limited Tessellation:")
- row = col.row(align=True)
- col2 = row.column(align=True)
- col2.prop(props, "bool_multi_components", icon='MOD_TINT')
- if not allow_multi:
- col2.enabled = False
- col.separator()
- row = col.row(align=True)
- col2 = row.column(align=True)
- col2.prop(props, "bool_selection", text="On selected Faces", icon='RESTRICT_SELECT_OFF')
- #if props.bool_material_id or props.bool_selection or props.bool_multi_components:
- #col2 = row.column(align=True)
- # col2.prop(props, "bool_combine")
- row.separator()
- if props.generator.type != 'MESH':
- col2.enabled = False
- col2 = row.column(align=True)
- col2.prop(props, "bool_material_id", icon='MATERIAL_DATA', text="Material ID")
- if props.bool_material_id and not props.bool_multi_components:
- #col2 = row.column(align=True)
- col2.prop(props, "material_id")
- if props.bool_multi_components:
- col2.enabled = False
+class TISSUE_PT_tessellate_options(Panel):
+ bl_space_type = 'PROPERTIES'
+ bl_region_type = 'WINDOW'
+ bl_context = "data"
+ bl_parent_id = "TISSUE_PT_tessellate_object"
+ bl_label = " "
+ bl_options = {'DEFAULT_CLOSED'}
+
+ @classmethod
+ def poll(cls, context):
+ try:
+ return context.object.tissue.tissue_type == 'TESSELLATE'
+ except:
+ return False
- # TRANSFER DATA ### OFF
- if props.fill_mode != 'PATCH' and False:
- col = layout.column(align=True)
- col.label(text="Component Data:")
+ def draw_header(self, context):
+ ob = context.object
+ props = ob.tissue_tessellate
+ self.layout.prop(props, "merge")
+
+ def draw(self, context):
+ ob = context.object
+ props = ob.tissue_tessellate
+ layout = self.layout
+ layout.use_property_split = True
+ layout.use_property_decorate = False # No animation.
+ col = layout.column(align=True)
+ if props.merge:
+ col.prop(props, "merge_thres")
+ col.prop(props, "merge_open_edges_only")
+ col.prop(props, "bool_dissolve_seams")
+ col.prop(props, "close_mesh")
+ if props.close_mesh in ('BRIDGE', 'BRIDGE_CAP'):
+ col.separator()
+ if props.close_mesh == 'BRIDGE_CAP':
+ if props.vertex_group_bridge_owner == 'BASE': ob_bridge = ob0
+ else: ob_bridge = ob1
row = col.row(align=True)
+ row.prop_search(props, 'vertex_group_bridge',
+ ob_bridge, "vertex_groups")
+ row.prop(props, "invert_vertex_group_bridge", text="",
+ toggle=True, icon='ARROW_LEFTRIGHT')
+ row = col.row(align=True)
+ row.prop(props, "vertex_group_bridge_owner", expand=True,
+ slider=False, toggle=False, icon_only=False, event=False,
+ full_event=False, emboss=True, index=-1)
col2 = row.column(align=True)
- col2.prop(props, "bool_materials", icon='MATERIAL_DATA')
- row.separator()
- col2 = row.column(align=True)
- if props.fill_mode == 'PATCH':
- col.enabled = False
- col.label(text='Not needed in Patch mode', icon='INFO')
-
+ row2 = col2.row(align=True)
+ col.prop(props, "bridge_edges_crease", text="Crease")
+ col.prop(props, "bridge_material_offset", text='Material Offset')
+ '''
+ if props.close_mesh == 'BRIDGE' and False:
+ col.separator()
+ col.prop(props, "bridge_cuts")
+ col.prop(props, "bridge_smoothness")
+ '''
+ if props.close_mesh in ('CAP', 'BRIDGE_CAP'):
+ #row = col.row(align=True)
col.separator()
- row = col.row(align=True)
- row.label(text='Reiterate Tessellation:', icon='FILE_REFRESH')
- row.prop(props, 'iterations', text='Repeat', icon='SETTINGS')
+ if props.close_mesh == 'BRIDGE_CAP':
+ if props.vertex_group_cap_owner == 'BASE': ob_cap = ob0
+ else: ob_cap = ob1
+ row = col.row(align=True)
+ row.prop_search(props, 'vertex_group_cap',
+ ob_cap, "vertex_groups")
+ row.prop(props, "invert_vertex_group_cap", text="",
+ toggle=True, icon='ARROW_LEFTRIGHT')
+ row = col.row(align=True)
+ row.prop(props, "vertex_group_cap_owner", expand=True,
+ slider=False, toggle=False, icon_only=False, event=False,
+ full_event=False, emboss=True, index=-1)
+ col.prop(props, "open_edges_crease", text="Crease")
+ col.prop(props, "cap_material_offset", text='Material Offset')
+ if props.warning_message_merge:
col.separator()
- row = col.row(align=True)
- row.label(text='Combine Iterations:')
- row = col.row(align=True)
- row.prop(
- props, "combine_mode", text="Combine:",icon='NONE', expand=True,
- slider=False, toggle=False, icon_only=False, event=False,
- full_event=False, emboss=True, index=-1)
-
-class rotate_face(Operator):
- bl_idname = "mesh.rotate_face"
- bl_label = "Rotate Faces"
- bl_description = "Rotate selected faces and update tessellated meshes"
+ col.label(text=props.warning_message_merge, icon='ERROR')
+
+class TISSUE_PT_tessellate_morphing(Panel):
+ bl_space_type = 'PROPERTIES'
+ bl_region_type = 'WINDOW'
+ bl_context = "data"
+ bl_parent_id = "TISSUE_PT_tessellate_object"
+ bl_label = "Weight and Morphing"
+ bl_options = {'DEFAULT_CLOSED'}
+
+ @classmethod
+ def poll(cls, context):
+ try: return context.object.tissue.tissue_type == 'TESSELLATE'
+ except: return False
+
+ def draw(self, context):
+ ob = context.object
+ props = ob.tissue_tessellate
+ layout = self.layout
+ allow_shapekeys = not props.com_modifiers
+
+ if tessellated(ob):
+ ob0 = props.generator
+ for m in ob0.data.materials:
+ try:
+ o = bpy.data.objects[m.name]
+ allow_multi = True
+ try:
+ if o.data.shape_keys is None: continue
+ elif len(o.data.shape_keys.key_blocks) < 2: continue
+ else: allow_shapekeys = not props.com_modifiers
+ except: pass
+ except: pass
+ col = layout.column(align=True)
+ #col.label(text="Morphing:")
+ row = col.row(align=True)
+ col2 = row.column(align=True)
+ col2.prop(props, "bool_vertex_group", icon='GROUP_VERTEX')
+ #col2.prop_search(props, "vertex_group", props.generator, "vertex_groups")
+ try:
+ if len(props.generator.vertex_groups) == 0:
+ col2.enabled = False
+ except:
+ col2.enabled = False
+ row.separator()
+ col2 = row.column(align=True)
+ row2 = col2.row(align=True)
+ row2.prop(props, "bool_shapekeys", text="Use Shape Keys", icon='SHAPEKEY_DATA')
+ row2.enabled = allow_shapekeys
+ if not allow_shapekeys:
+ col2 = layout.column(align=True)
+ row2 = col2.row(align=True)
+ row2.label(text="Component's Shape Keys cannot be used together with Component's Modifiers", icon='INFO')
+
+
+class TISSUE_PT_tessellate_selective(Panel):
+ bl_space_type = 'PROPERTIES'
+ bl_region_type = 'WINDOW'
+ bl_context = "data"
+ bl_parent_id = "TISSUE_PT_tessellate_object"
+ bl_label = "Selective"
+ bl_options = {'DEFAULT_CLOSED'}
+
+ @classmethod
+ def poll(cls, context):
+ try:
+ return context.object.tissue.tissue_type == 'TESSELLATE'
+ except:
+ return False
+
+ def draw(self, context):
+ ob = context.object
+ props = ob.tissue_tessellate
+
+ layout = self.layout
+ #layout.use_property_split = True
+ #layout.use_property_decorate = False # No animation.
+ allow_multi = False
+ allow_shapekeys = not props.com_modifiers
+ ob0 = props.generator
+ for m in ob0.data.materials:
+ try:
+ o = bpy.data.objects[m.name]
+ allow_multi = True
+ try:
+ if o.data.shape_keys is None: continue
+ elif len(o.data.shape_keys.key_blocks) < 2: continue
+ else: allow_shapekeys = not props.com_modifiers
+ except: pass
+ except: pass
+ # LIMITED TESSELLATION
+ col = layout.column(align=True)
+ #col.label(text="Limited Tessellation:")
+ row = col.row(align=True)
+ col2 = row.column(align=True)
+ col2.prop(props, "bool_selection", text="On selected Faces", icon='RESTRICT_SELECT_OFF')
+ row.separator()
+ if props.generator.type != 'MESH':
+ col2.enabled = False
+ col2 = row.column(align=True)
+ col2.prop(props, "bool_material_id", icon='MATERIAL_DATA', text="Material Index")
+ #if props.bool_material_id and not props.component_mode == 'MATERIALS':
+ #col2 = row.column(align=True)
+ col2.prop(props, "material_id")
+ #if props.component_mode == 'MATERIALS':
+ # col2.enabled = False
+
+ #col.separator()
+ #row = col.row(align=True)
+ #col2 = row.column(align=True)
+ #col2.prop(props, "bool_multi_components", icon='MOD_TINT')
+ #if not allow_multi:
+ # col2.enabled = False
+
+
+class TISSUE_PT_tessellate_iterations(Panel):
+ bl_space_type = 'PROPERTIES'
+ bl_region_type = 'WINDOW'
+ bl_context = "data"
+ bl_parent_id = "TISSUE_PT_tessellate_object"
+ bl_label = "Iterations"
+ bl_options = {'DEFAULT_CLOSED'}
+
+ @classmethod
+ def poll(cls, context):
+ try:
+ return context.object.tissue.tissue_type == 'TESSELLATE'
+ except:
+ return False
+
+ def draw(self, context):
+ ob = context.object
+ props = ob.tissue_tessellate
+ layout = self.layout
+ layout.use_property_split = True
+ layout.use_property_decorate = False # No animation.
+ col = layout.column(align=True)
+ row = col.row(align=True)
+ #row.label(text='', icon='FILE_REFRESH')
+ col.prop(props, 'iterations', text='Repeat')#, icon='FILE_REFRESH')
+ if props.iterations > 1 and props.fill_mode == 'PATCH':
+ col.separator()
+ #row = col.row(align=True)
+ col.prop(props, 'patch_subs')
+ layout.use_property_split = False
+ col = layout.column(align=True)
+ #row = col.row(align=True)
+ col.label(text='Combine Iterations:')
+ row = col.row(align=True)
+ row.prop(
+ props, "combine_mode", text="Combine:",icon='NONE', expand=True,
+ slider=False, toggle=False, icon_only=False, event=False,
+ full_event=False, emboss=True, index=-1)
+
+class tissue_rotate_face_right(Operator):
+ bl_idname = "mesh.tissue_rotate_face_right"
+ bl_label = "Tissue Rotate Faces Right"
+ bl_description = "Rotate clockwise selected faces and update tessellated meshes"
bl_options = {'REGISTER', 'UNDO'}
@classmethod
def poll(cls, context):
- return context.mode == 'EDIT_MESH'
+ try:
+ #bool_tessellated = context.object.tissue_tessellate.generator != None
+ ob = context.object
+ return ob.type == 'MESH' and ob.mode == 'EDIT'# and bool_tessellated
+ except:
+ return False
def execute(self, context):
- ob = bpy.context.active_object
+ ob = context.active_object
+ me = ob.data
+
+ bm = bmesh.from_edit_mesh(me)
+ mesh_select_mode = [sm for sm in context.tool_settings.mesh_select_mode]
+
+ for face in bm.faces:
+ if (face.select):
+ vs = face.verts[:]
+ vs2 = vs[-1:]+vs[:-1]
+ material_index = face.material_index
+ bm.faces.remove(face)
+ f2 = bm.faces.new(vs2)
+ f2.select = True
+ f2.material_index = material_index
+ bm.normal_update()
+
+ # trigger UI update
+ bmesh.update_edit_mesh(me)
+ bm.free()
+ ob.select_set(False)
+
+ # update tessellated meshes
+ bpy.ops.object.mode_set(mode='OBJECT')
+ for o in [obj for obj in bpy.data.objects if
+ obj.tissue_tessellate.generator == ob and obj.visible_get()]:
+ context.view_layer.objects.active = o
+
+ #override = {'object': o, 'mode': 'OBJECT', 'selected_objects': [o]}
+ if not o.tissue.bool_lock:
+ bpy.ops.object.tissue_update_tessellate()
+ o.select_set(False)
+ ob.select_set(True)
+ context.view_layer.objects.active = ob
+ bpy.ops.object.mode_set(mode='EDIT')
+ context.tool_settings.mesh_select_mode = mesh_select_mode
+
+ return {'FINISHED'}
+
+class tissue_rotate_face_flip(Operator):
+ bl_idname = "mesh.tissue_rotate_face_flip"
+ bl_label = "Tissue Rotate Faces Flip"
+ bl_description = "Fully rotate selected faces and update tessellated meshes"
+ bl_options = {'REGISTER', 'UNDO'}
+
+ @classmethod
+ def poll(cls, context):
+ try:
+ #bool_tessellated = context.object.tissue_tessellate.generator != None
+ ob = context.object
+ return ob.type == 'MESH' and ob.mode == 'EDIT'# and bool_tessellated
+ except:
+ return False
+
+ def execute(self, context):
+ ob = context.active_object
+ me = ob.data
+
+ bm = bmesh.from_edit_mesh(me)
+ mesh_select_mode = [sm for sm in context.tool_settings.mesh_select_mode]
+
+ for face in bm.faces:
+ if (face.select):
+ vs = face.verts[:]
+ nrot = int(len(vs)/2)
+ vs2 = vs[-nrot:]+vs[:-nrot]
+ material_index = face.material_index
+ bm.faces.remove(face)
+ f2 = bm.faces.new(vs2)
+ f2.select = True
+ f2.material_index = material_index
+ bm.normal_update()
+
+ # trigger UI update
+ bmesh.update_edit_mesh(me)
+ bm.free()
+ ob.select_set(False)
+
+ # update tessellated meshes
+ bpy.ops.object.mode_set(mode='OBJECT')
+ for o in [obj for obj in bpy.data.objects if
+ obj.tissue_tessellate.generator == ob and obj.visible_get()]:
+ context.view_layer.objects.active = o
+
+ #override = {'object': o, 'mode': 'OBJECT', 'selected_objects': [o]}
+ if not o.tissue.bool_lock:
+ bpy.ops.object.tissue_update_tessellate()
+ o.select_set(False)
+ ob.select_set(True)
+ context.view_layer.objects.active = ob
+ bpy.ops.object.mode_set(mode='EDIT')
+ context.tool_settings.mesh_select_mode = mesh_select_mode
+
+ return {'FINISHED'}
+
+class tissue_rotate_face_left(Operator):
+ bl_idname = "mesh.tissue_rotate_face_left"
+ bl_label = "Tissue Rotate Faces Left"
+ bl_description = "Rotate counterclockwise selected faces and update tessellated meshes"
+ bl_options = {'REGISTER', 'UNDO'}
+
+ @classmethod
+ def poll(cls, context):
+ try:
+ #bool_tessellated = context.object.tissue_tessellate.generator != None
+ ob = context.object
+ return ob.type == 'MESH' and ob.mode == 'EDIT'# and bool_tessellated
+ except:
+ return False
+
+ def execute(self, context):
+ ob = context.active_object
me = ob.data
bm = bmesh.from_edit_mesh(me)
@@ -3005,18 +3168,619 @@ class rotate_face(Operator):
# trigger UI update
bmesh.update_edit_mesh(me)
+ bm.free()
ob.select_set(False)
# update tessellated meshes
bpy.ops.object.mode_set(mode='OBJECT')
for o in [obj for obj in bpy.data.objects if
obj.tissue_tessellate.generator == ob and obj.visible_get()]:
- bpy.context.view_layer.objects.active = o
- bpy.ops.object.update_tessellate()
+ context.view_layer.objects.active = o
+ if not o.tissue.bool_lock:
+ bpy.ops.object.tissue_update_tessellate()
o.select_set(False)
ob.select_set(True)
- bpy.context.view_layer.objects.active = ob
+ context.view_layer.objects.active = ob
bpy.ops.object.mode_set(mode='EDIT')
context.tool_settings.mesh_select_mode = mesh_select_mode
return {'FINISHED'}
+
+
+def convert_to_frame(ob, props, use_modifiers):
+ new_ob = convert_object_to_mesh(ob, use_modifiers, True)
+
+ # create bmesh
+ bm = bmesh.new()
+ bm.from_mesh(new_ob.data)
+ bm.verts.ensure_lookup_table()
+ bm.edges.ensure_lookup_table()
+ bm.faces.ensure_lookup_table()
+ if props['bool_selection']:
+ original_faces = [f for f in bm.faces if f.select]
+ else:
+ original_faces = list(bm.faces)
+ # detect edge loops
+
+ loops = []
+ boundaries_mat = []
+ neigh_face_center = []
+ face_normals = []
+ # append boundary loops
+ if props['frame_boundary']:
+ #selected_edges = [e for e in bm.edges if e.select]
+ selected_edges = [e for e in bm.edges if e.is_boundary]
+ if len(selected_edges) > 0:
+ loop = []
+ count = 0
+ e0 = selected_edges[0]
+ face = e0.link_faces[0]
+ boundary_mat = [face.material_index + props['boundary_mat_offset']]
+ face_center = [face.calc_center_median()]
+ loop_normals = [face.normal]
+ selected_edges = selected_edges[1:]
+ if props['bool_vertex_group']:
+ n_verts = len(new_ob.data.vertices)
+ base_vg = [get_weight(vg,n_verts) for vg in new_ob.vertex_groups]
+ while True:
+ new_vert = None
+ face = None
+ for e1 in selected_edges:
+ if e1.verts[0] in e0.verts: new_vert = e1.verts[1]
+ elif e1.verts[1] in e0.verts: new_vert = e1.verts[0]
+ if new_vert != None:
+ if len(loop)==0:
+ loop = [v for v in e1.verts if v != new_vert]
+ loop.append(new_vert)
+ e0 = e1
+ face = e0.link_faces[0]
+ boundary_mat.append(face.material_index + props['boundary_mat_offset'])
+ face_center.append(face.calc_center_median())
+ loop_normals.append(face.normal)
+ selected_edges.remove(e0)
+ break
+ if new_vert == None:
+ try:
+ loops.append(loop)
+ loop = []
+ e0 = selected_edges[0]
+ selected_edges = selected_edges[1:]
+ boundaries_mat.append(boundary_mat)
+ neigh_face_center.append(face_center)
+ face_normals.append(loop_normals)
+ face = e0.link_faces[0]
+ boundary_mat = [face.material_index + props['boundary_mat_offset']]
+ face_center = [face.calc_center_median()]
+ loop_normals = [face.normal]
+ except: break
+ boundaries_mat.append(boundary_mat)
+ neigh_face_center.append(face_center)
+ face_normals.append(loop_normals)
+ # compute boundary frames
+ new_faces = []
+ vert_ids = []
+
+ # append regular faces
+ for f in original_faces:#bm.faces:
+ loop = list(f.verts)
+ loops.append(loop)
+ boundaries_mat.append([f.material_index for v in loop])
+ face_normals.append([f.normal for v in loop])
+
+ # calc areas for relative frame mode
+ if props['frame_mode'] == 'RELATIVE':
+ verts_area = []
+ for v in bm.verts:
+ linked_faces = v.link_faces
+ if len(linked_faces) > 0:
+ area = sum([sqrt(f.calc_area())/len(f.verts) for f in v.link_faces])*2
+ area /= len(linked_faces)
+ else: area = 0
+ verts_area.append(area)
+
+ for loop_index, loop in enumerate(loops):
+ is_boundary = loop_index < len(neigh_face_center)
+ materials = boundaries_mat[loop_index]
+ new_loop = []
+ loop_ext = [loop[-1]] + loop + [loop[0]]
+
+ # calc tangents
+ tangents = []
+ for i in range(len(loop)):
+ # vertices
+ vert0 = loop_ext[i]
+ vert = loop_ext[i+1]
+ vert1 = loop_ext[i+2]
+ # edge vectors
+ vec0 = (vert0.co - vert.co).normalized()
+ vec1 = (vert.co - vert1.co).normalized()
+ # tangent
+ _vec1 = -vec1
+ _vec0 = -vec0
+ ang = (pi - vec0.angle(vec1))/2
+ normal = face_normals[loop_index][i]
+ tan0 = normal.cross(vec0)
+ tan1 = normal.cross(vec1)
+ tangent = (tan0 + tan1).normalized()/sin(ang)*props['frame_thickness']
+ tangents.append(tangent)
+
+ # calc correct direction for boundaries
+ mult = -1
+ if is_boundary:
+ dir_val = 0
+ for i in range(len(loop)):
+ surf_point = neigh_face_center[loop_index][i]
+ tangent = tangents[i]
+ vert = loop_ext[i+1]
+ dir_val += tangent.dot(vert.co - surf_point)
+ if dir_val > 0: mult = 1
+
+ # add vertices
+ for i in range(len(loop)):
+ vert = loop_ext[i+1]
+ if props['frame_mode'] == 'RELATIVE': area = verts_area[vert.index]
+ else: area = 1
+ new_co = vert.co + tangents[i] * mult * area
+ # add vertex
+ new_vert = bm.verts.new(new_co)
+ new_loop.append(new_vert)
+ vert_ids.append(vert.index)
+ new_loop.append(new_loop[0])
+
+ # add faces
+ materials += [materials[0]]
+ for i in range(len(loop)):
+ v0 = loop_ext[i+1]
+ v1 = loop_ext[i+2]
+ v2 = new_loop[i+1]
+ v3 = new_loop[i]
+ face_verts = [v1,v0,v3,v2]
+ if mult == -1: face_verts = [v0,v1,v2,v3]
+ new_face = bm.faces.new(face_verts)
+ new_face.material_index = materials[i+1]
+ new_face.select = True
+ new_faces.append(new_face)
+ # fill frame
+ if props['fill_frame'] and not is_boundary:
+ n_verts = len(new_loop)-1
+ loop_center = Vector((0,0,0))
+ for v in new_loop[1:]: loop_center += v.co
+ loop_center /= n_verts
+ center = bm.verts.new(loop_center)
+ for i in range(n_verts):
+ v0 = new_loop[i+1]
+ v1 = new_loop[i]
+ face_verts = [v1,v0,center]
+ new_face = bm.faces.new(face_verts)
+ new_face.material_index = materials[i] + props['fill_frame_mat']
+ new_face.select = True
+ new_faces.append(new_face)
+ #bpy.ops.object.mode_set(mode='OBJECT')
+ #for f in bm.faces: f.select_set(f not in new_faces)
+ for f in original_faces: bm.faces.remove(f)
+ bm.to_mesh(new_ob.data)
+ # propagate vertex groups
+ if props['bool_vertex_group']:
+ base_vg = []
+ for vg in new_ob.vertex_groups:
+ vertex_group = []
+ for v in bm.verts:
+ try:
+ vertex_group.append(vg.weight(v.index))
+ except:
+ vertex_group.append(0)
+ base_vg.append(vertex_group)
+ new_vert_ids = range(len(bm.verts)-len(vert_ids),len(bm.verts))
+ for vg_id, vg in enumerate(new_ob.vertex_groups):
+ for ii, jj in zip(vert_ids, new_vert_ids):
+ vg.add([jj], base_vg[vg_id][ii], 'REPLACE')
+ new_ob.data.update()
+ bm.free()
+ return new_ob
+
+def reduce_to_quads(ob, props):
+ '''
+ Convert an input object to a mesh with polygons that have maximum 4 vertices
+ '''
+ new_ob = convert_object_to_mesh(ob, props['gen_modifiers'], True)
+ me = new_ob.data
+
+ # Check if there are polygons with more than 4 sides
+ np_sides = get_attribute_numpy(me.polygons, 'loop_total')
+ mask = np_sides > 4
+ if not np.any(mask):
+ if props['boundary_mat_offset'] != 0 or props['boundary_variable_offset']:
+ bm=bmesh.new()
+ bm.from_mesh(me)
+ bm = offset_boundary_materials(
+ bm,
+ boundary_mat_offset = props['boundary_mat_offset'],
+ boundary_variable_offset = props['boundary_variable_offset'],
+ auto_rotate_boundary = props['auto_rotate_boundary'])
+ bm.to_mesh(me)
+ bm.free()
+ me.update()
+ return new_ob
+
+ # create bmesh
+ bm = bmesh.new()
+ bm.from_mesh(me)
+ bm.verts.ensure_lookup_table()
+ bm.edges.ensure_lookup_table()
+ bm.faces.ensure_lookup_table()
+
+ np_faces = np.array(bm.faces)
+ np_faces = np_faces[mask]
+
+ new_faces = []
+ for f in np_faces:
+ verts = list(f.verts)
+ while True:
+ n_verts = len(verts)
+ if n_verts < 3: break
+ elif n_verts == 3:
+ face_verts = [verts[-2], verts.pop(-1), verts.pop(0)]
+ else:
+ face_verts = [verts[-2], verts.pop(-1), verts.pop(0), verts[0]]
+ new_face = bm.faces.new(face_verts)
+ new_face.material_index = f.material_index
+ new_face.select = f.select
+ new_faces.append(new_face)
+
+ for f in np_faces: bm.faces.remove(f)
+
+ bm = offset_boundary_materials(
+ bm,
+ boundary_mat_offset = props['boundary_mat_offset'],
+ boundary_variable_offset = props['boundary_variable_offset'],
+ auto_rotate_boundary = props['auto_rotate_boundary'])
+
+ bm.to_mesh(me)
+ bm.free()
+ me.update()
+ return new_ob
+
+def convert_to_fan(ob, props, add_id_layer=False):
+ new_ob = convert_object_to_mesh(ob, props['gen_modifiers'], True)
+ bm = bmesh.new()
+ bm.from_mesh(new_ob.data)
+ if add_id_layer:
+ bm.faces.ensure_lookup_table()
+ lay = bm.faces.layers.int.new("id")
+ for i,f in enumerate(bm.faces): f[lay] = i
+ bmesh.ops.poke(bm, faces=bm.faces)#, quad_method, ngon_method)
+ bm = offset_boundary_materials(
+ bm,
+ boundary_mat_offset = props['boundary_mat_offset'],
+ boundary_variable_offset = props['boundary_variable_offset'],
+ auto_rotate_boundary = props['auto_rotate_boundary'])
+ bm.to_mesh(new_ob.data)
+ new_ob.data.update()
+ bm.free()
+ return new_ob
+
+def convert_to_triangles(ob, props):
+ new_ob = convert_object_to_mesh(ob, props['gen_modifiers'], True)
+ bm = bmesh.new()
+ bm.from_mesh(new_ob.data)
+ bmesh.ops.triangulate(bm, faces=bm.faces, quad_method='FIXED', ngon_method='BEAUTY')
+
+ bm = offset_boundary_materials(
+ bm,
+ boundary_mat_offset = props['boundary_mat_offset'],
+ boundary_variable_offset = props['boundary_variable_offset'],
+ auto_rotate_boundary = props['auto_rotate_boundary'])
+
+ bm.to_mesh(new_ob.data)
+ new_ob.data.update()
+ bm.free()
+ return new_ob
+
+def merge_components(ob, props, use_bmesh):
+
+ if not use_bmesh and False:
+ skip = True
+ ob.active_shape_key_index = 1
+ if ob.data.shape_keys != None:
+ for sk in ob.data.shape_keys.key_blocks:
+ if skip:
+ skip = False
+ continue
+ sk.mute = True
+ ob.data.update()
+ bpy.ops.object.mode_set(mode='EDIT')
+ bpy.ops.object.mode_set(mode='OBJECT')
+ if ob.data.shape_keys != None:
+ for sk in ob.data.shape_keys.key_blocks:
+ sk.mute = False
+ ob.data.update()
+
+ bpy.ops.object.mode_set(mode='EDIT')
+ bpy.ops.mesh.select_mode(
+ use_extend=False, use_expand=False, type='VERT')
+ bpy.ops.mesh.select_non_manifold(
+ extend=False, use_wire=True, use_boundary=True,
+ use_multi_face=False, use_non_contiguous=False, use_verts=False)
+
+ bpy.ops.mesh.remove_doubles(
+ threshold=props.merge_thres, use_unselected=False)
+
+ if props.bool_dissolve_seams:
+ bpy.ops.mesh.select_mode(type='EDGE')
+ bpy.ops.mesh.select_all(action='DESELECT')
+ bpy.ops.object.mode_set(mode='OBJECT')
+ for e in new_ob.data.edges:
+ e.select = e.use_seam
+ bpy.ops.object.mode_set(mode='EDIT')
+ bpy.ops.mesh.dissolve_edges()
+ bpy.ops.object.mode_set(mode='OBJECT')
+
+ if props.close_mesh != 'NONE':
+ bpy.ops.object.mode_set(mode='EDIT')
+ bpy.ops.mesh.select_mode(
+ use_extend=False, use_expand=False, type='EDGE')
+ bpy.ops.mesh.select_non_manifold(
+ extend=False, use_wire=False, use_boundary=True,
+ use_multi_face=False, use_non_contiguous=False, use_verts=False)
+ if props.close_mesh == 'CAP':
+ if props.open_edges_crease != 0:
+ bpy.ops.transform.edge_crease(value=props.open_edges_crease)
+ bpy.ops.mesh.edge_face_add()
+ bpy.ops.object.mode_set(mode='OBJECT')
+ for f in ob.data.polygons:
+ if f.select: f.material_index += props.cap_material_offset
+ elif props.close_mesh == 'BRIDGE':
+ try:
+ if props.bridge_edges_crease != 0:
+ bpy.ops.transform.edge_crease(value=props.bridge_edges_crease)
+ bpy.ops.mesh.bridge_edge_loops(
+ type='PAIRS',
+ number_cuts=props.bridge_cuts,
+ interpolation='SURFACE',
+ smoothness=props.bridge_smoothness)
+ bpy.ops.object.mode_set(mode='OBJECT')
+ for f in ob.data.polygons:
+ if f.select: f.material_index += props.bridge_material_offset
+ except: pass
+ elif props.close_mesh == 'BRIDGE_CAP':
+ # BRIDGE
+ try:
+ bpy.ops.object.mode_set(mode='OBJECT')
+ vg = ob.vertex_groups[props.vertex_group_bridge]
+ weight = get_weight_numpy(vg, len(ob.data.vertices))
+ for e in ob.data.edges:
+ if weight[e.vertices[0]]*weight[e.vertices[1]] < 1:
+ e.select = False
+ bpy.ops.object.mode_set(mode='EDIT')
+ if props.bridge_edges_crease != 0:
+ bpy.ops.transform.edge_crease(value=props.bridge_edges_crease)
+ bpy.ops.mesh.bridge_edge_loops(
+ type='PAIRS',
+ number_cuts=props.bridge_cuts,
+ interpolation='SURFACE',
+ smoothness=props.bridge_smoothness)
+ for f in ob.data.polygons:
+ if f.select: f.material_index += props.bridge_material_offset
+ bpy.ops.mesh.select_all(action='DESELECT')
+ bpy.ops.mesh.select_non_manifold(
+ extend=False, use_wire=False, use_boundary=True,
+ use_multi_face=False, use_non_contiguous=False, use_verts=False)
+ bpy.ops.object.mode_set(mode='OBJECT')
+ except: pass
+ # CAP
+ try:
+ bpy.ops.object.mode_set(mode='OBJECT')
+ vg = ob.vertex_groups[props.vertex_group_cap]
+ weight = get_weight_numpy(vg, len(ob.data.vertices))
+ for e in ob.data.edges:
+ if weight[e.vertices[0]]*weight[e.vertices[1]] < 1:
+ e.select = False
+ bpy.ops.object.mode_set(mode='EDIT')
+ if props.open_edges_crease != 0:
+ bpy.ops.transform.edge_crease(value=props.open_edges_crease)
+ bpy.ops.mesh.edge_face_add()
+ for f in ob.data.polygons:
+ if f.select: f.material_index += props.cap_material_offset
+ bpy.ops.object.mode_set(mode='OBJECT')
+ except: pass
+ else:
+ bm = bmesh.new()
+ bm.from_mesh(ob.data.copy())
+ if props.merge_open_edges_only:
+ boundary_verts = [v for v in bm.verts if v.is_boundary or v.is_wire]
+ else:
+ boundary_verts = bm.verts
+ bmesh.ops.remove_doubles(bm, verts=boundary_verts, dist=props.merge_thres)
+
+ if props.bool_dissolve_seams:
+ seam_edges = [e for e in bm.edges if e.seam]
+ bmesh.ops.dissolve_edges(bm, edges=seam_edges, use_verts=True, use_face_split=False)
+ if props.close_mesh != 'NONE':
+ bm.edges.ensure_lookup_table()
+ # set crease
+ crease_layer = bm.edges.layers.crease.verify()
+ boundary_edges = [e for e in bm.edges if e.is_boundary or e.is_wire]
+ if props.close_mesh == 'BRIDGE':
+ try:
+ for e in boundary_edges:
+ e[crease_layer] = props.bridge_edges_crease
+ closed = bmesh.ops.bridge_loops(bm, edges=boundary_edges, use_pairs=True)
+ for f in closed['faces']: f.material_index += props.bridge_material_offset
+ except:
+ bm.to_mesh(ob.data)
+ return 'bridge_error'
+ elif props.close_mesh == 'CAP':
+ for e in boundary_edges:
+ e[crease_layer] = props.open_edges_crease
+ closed = bmesh.ops.holes_fill(bm, edges=boundary_edges)
+ for f in closed['faces']: f.material_index += props.cap_material_offset
+ elif props.close_mesh == 'BRIDGE_CAP':
+ # BRIDGE
+ dvert_lay = bm.verts.layers.deform.active
+ try:
+ dvert_lay = bm.verts.layers.deform.active
+ group_index = ob.vertex_groups[props.vertex_group_bridge].index
+ bw = bmesh_get_weight_numpy(group_index, dvert_lay, bm.verts)
+ if props.invert_vertex_group_bridge: bw = 1-bw
+ bridge_edges = [e for e in boundary_edges if bw[e.verts[0].index]*bw[e.verts[1].index] >= 1]
+ for e in bridge_edges:
+ e[crease_layer] = props.bridge_edges_crease
+ closed = bmesh.ops.bridge_loops(bm, edges=bridge_edges, use_pairs=True)
+ for f in closed['faces']: f.material_index += props.bridge_material_offset
+ boundary_edges = [e for e in bm.edges if e.is_boundary]
+ except: pass
+ # CAP
+ try:
+ dvert_lay = bm.verts.layers.deform.active
+ group_index = ob.vertex_groups[props.vertex_group_cap].index
+ bw = bmesh_get_weight_numpy(group_index, dvert_lay, bm.verts)
+ if props.invert_vertex_group_cap: bw = 1-bw
+ cap_edges = [e for e in boundary_edges if bw[e.verts[0].index]*bw[e.verts[1].index] >= 1]
+ for e in cap_edges:
+ e[crease_layer] = props.open_edges_crease
+ closed = bmesh.ops.holes_fill(bm, edges=cap_edges)
+ for f in closed['faces']: f.material_index += props.cap_material_offset
+ except: pass
+ bm.to_mesh(ob.data)
+
+class tissue_render_animation(Operator):
+ bl_idname = "render.tissue_render_animation"
+ bl_label = "Tissue Render Animation"
+ bl_description = "Turnaround for issues related to animatable tessellation"
+ bl_options = {'REGISTER', 'UNDO'}
+
+ start = True
+ path = ""
+ timer = None
+
+ def invoke(self, context, event):
+ self.start = True
+ return context.window_manager.invoke_props_dialog(self)
+
+ def draw(self, context):
+ layout = self.layout
+ col = layout.column(align=True)
+ col.label(text="All frames will be rendered in the background.")
+ col.label(text="Press ESC to abort.")
+
+ def modal(self, context, event):
+ '''
+ # check render format
+ format = context.scene.render.image_settings.file_format
+ if format in ('FFMPEG', 'AVI_RAW', 'AVI_JPEG'):
+ message = "Please use an image format as render output"
+ self.report({'ERROR'}, message)
+ return {'CANCELLED'}
+ '''
+ remove_tessellate_handler()
+ scene = context.scene
+ if event.type == 'ESC' or scene.frame_current >= scene.frame_end:
+ scene.render.filepath = self.path
+ # set again the handler
+ blender_handlers = bpy.app.handlers.frame_change_post
+ blender_handlers.append(anim_tessellate)
+ blender_handlers.append(reaction_diffusion_scene)
+ context.window_manager.event_timer_remove(self.timer)
+ if event.type == 'ESC':
+ print("Tissue: Render Animation aborted.")
+ return {'CANCELLED'}
+ else:
+ print("Tissue: Render Animation completed!")
+ return {'FINISHED'}
+ else:
+ self.execute(context)
+ return {'RUNNING_MODAL'}
+
+ def execute(self, context):
+ # check output format
+ format = context.scene.render.image_settings.file_format
+ if format in ('FFMPEG', 'AVI_RAW', 'AVI_JPEG'):
+ message = "Please use an image format as render output"
+ self.report({'ERROR'}, message)
+ return {'CANCELLED'}
+
+ scene = context.scene
+ if self.start:
+ remove_tessellate_handler()
+ reaction_diffusion_remove_handler(self, context)
+ scene = context.scene
+ scene.frame_current = scene.frame_start
+ self.path = scene.render.filepath
+ context.window_manager.modal_handler_add(self)
+ self.timer = context.window_manager.event_timer_add(0.1, window = context.window)
+ self.start = False
+ else:
+ scene.frame_current += scene.frame_step
+ anim_tessellate(scene)
+ reaction_diffusion_scene(scene)
+ scene.render.filepath = "{}{:04d}".format(self.path,scene.frame_current)
+ bpy.ops.render.render(write_still=True)
+ return {'RUNNING_MODAL'}
+
+def offset_boundary_materials(bm, boundary_mat_offset=0, boundary_variable_offset=False, auto_rotate_boundary=False):
+ if boundary_mat_offset != 0 or boundary_variable_offset:
+ bm.edges.ensure_lookup_table()
+ bm.faces.ensure_lookup_table()
+ bound_faces = []
+ bound_verts_value = [0]*len(bm.faces)
+ bound_edges_value = [0]*len(bm.faces)
+ shift_faces = [0]*len(bm.faces)
+ # store boundaries informations
+ for v in bm.verts:
+ if v.is_boundary:
+ for f in v.link_faces:
+ bound_faces.append(f)
+ bound_verts_value[f.index] += 1
+ for e in bm.edges:
+ if e.is_boundary:
+ for f in e.link_faces:
+ bound_edges_value[f.index] += 1
+ # Set material index offset
+ if boundary_variable_offset:
+ for f in bm.faces:
+ if bound_verts_value[f.index] > 0:
+ f.material_index += boundary_mat_offset
+ if bound_verts_value[f.index] == bound_edges_value[f.index]+1:
+ f.material_index += bound_verts_value[f.index]
+ else:
+ for f in bm.faces:
+ if bound_edges_value[f.index] > 0:
+ f.material_index += boundary_mat_offset
+ if auto_rotate_boundary:
+ rotate_faces = []
+ new_verts_all = []
+ for f in bm.faces:
+ val = bound_verts_value[f.index]
+ val2 = bound_edges_value[f.index]
+ if val > 0 and val2 == val-1 and val < len(f.verts):
+ pattern = [v.is_boundary for v in f.verts]
+ new_verts = [v for v in f.verts]
+ while True:
+ mult = 1
+ _pattern = pattern[val//2+1:] + pattern[:val//2+1]
+ for p in _pattern[-val:]: mult*=p
+ if mult == 1: break
+ pattern = pattern[-1:] + pattern[:-1]
+ new_verts = new_verts[-1:] + new_verts[:-1]
+ new_verts_all.append(new_verts)
+ rotate_faces.append(f)
+ if val == 4 and val2 == 3:
+ pattern = [e.is_boundary for e in f.edges]
+ new_verts = [v for v in f.verts]
+ while True:
+ mult = 1
+ _pattern = pattern[val2//2+1:] + pattern[:val2//2+1]
+ for p in _pattern[-val2:]: mult*=p
+ if mult == 1: break
+ pattern = pattern[-1:] + pattern[:-1]
+ new_verts = new_verts[-1:] + new_verts[:-1]
+ new_verts_all.append(new_verts)
+ rotate_faces.append(f)
+ for f, new_verts in zip(rotate_faces, new_verts_all):
+ material_index = f.material_index
+ bm.faces.remove(f)
+ f2 = bm.faces.new(new_verts)
+ f2.select = True
+ f2.material_index = material_index
+ bm.normal_update()
+ return bm
diff --git a/mesh_tissue/tissue_properties.py b/mesh_tissue/tissue_properties.py
new file mode 100644
index 00000000..433e60ea
--- /dev/null
+++ b/mesh_tissue/tissue_properties.py
@@ -0,0 +1,1060 @@
+# ##### BEGIN GPL LICENSE BLOCK #####
+#
+# This program is free software; you can redistribute it and/or
+# modify it under the terms of the GNU General Public License
+# as published by the Free Software Foundation; either version 2
+# of the License, or (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this program; if not, write to the Free Software Foundation,
+# Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
+#
+# ##### END GPL LICENSE BLOCK #####
+
+# ---------------------------- ADAPTIVE DUPLIFACES --------------------------- #
+# ------------------------------- version 0.84 ------------------------------- #
+# #
+# Creates duplicates of selected mesh to active morphing the shape according #
+# to target faces. #
+# #
+# (c) Alessandro Zomparelli #
+# (2017) #
+# #
+# http://www.co-de-it.com/ #
+# #
+# ############################################################################ #
+
+import bpy
+from bpy.types import (
+ Operator,
+ Panel,
+ PropertyGroup,
+ )
+from bpy.props import (
+ BoolProperty,
+ EnumProperty,
+ FloatProperty,
+ IntProperty,
+ StringProperty,
+ PointerProperty
+)
+from . import config
+
+
def update_dependencies(ob, objects):
    """Recursively collect the Tissue objects that *ob* depends on.

    Appends each (unlocked, Tissue-enabled) source object of *ob* to
    *objects*, walking the dependency chain depth-first. Returns the
    same *objects* list.
    """
    if ob.tissue.tissue_type == 'NONE':
        return objects
    if not ob.tissue.bool_dependencies:
        return objects
    for dep in get_deps(ob):
        skip = (dep.tissue.tissue_type == 'NONE'
                or dep.tissue.bool_lock
                or dep in objects)
        if skip:
            continue
        objects.append(dep)
        objects = update_dependencies(dep, objects)
    return objects
+
def get_deps(ob):
    """Return the direct source objects of a Tissue object *ob*.

    Tessellations depend on their generator and component; converted
    curves depend on their source object; anything else has none.
    """
    tissue_type = ob.tissue.tissue_type
    if tissue_type == 'TESSELLATE':
        props = ob.tissue_tessellate
        return [props.generator, props.component]
    if tissue_type == 'TO_CURVE':
        return [ob.tissue_to_curve.object]
    return []
+
def anim_tessellate_active(self, context):
    """Property update callback: re-tessellate the active object.

    Runs the tessellate update operator unless the object is locked or
    the settings are being bulk-copied (``bool_hold``). The update is
    best-effort: if any source pointer (generator/component/collection)
    is unset, accessing ``.name`` raises and the update is skipped.
    """
    ob = context.object
    props = ob.tissue_tessellate
    if ob.tissue.bool_lock or props.bool_hold:
        return
    try:
        # .name raises AttributeError while a pointer property is None.
        props.generator.name
        if props.component_mode == 'OBJECT':
            props.component.name
        elif props.component_mode == 'COLLECTION':
            props.component_coll.name
        bpy.ops.object.tissue_update_tessellate()
    except Exception:
        # Sources not ready (or operator failed): silently skip, as the
        # user may still be filling in the settings. Was a bare `except:`,
        # which also swallowed KeyboardInterrupt/SystemExit.
        pass
+
def anim_tessellate_object(ob):
    """Best-effort tessellation update; returns None whether or not it runs.

    *ob* is currently unused — the operator acts on the active object.
    """
    try:
        bpy.ops.object.tissue_update_tessellate()
    except Exception:
        # Was a bare `except:`; narrowed so Ctrl-C is not swallowed.
        return None
+
+#from bpy.app.handlers import persistent
+
+
def anim_tessellate(scene, depsgraph=None):
    """Frame-change handler: recompute every animated Tissue object in *scene*.

    Collects all objects flagged ``tissue.bool_run`` (plus, recursively,
    their dependencies), runs the matching Tissue update operator on each
    one with a context override, and finally restores the previously
    active object, selection and mode.
    """
    print('Tissue: animating tessellations...')

    #config.evaluatedDepsgraph = depsgraph

    # Snapshot the current context so it can be restored afterwards.
    # bpy.context can be restricted inside a handler, hence the guard.
    try:
        active_object = bpy.context.object
        old_mode = bpy.context.object.mode
        selected_objects = bpy.context.selected_objects
    except: active_object = old_mode = selected_objects = None

    # NOTE(review): Object.mode reports weight paint as 'WEIGHT_PAINT';
    # 'PAINT_WEIGHT' is the bpy.context.mode spelling — confirm which is
    # intended here, otherwise this branch never runs in weight paint.
    if old_mode in ('OBJECT', 'PAINT_WEIGHT'):
        update_objects = []
        for ob in scene.objects:
            if ob.tissue.bool_run and not ob.tissue.bool_lock:
                if ob not in update_objects: update_objects.append(ob)
                # Dependencies must be recomputed first, hence reversed().
                update_objects = list(reversed(update_dependencies(ob, update_objects)))
        for ob in update_objects:
            #override = {'object': ob}
            # Build a context override pointing at a 3D View; the break
            # only exits the area loop, so the last window that contains
            # a VIEW_3D area provides the override actually used.
            for window in bpy.context.window_manager.windows:
                screen = window.screen
                for area in screen.areas:
                    if area.type == 'VIEW_3D':
                        override = bpy.context.copy()
                        override['window'] = window
                        override['screen'] = screen
                        override['area'] = area
                        override['selected_objects'] = [ob]
                        override['object'] = ob
                        override['active_object'] = ob
                        override['selected_editable_objects'] = [ob]
                        override['mode'] = 'OBJECT'
                        override['view_layer'] = scene.view_layers[0]
                        break
            # NOTE(review): if no VIEW_3D area exists (e.g. background
            # render), `override` is unbound here and this raises — verify.
            if ob.tissue.tissue_type == 'TESSELLATE':
                bpy.ops.object.tissue_update_tessellate(override)
            elif ob.tissue.tissue_type == 'TO_CURVE':
                bpy.ops.object.tissue_convert_to_curve_update(override)

    # Restore the context captured above (selection, active object, mode).
    if old_mode != None:
        objects = bpy.context.view_layer.objects
        objects.active = active_object
        for o in objects: o.select_set(o in selected_objects)
        bpy.ops.object.mode_set(mode=old_mode)

    config.evaluatedDepsgraph = None
    print('end')
    return
+'''
+def OLD_anim_tessellate(scene, depsgraph):
+ print('Tissue: animating tessellations...')
+
+ #global evaluatedDepsgraph
+ #print(evaluatedDepsgraph)
+ print(config.evaluatedDepsgraph)
+ config.evaluatedDepsgraph = depsgraph
+ print(config.evaluatedDepsgraph)
+
+ try:
+ active_object = bpy.context.object
+ old_mode = bpy.context.object.mode
+ selected_objects = bpy.context.selected_objects
+ except: active_object = old_mode = selected_objects = None
+
+ if old_mode in ('OBJECT', 'PAINT_WEIGHT') or True:
+ update_objects = []
+ for ob in scene.objects:
+ if ob.tissue.bool_run and not ob.tissue.bool_lock:
+ if ob not in update_objects: update_objects.append(ob)
+ update_objects = list(reversed(update_dependencies(ob, update_objects)))
+ for ob in update_objects:
+ for window in bpy.context.window_manager.windows:
+ screen = window.screen
+ for area in screen.areas:
+ if area.type == 'VIEW_3D':
+ override = bpy.context.copy()
+ override['window'] = window
+ override['screen'] = screen
+ override['area'] = area
+ override['selected_objects'] = [ob]
+ override['object'] = ob
+ override['active_object'] = ob
+ override['selected_editable_objects'] = [ob]
+ override['mode'] = 'OBJECT'
+ override['view_layer'] = scene.view_layers[0]
+ break
+ bpy.ops.object.tissue_update_tessellate(override)
+
+ config.evaluatedDepsgraph = None
+ print('end')
+ print(config.evaluatedDepsgraph)
+ return
+'''
def remove_tessellate_handler():
    """Detach every Tissue animation handler from frame_change_post."""
    handlers = bpy.app.handlers.frame_change_post
    stale = [h for h in handlers if "anim_tessellate" in str(h)]
    for h in stale:
        handlers.remove(h)
+
def set_tessellate_handler(self, context):
    """Update callback for ``tissue.bool_run``.

    Clears any stale Tissue handlers, then installs ``anim_tessellate``
    on frame_change_post if at least one scene object is animatable.
    """
    remove_tessellate_handler()
    if any(o.tissue.bool_run for o in context.scene.objects):
        bpy.app.handlers.frame_change_post.append(anim_tessellate)
    return
+
+
class tissue_prop(PropertyGroup):
    """Per-object Tissue state, registered as ``Object.tissue``."""
    # When locked, automatic updates are skipped (checked by the frame
    # handler and by the property update callbacks).
    bool_lock : BoolProperty(
        name="Lock",
        description="Prevent automatic update on settings changes or if other objects have it in the hierarchy.",
        default=False
        )
    # If enabled, source objects are updated first (see update_dependencies).
    bool_dependencies : BoolProperty(
        name="Update Dependencies",
        description="Automatically updates source objects, when possible",
        default=False
        )
    # Toggling this installs/removes the frame-change handler.
    bool_run : BoolProperty(
        name="Animatable",
        description="Automatically recompute the geometry when the frame is changed. Tessellations may not work using the default Render Animation",
        default = False,
        update = set_tessellate_handler
        )
    # Which Tissue feature owns this object ('NONE' = plain object).
    tissue_type : EnumProperty(
        items=(
            ('NONE', "None", ""),
            ('TESSELLATE', "Tessellate", ""),
            ('TO_CURVE', "To Curve", "")
            ),
        default='NONE',
        name=""
        )
+
class tissue_tessellate_prop(PropertyGroup):
    """All Tessellate settings, registered as ``Object.tissue_tessellate``.

    Nearly every property triggers ``anim_tessellate_active`` on change so
    the tessellation is recomputed live; ``bool_hold`` suppresses that
    while settings are bulk-copied (see store_parameters).
    """
    # Guard flag: while True, update callbacks do nothing.
    bool_hold : BoolProperty(
        name="Hold",
        description="Wait...",
        default=False
        )
    zscale : FloatProperty(
        name="Scale", default=1, soft_min=0, soft_max=10,
        description="Scale factor for the component thickness",
        update = anim_tessellate_active
        )
    # --- Component source ---------------------------------------------
    component_mode : EnumProperty(
        items=(
            ('OBJECT', "Object", "Use the same component object for all the faces"),
            ('COLLECTION', "Collection", "Use multiple components from Collection"),
            ('MATERIALS', "Materials", "Use multiple components by materials name")
            ),
        default='OBJECT',
        name="Component Mode",
        update = anim_tessellate_active
        )
    scale_mode : EnumProperty(
        items=(
            ('CONSTANT', "Constant", "Uniform thinkness"),
            ('ADAPTIVE', "Relative", "Preserve component's proportions")
            ),
        default='ADAPTIVE',
        name="Z-Scale according to faces size",
        update = anim_tessellate_active
        )
    offset : FloatProperty(
        name="Surface Offset",
        default=1,
        min=-1,
        max=1,
        soft_min=-1,
        soft_max=1,
        description="Surface offset",
        update = anim_tessellate_active
        )
    # Coordinate system used to map the component onto each face.
    mode : EnumProperty(
        items=(
            ('BOUNDS', "Bounds", "The component fits automatically the size of the target face"),
            ('LOCAL', "Local", "Based on Local coordinates, from 0 to 1"),
            ('GLOBAL', 'Global', "Based on Global coordinates, from 0 to 1")),
        default='BOUNDS',
        name="Component Mode",
        update = anim_tessellate_active
        )
    rotation_mode : EnumProperty(
        items=(('RANDOM', "Random", "Random faces rotation"),
               ('UV', "Active UV", "Rotate according to UV coordinates"),
               ('WEIGHT', "Weight Gradient", "Rotate according to Vertex Group gradient"),
               ('DEFAULT', "Default", "Default rotation")),
        default='DEFAULT',
        name="Component Rotation",
        update = anim_tessellate_active
        )
    rotation_direction : EnumProperty(
        items=(('ORTHO', "Orthogonal", "Component main directions in XY"),
               ('DIAG', "Diagonal", "Component main direction aligned with diagonal")),
        default='ORTHO',
        name="Direction",
        update = anim_tessellate_active
        )
    rotation_shift : IntProperty(
        name="Shift",
        default=0,
        soft_min=0,
        soft_max=3,
        description="Shift components rotation",
        update = anim_tessellate_active
        )
    # --- Tessellation pattern -----------------------------------------
    fill_mode : EnumProperty(
        items=(
            ('TRI', 'Tri', 'Triangulate the base mesh'),
            ('QUAD', 'Quad', 'Regular quad tessellation. Uses only 3 or 4 vertices'),
            ('FAN', 'Fan', 'Radial tessellation for polygonal faces'),
            ('PATCH', 'Patch', 'Curved tessellation according to the last ' +
            'Subsurf\n(or Multires) modifiers. Works only with 4 sides ' +
            'patches.\nAfter the last Subsurf (or Multires) only ' +
            'deformation\nmodifiers can be used'),
            ('FRAME', 'Frame', 'Tessellation along the edges of each face')),
        default='QUAD',
        name="Fill Mode",
        update = anim_tessellate_active
        )
    combine_mode : EnumProperty(
        items=(
            ('LAST', 'Last', 'Show only the last iteration'),
            ('UNUSED', 'Unused', 'Combine each iteration with the unused faces of the previous iteration. Used for branching systems'),
            ('ALL', 'All', 'Combine the result of all iterations')),
        default='LAST',
        name="Combine Mode",
        update = anim_tessellate_active
        )
    gen_modifiers : BoolProperty(
        name="Generator Modifiers",
        default=True,
        description="Apply Modifiers and Shape Keys to the base object",
        update = anim_tessellate_active
        )
    com_modifiers : BoolProperty(
        name="Component Modifiers",
        default=True,
        description="Apply Modifiers and Shape Keys to the component object",
        update = anim_tessellate_active
        )
    # --- Merging -------------------------------------------------------
    merge : BoolProperty(
        name="Merge",
        default=False,
        description="Merge vertices in adjacent duplicates",
        update = anim_tessellate_active
        )
    merge_open_edges_only : BoolProperty(
        name="Open edges only",
        default=False,
        description="Merge only open edges",
        update = anim_tessellate_active
        )
    merge_thres : FloatProperty(
        name="Distance",
        default=0.0001,
        soft_min=0,
        soft_max=10,
        description="Limit below which to merge vertices",
        update = anim_tessellate_active
        )
    # --- Source objects (pointer properties) --------------------------
    generator : PointerProperty(
        type=bpy.types.Object,
        name="",
        description="Base object for the tessellation",
        update = anim_tessellate_active
        )
    component : PointerProperty(
        type=bpy.types.Object,
        name="",
        description="Component object for the tessellation",
        #default="",
        update = anim_tessellate_active
        )
    component_coll : PointerProperty(
        type=bpy.types.Collection,
        name="",
        description="Use objects inside the collection",
        #default="",
        update = anim_tessellate_active
        )
    target : PointerProperty(
        type=bpy.types.Object,
        name="",
        description="Target object for custom direction",
        #default="",
        update = anim_tessellate_active
        )
    even_thickness : BoolProperty(
        name="Even Thickness",
        default=False,
        description="Iterative sampling method for determine the correct length of the vectors (Experimental)",
        update = anim_tessellate_active
        )
    even_thickness_iter : IntProperty(
        name="Even Thickness Iterations",
        default=3,
        min = 1,
        soft_max = 20,
        description="More iterations produces more accurate results but make the tessellation slower",
        update = anim_tessellate_active
        )
    # --- Randomization -------------------------------------------------
    bool_random : BoolProperty(
        name="Randomize",
        default=False,
        description="Randomize component rotation",
        update = anim_tessellate_active
        )
    rand_seed : IntProperty(
        name="Seed",
        default=0,
        soft_min=0,
        soft_max=50,
        description="Random seed",
        update = anim_tessellate_active
        )
    coll_rand_seed : IntProperty(
        name="Seed",
        default=0,
        soft_min=0,
        soft_max=50,
        description="Random seed",
        update = anim_tessellate_active
        )
    rand_step : IntProperty(
        name="Steps",
        default=1,
        min=1,
        soft_max=2,
        description="Random step",
        update = anim_tessellate_active
        )
    # --- Transfers and filters ----------------------------------------
    bool_vertex_group : BoolProperty(
        name="Map Vertex Group",
        default=False,
        description="Transfer all Vertex Groups from Base object",
        update = anim_tessellate_active
        )
    bool_selection : BoolProperty(
        name="On selected Faces",
        default=False,
        description="Create Tessellation only on selected faces",
        update = anim_tessellate_active
        )
    bool_shapekeys : BoolProperty(
        name="Use Shape Keys",
        default=False,
        description="Transfer Component's Shape Keys. If the name of Vertex "
                    "Groups and Shape Keys are the same, they will be "
                    "automatically combined",
        update = anim_tessellate_active
        )
    bool_smooth : BoolProperty(
        name="Smooth Shading",
        default=False,
        description="Output faces with smooth shading rather than flat shaded",
        update = anim_tessellate_active
        )
    bool_materials : BoolProperty(
        name="Transfer Materials",
        default=False,
        description="Preserve component's materials",
        update = anim_tessellate_active
        )
    bool_material_id : BoolProperty(
        name="Tessellation on Material ID",
        default=False,
        description="Apply the component only on the selected Material",
        update = anim_tessellate_active
        )
    material_id : IntProperty(
        name="Index",
        default=0,
        min=0,
        description="Only the faces with the chosen Material Index will be used",
        update = anim_tessellate_active
        )
    bool_dissolve_seams : BoolProperty(
        name="Dissolve Seams",
        default=False,
        description="Dissolve all seam edges",
        update = anim_tessellate_active
        )
    iterations : IntProperty(
        name="Iterations",
        default=1,
        min=1,
        soft_max=5,
        description="Automatically repeat the Tessellation using the "
                    + "generated geometry as new base object.\nUsefull for "
                    + "for branching systems. Dangerous!",
        update = anim_tessellate_active
        )
    bool_combine : BoolProperty(
        name="Combine unused",
        default=False,
        description="Combine the generated geometry with unused faces",
        update = anim_tessellate_active
        )
    # UI only: toggles the advanced settings panel (no update callback).
    bool_advanced : BoolProperty(
        name="Advanced Settings",
        default=False,
        description="Show more settings"
        )
    normals_mode : EnumProperty(
        items=(
            ('VERTS', 'Normals', 'Consistent direction based on vertices normal'),
            ('FACES', 'Faces', 'Based on individual faces normal'),
            ('CUSTOM', 'Custom', 'Custom split normals'),
            ('SHAPEKEYS', 'Keys', "According to base object's shape keys"),
            ('OBJECT', 'Object', "According to a target object")),
        default='VERTS',
        name="Direction",
        update = anim_tessellate_active
        )
    # --- Status messages shown in the panel (no update callbacks) -----
    error_message : StringProperty(
        name="Error Message",
        default=""
        )
    warning_message : StringProperty(
        name="Warning Message",
        default=""
        )
    warning_message_thickness : StringProperty(
        name="Warning Message Thickness",
        default=""
        )
    warning_message_merge : StringProperty(
        name="Warning Message Merge",
        default=""
        )
    # --- Bounds and mesh closing --------------------------------------
    bounds_x : EnumProperty(
        items=(
            ('EXTEND', 'Extend', 'Default X coordinates'),
            ('CLIP', 'Clip', 'Trim out of bounds in X direction'),
            ('CYCLIC', 'Cyclic', 'Cyclic components in X direction')),
        default='EXTEND',
        name="Bounds X",
        update = anim_tessellate_active
        )
    bounds_y : EnumProperty(
        items=(
            ('EXTEND', 'Extend', 'Default Y coordinates'),
            ('CLIP', 'Clip', 'Trim out of bounds in Y direction'),
            ('CYCLIC', 'Cyclic', 'Cyclic components in Y direction')),
        default='EXTEND',
        name="Bounds Y",
        update = anim_tessellate_active
        )
    close_mesh : EnumProperty(
        items=(
            ('NONE', 'None', 'Keep the mesh open'),
            ('CAP', 'Cap Holes', 'Automatically cap open loops'),
            ('BRIDGE', 'Bridge Open Loops', 'Automatically bridge loop pairs'),
            ('BRIDGE_CAP', 'Custom', 'Bridge loop pairs and cap holes according to vertex groups')),
        default='NONE',
        name="Close Mesh",
        update = anim_tessellate_active
        )
    cap_faces : BoolProperty(
        name="Cap Holes",
        default=False,
        description="Cap open edges loops",
        update = anim_tessellate_active
        )
    # --- Frame fill mode options --------------------------------------
    frame_boundary : BoolProperty(
        name="Frame Boundary",
        default=False,
        description="Support face boundaries",
        update = anim_tessellate_active
        )
    fill_frame : BoolProperty(
        name="Fill Frame",
        default=False,
        description="Fill inner faces with Fan tessellation",
        update = anim_tessellate_active
        )
    boundary_mat_offset : IntProperty(
        name="Material Offset",
        default=0,
        description="Material Offset for boundaries (with Multi Components or Material ID)",
        update = anim_tessellate_active
        )
    fill_frame_mat : IntProperty(
        name="Material Offset",
        default=0,
        description="Material Offset for inner faces (with Multi Components or Material ID)",
        update = anim_tessellate_active
        )
    open_edges_crease : FloatProperty(
        name="Open Edges Crease",
        default=0,
        min=0,
        max=1,
        description="Automatically set crease for open edges",
        update = anim_tessellate_active
        )
    bridge_edges_crease : FloatProperty(
        name="Bridge Edges Crease",
        default=0,
        min=0,
        max=1,
        description="Automatically set crease for bridge edges",
        update = anim_tessellate_active
        )
    bridge_smoothness : FloatProperty(
        name="Smoothness",
        default=1,
        min=0,
        max=1,
        description="Bridge Smoothness",
        update = anim_tessellate_active
        )
    frame_thickness : FloatProperty(
        name="Frame Thickness",
        default=0.2,
        min=0,
        soft_max=2,
        description="Frame Thickness",
        update = anim_tessellate_active
        )
    frame_mode : EnumProperty(
        items=(
            ('CONSTANT', 'Constant', 'Even thickness'),
            ('RELATIVE', 'Relative', 'Frame offset depends on face areas')),
        default='CONSTANT',
        name="Offset",
        update = anim_tessellate_active
        )
    bridge_cuts : IntProperty(
        name="Cuts",
        default=0,
        min=0,
        max=20,
        description="Bridge Cuts",
        update = anim_tessellate_active
        )
    cap_material_offset : IntProperty(
        name="Material Offset",
        default=0,
        min=0,
        description="Material index offset for the cap faces",
        update = anim_tessellate_active
        )
    bridge_material_offset : IntProperty(
        name="Material Offset",
        default=0,
        min=0,
        description="Material index offset for the bridge faces",
        update = anim_tessellate_active
        )
    patch_subs : IntProperty(
        name="Patch Subdivisions",
        default=0,
        min=0,
        description="Subdivisions levels for Patch tessellation after the first iteration",
        update = anim_tessellate_active
        )
    use_origin_offset : BoolProperty(
        name="Align to Origins",
        default=False,
        description="Define offset according to components origin and local Z coordinate",
        update = anim_tessellate_active
        )

    # --- Vertex-group driven parameters -------------------------------
    vertex_group_thickness : StringProperty(
        name="Thickness weight", default='',
        description="Vertex Group used for thickness",
        update = anim_tessellate_active
        )
    invert_vertex_group_thickness : BoolProperty(
        name="Invert", default=False,
        description="Invert the vertex group influence",
        update = anim_tessellate_active
        )
    vertex_group_thickness_factor : FloatProperty(
        name="Factor",
        default=0,
        min=0,
        max=1,
        description="Thickness factor to use for zero vertex group influence",
        update = anim_tessellate_active
        )

    vertex_group_cap_owner : EnumProperty(
        items=(
            ('BASE', 'Base', 'Use base vertex group'),
            ('COMP', 'Component', 'Use component vertex group')),
        default='COMP',
        name="Source",
        update = anim_tessellate_active
        )
    vertex_group_cap : StringProperty(
        name="Cap Vertex Group", default='',
        description="Vertex Group used for cap open edges",
        update = anim_tessellate_active
        )
    invert_vertex_group_cap : BoolProperty(
        name="Invert", default=False,
        description="Invert the vertex group influence",
        update = anim_tessellate_active
        )

    vertex_group_bridge_owner : EnumProperty(
        items=(
            ('BASE', 'Base', 'Use base vertex group'),
            ('COMP', 'Component', 'Use component vertex group')),
        default='COMP',
        name="Source",
        update = anim_tessellate_active
        )
    vertex_group_bridge : StringProperty(
        name="Bridge Vertex Group", default='',
        description="Vertex Group used for bridge open edges",
        update = anim_tessellate_active
        )
    invert_vertex_group_bridge : BoolProperty(
        name="Invert", default=False,
        description="Invert the vertex group influence",
        update = anim_tessellate_active
        )

    vertex_group_rotation : StringProperty(
        name="Rotation weight", default='',
        description="Vertex Group used for rotation",
        update = anim_tessellate_active
        )
    invert_vertex_group_rotation : BoolProperty(
        name="Invert", default=False,
        description="Invert the vertex group influence",
        update = anim_tessellate_active
        )
    # --- Normals smoothing --------------------------------------------
    smooth_normals : BoolProperty(
        name="Smooth Normals", default=False,
        description="Smooth normals of the surface in order to reduce intersections",
        update = anim_tessellate_active
        )
    smooth_normals_iter : IntProperty(
        name="Iterations",
        default=5,
        min=0,
        description="Smooth iterations",
        update = anim_tessellate_active
        )
    smooth_normals_uv : FloatProperty(
        name="UV Anisotropy",
        default=0,
        min=-1,
        max=1,
        description="0 means no anisotropy, -1 represent the U direction, while 1 represent the V direction",
        update = anim_tessellate_active
        )
    vertex_group_smooth_normals : StringProperty(
        name="Smooth normals weight", default='',
        description="Vertex Group used for smooth normals",
        update = anim_tessellate_active
        )
    invert_vertex_group_smooth_normals : BoolProperty(
        name="Invert", default=False,
        description="Invert the vertex group influence",
        update = anim_tessellate_active
        )

    vertex_group_distribution : StringProperty(
        name="Distribution weight", default='',
        description="Vertex Group used for gradient distribution",
        update = anim_tessellate_active
        )
    invert_vertex_group_distribution : BoolProperty(
        name="Invert", default=False,
        description="Invert the vertex group influence",
        update = anim_tessellate_active
        )
    vertex_group_distribution_factor : FloatProperty(
        name="Factor",
        default=0,
        min=0,
        max=1,
        description="Randomness factor to use for zero vertex group influence",
        update = anim_tessellate_active
        )
    consistent_wedges : BoolProperty(
        name="Consistent Wedges", default=True,
        description="Use same component for the wedges generated by the Fan tessellation",
        update = anim_tessellate_active
        )
    # --- Normals scaling ----------------------------------------------
    normals_x : FloatProperty(
        name="X", default=1, min=0, max=1,
        description="Scale X component of the normals",
        update = anim_tessellate_active
        )
    normals_y : FloatProperty(
        name="Y", default=1, min=0, max=1,
        description="Scale Y component of the normals",
        update = anim_tessellate_active
        )
    normals_z : FloatProperty(
        name="Z", default=1, min=0, max=1,
        description="Scale Z component of the normals",
        update = anim_tessellate_active
        )
    vertex_group_scale_normals : StringProperty(
        name="Scale normals weight", default='',
        description="Vertex Group used for editing the normals directions",
        update = anim_tessellate_active
        )
    invert_vertex_group_scale_normals : BoolProperty(
        name="Invert", default=False,
        description="Invert the vertex group influence",
        update = anim_tessellate_active
        )
    # --- Boundary handling --------------------------------------------
    boundary_variable_offset : BoolProperty(
        name="Boundary Variable Offset", default=False,
        description="Additional material offset based on the number of boundary vertices",
        update = anim_tessellate_active
        )
    auto_rotate_boundary : BoolProperty(
        name="Automatic Rotation", default=False,
        description="Automatically rotate the boundary faces",
        update = anim_tessellate_active
        )
+
def store_parameters(operator, ob):
    """Copy every tessellation setting from *operator* onto ``ob.tissue_tessellate``.

    Update callbacks are suppressed via ``bool_hold`` while the values are
    copied, then re-enabled. Returns *ob*.
    """
    props = ob.tissue_tessellate
    # Block anim_tessellate_active during the bulk copy.
    props.bool_hold = True
    # Pointer properties are resolved by name; missing names are skipped.
    if operator.generator in bpy.data.objects.keys():
        props.generator = bpy.data.objects[operator.generator]
    if operator.component in bpy.data.objects.keys():
        props.component = bpy.data.objects[operator.component]
    if operator.component_coll in bpy.data.collections.keys():
        props.component_coll = bpy.data.collections[operator.component_coll]
    if operator.target in bpy.data.objects.keys():
        props.target = bpy.data.objects[operator.target]
    # Plain values are copied one-to-one, in the original assignment order.
    # NOTE(review): bridge_material_offset is never copied here — confirm
    # whether that omission is intentional.
    for attr in (
            'even_thickness', 'even_thickness_iter', 'zscale', 'offset',
            'gen_modifiers', 'com_modifiers', 'mode', 'rotation_mode',
            'rotation_shift', 'rotation_direction', 'merge',
            'merge_open_edges_only', 'merge_thres', 'scale_mode',
            'bool_random', 'rand_seed', 'coll_rand_seed', 'rand_step',
            'fill_mode', 'bool_vertex_group', 'bool_selection',
            'bool_shapekeys', 'bool_smooth', 'bool_materials',
            'bool_material_id', 'material_id', 'bool_dissolve_seams',
            'iterations', 'bool_advanced', 'normals_mode', 'bool_combine',
            'combine_mode', 'bounds_x', 'bounds_y', 'cap_faces',
            'close_mesh', 'bridge_cuts', 'bridge_smoothness',
            'frame_thickness', 'frame_mode', 'frame_boundary', 'fill_frame',
            'boundary_mat_offset', 'fill_frame_mat', 'cap_material_offset',
            'patch_subs', 'use_origin_offset', 'vertex_group_thickness',
            'invert_vertex_group_thickness', 'vertex_group_thickness_factor',
            'vertex_group_distribution', 'invert_vertex_group_distribution',
            'vertex_group_distribution_factor', 'vertex_group_cap_owner',
            'vertex_group_cap', 'invert_vertex_group_cap',
            'vertex_group_bridge_owner', 'vertex_group_bridge',
            'invert_vertex_group_bridge', 'vertex_group_rotation',
            'invert_vertex_group_rotation', 'smooth_normals',
            'smooth_normals_iter', 'smooth_normals_uv',
            'vertex_group_smooth_normals',
            'invert_vertex_group_smooth_normals', 'component_mode',
            'consistent_wedges', 'normals_x', 'normals_y', 'normals_z',
            'vertex_group_scale_normals', 'invert_vertex_group_scale_normals',
            'boundary_variable_offset', 'auto_rotate_boundary'):
        setattr(props, attr, getattr(operator, attr))
    props.bool_hold = False
    return ob
+
+def load_parameters(operator, ob):
+ operator.generator = ob.tissue_tessellate.generator.name
+ operator.component = ob.tissue_tessellate.component.name
+ operator.component_coll = ob.tissue_tessellate.component_coll.name
+ operator.zscale = ob.tissue_tessellate.zscale
+ operator.offset = ob.tissue_tessellate.offset
+ operator.gen_modifiers = ob.tissue_tessellate.gen_modifiers
+ operator.com_modifiers = ob.tissue_tessellate.com_modifiers
+ operator.mode = ob.tissue_tessellate.mode
+ operator.rotation_mode = ob.tissue_tessellate.rotation_mode
+ operator.rotation_shift = ob.tissue_tessellate.rotation_shift
+ operator.rotation_direction = ob.tissue_tessellate.rotation_direction
+ operator.merge = ob.tissue_tessellate.merge
+ operator.merge_open_edges_only = ob.tissue_tessellate.merge_open_edges_only
+ operator.merge_thres = ob.tissue_tessellate.merge_thres
+ operator.scale_mode = ob.tissue_tessellate.scale_mode
+ operator.bool_random = ob.tissue_tessellate.bool_random
+ operator.rand_seed = ob.tissue_tessellate.rand_seed
+ operator.coll_rand_seed = ob.tissue_tessellate.coll_rand_seed
+ operator.rand_step = ob.tissue_tessellate.rand_step
+ operator.fill_mode = ob.tissue_tessellate.fill_mode
+ operator.bool_vertex_group = ob.tissue_tessellate.bool_vertex_group
+ operator.bool_selection = ob.tissue_tessellate.bool_selection
+ operator.bool_shapekeys = ob.tissue_tessellate.bool_shapekeys
+ operator.bool_smooth = ob.tissue_tessellate.bool_smooth
+ operator.bool_materials = ob.tissue_tessellate.bool_materials
+ operator.bool_material_id = ob.tissue_tessellate.bool_material_id
+ operator.material_id = ob.tissue_tessellate.material_id
+ operator.bool_dissolve_seams = ob.tissue_tessellate.bool_dissolve_seams
+ operator.iterations = ob.tissue_tessellate.iterations
+ operator.bool_advanced = ob.tissue_tessellate.bool_advanced
+ operator.normals_mode = ob.tissue_tessellate.normals_mode
+ operator.bool_combine = ob.tissue_tessellate.bool_combine
+ operator.combine_mode = ob.tissue_tessellate.combine_mode
+ operator.bounds_x = ob.tissue_tessellate.bounds_x
+ operator.bounds_y = ob.tissue_tessellate.bounds_y
+ operator.cap_faces = ob.tissue_tessellate.cap_faces
+ operator.close_mesh = ob.tissue_tessellate.close_mesh
+ operator.bridge_cuts = ob.tissue_tessellate.bridge_cuts
+ operator.bridge_smoothness = ob.tissue_tessellate.bridge_smoothness
+ operator.cap_material_offset = ob.tissue_tessellate.cap_material_offset
+ operator.patch_subs = ob.tissue_tessellate.patch_subs
+ operator.frame_boundary = ob.tissue_tessellate.frame_boundary
+ operator.fill_frame = ob.tissue_tessellate.fill_frame
+ operator.boundary_mat_offset = ob.tissue_tessellate.boundary_mat_offset
+ operator.fill_frame_mat = ob.tissue_tessellate.fill_frame_mat
+ operator.frame_thickness = ob.tissue_tessellate.frame_thickness
+ operator.frame_mode = ob.tissue_tessellate.frame_mode
+ operator.use_origin_offset = ob.tissue_tessellate.use_origin_offset
+ operator.vertex_group_thickness = ob.tissue_tessellate.vertex_group_thickness
+ operator.invert_vertex_group_thickness = ob.tissue_tessellate.invert_vertex_group_thickness
+ operator.vertex_group_thickness_factor = ob.tissue_tessellate.vertex_group_thickness_factor
+ operator.vertex_group_distribution = ob.tissue_tessellate.vertex_group_distribution
+ operator.invert_vertex_group_distribution = ob.tissue_tessellate.invert_vertex_group_distribution
+ operator.vertex_group_distribution_factor = ob.tissue_tessellate.vertex_group_distribution_factor
+ operator.vertex_group_cap_owner = ob.tissue_tessellate.vertex_group_cap_owner
+ operator.vertex_group_cap = ob.tissue_tessellate.vertex_group_cap
+ operator.invert_vertex_group_cap = ob.tissue_tessellate.invert_vertex_group_cap
+ operator.vertex_group_bridge_owner = ob.tissue_tessellate.vertex_group_bridge_owner
+ operator.vertex_group_bridge = ob.tissue_tessellate.vertex_group_bridge
+ operator.invert_vertex_group_bridge = ob.tissue_tessellate.invert_vertex_group_bridge
+ operator.vertex_group_rotation = ob.tissue_tessellate.vertex_group_rotation
+ operator.invert_vertex_group_rotation = ob.tissue_tessellate.invert_vertex_group_rotation
+ operator.smooth_normals = ob.tissue_tessellate.smooth_normals
+ operator.smooth_normals_iter = ob.tissue_tessellate.smooth_normals_iter
+ operator.smooth_normals_uv = ob.tissue_tessellate.smooth_normals_uv
+ operator.vertex_group_smooth_normals = ob.tissue_tessellate.vertex_group_smooth_normals
+ operator.invert_vertex_group_smooth_normals = ob.tissue_tessellate.invert_vertex_group_smooth_normals
+ operator.component_mode = ob.tissue_tessellate.component_mode
+ operator.consistent_wedges = ob.tissue_tessellate.consistent_wedges
+ operator.normals_x = ob.tissue_tessellate.normals_x
+ operator.normals_y = ob.tissue_tessellate.normals_y
+ operator.normals_z = ob.tissue_tessellate.normals_z
+ operator.vertex_group_scale_normals = ob.tissue_tessellate.vertex_group_scale_normals
+ operator.invert_vertex_group_scale_normals = ob.tissue_tessellate.invert_vertex_group_scale_normals
+ operator.boundary_variable_offset = ob.tissue_tessellate.boundary_variable_offset
+ operator.auto_rotate_boundary = ob.tissue_tessellate.auto_rotate_boundary
+ return ob
+
+def props_to_dict(ob):
+ props = ob.tissue_tessellate
+ tessellate_dict = {}
+ tessellate_dict['self'] = ob
+ tessellate_dict['generator'] = props.generator
+ tessellate_dict['component'] = props.component
+ tessellate_dict['component_coll'] = props.component_coll
+ tessellate_dict['offset'] = props.offset
+ tessellate_dict['zscale'] = props.zscale
+ tessellate_dict['gen_modifiers'] = props.gen_modifiers
+ tessellate_dict['com_modifiers'] = props.com_modifiers
+ tessellate_dict['mode'] = props.mode
+ tessellate_dict['scale_mode'] = props.scale_mode
+ tessellate_dict['rotation_mode'] = props.rotation_mode
+ tessellate_dict['rotation_shift'] = props.rotation_shift
+ tessellate_dict['rotation_direction'] = props.rotation_direction
+ tessellate_dict['rand_seed'] = props.rand_seed
+ tessellate_dict['coll_rand_seed'] = props.coll_rand_seed
+ tessellate_dict['rand_step'] = props.rand_step
+ tessellate_dict['fill_mode'] = props.fill_mode
+ tessellate_dict['bool_vertex_group'] = props.bool_vertex_group
+ tessellate_dict['bool_selection'] = props.bool_selection
+ tessellate_dict['bool_shapekeys'] = props.bool_shapekeys
+ tessellate_dict['bool_material_id'] = props.bool_material_id
+ tessellate_dict['material_id'] = props.material_id
+ tessellate_dict['normals_mode'] = props.normals_mode
+ tessellate_dict['bounds_x'] = props.bounds_x
+ tessellate_dict['bounds_y'] = props.bounds_y
+ tessellate_dict['use_origin_offset'] = props.use_origin_offset
+ tessellate_dict['target'] = props.target
+ tessellate_dict['even_thickness'] = props.even_thickness
+ tessellate_dict['even_thickness_iter'] = props.even_thickness_iter
+ tessellate_dict['frame_thickness'] = props.frame_thickness
+ tessellate_dict['frame_mode'] = props.frame_mode
+ tessellate_dict['frame_boundary'] = props.frame_boundary
+ tessellate_dict['fill_frame'] = props.fill_frame
+ tessellate_dict['boundary_mat_offset'] = props.boundary_mat_offset
+ tessellate_dict['fill_frame_mat'] = props.fill_frame_mat
+ tessellate_dict['vertex_group_thickness'] = props.vertex_group_thickness
+ tessellate_dict['invert_vertex_group_thickness'] = props.invert_vertex_group_thickness
+ tessellate_dict['vertex_group_thickness_factor'] = props.vertex_group_thickness_factor
+ tessellate_dict['vertex_group_distribution'] = props.vertex_group_distribution
+ tessellate_dict['invert_vertex_group_distribution'] = props.invert_vertex_group_distribution
+ tessellate_dict['vertex_group_distribution_factor'] = props.vertex_group_distribution_factor
+ tessellate_dict['vertex_group_cap_owner'] = props.vertex_group_cap_owner
+ tessellate_dict['vertex_group_cap'] = props.vertex_group_cap
+ tessellate_dict['invert_vertex_group_cap'] = props.invert_vertex_group_cap
+ tessellate_dict['vertex_group_bridge_owner'] = props.vertex_group_bridge_owner
+ tessellate_dict['vertex_group_bridge'] = props.vertex_group_bridge
+ tessellate_dict['invert_vertex_group_bridge'] = props.invert_vertex_group_bridge
+ tessellate_dict['vertex_group_rotation'] = props.vertex_group_rotation
+ tessellate_dict['invert_vertex_group_rotation'] = props.invert_vertex_group_rotation
+ tessellate_dict['smooth_normals'] = props.smooth_normals
+ tessellate_dict['smooth_normals_iter'] = props.smooth_normals_iter
+ tessellate_dict['smooth_normals_uv'] = props.smooth_normals_uv
+ tessellate_dict['vertex_group_smooth_normals'] = props.vertex_group_smooth_normals
+ tessellate_dict['invert_vertex_group_smooth_normals'] = props.invert_vertex_group_smooth_normals
+ tessellate_dict['component_mode'] = props.component_mode
+ tessellate_dict['consistent_wedges'] = props.consistent_wedges
+ tessellate_dict["normals_x"] = props.normals_x
+ tessellate_dict["normals_y"] = props.normals_y
+ tessellate_dict["normals_z"] = props.normals_z
+ tessellate_dict["vertex_group_scale_normals"] = props.vertex_group_scale_normals
+ tessellate_dict["invert_vertex_group_scale_normals"] = props.invert_vertex_group_scale_normals
+ tessellate_dict["boundary_variable_offset"] = props.boundary_variable_offset
+ tessellate_dict["auto_rotate_boundary"] = props.auto_rotate_boundary
+ return tessellate_dict
+
+def copy_tessellate_props(source_ob, target_ob):
+ source_props = source_ob.tissue_tessellate
+ target_props = target_ob.tissue_tessellate
+ for key in source_props.keys():
+ target_props[key] = source_props[key]
+ return
diff --git a/mesh_tissue/utils.py b/mesh_tissue/utils.py
index f98bc6d0..cf43d609 100644
--- a/mesh_tissue/utils.py
+++ b/mesh_tissue/utils.py
@@ -1,87 +1,57 @@
# SPDX-License-Identifier: GPL-2.0-or-later
-import bpy
+import bpy, bmesh
import threading
import numpy as np
import multiprocessing
from multiprocessing import Process, Pool
+from mathutils import Vector, Matrix
+from math import *
+try: from .numba_functions import *
+except: pass
-weight = []
-n_threads = multiprocessing.cpu_count()
-
-class ThreadVertexGroup(threading.Thread):
- def __init__ ( self, id, vertex_group, n_verts):
- self.id = id
- self.vertex_group = vertex_group
- self.n_verts = n_verts
- threading.Thread.__init__ ( self )
-
- def run (self):
- global weight
- global n_threads
- verts = np.arange(int(self.n_verts/8))*8 + self.id
- for v in verts:
- try:
- weight[v] = self.vertex_group.weight(v)
- except:
- pass
-
-def thread_read_weight(_weight, vertex_group):
- global weight
- global n_threads
- print(n_threads)
- weight = _weight
- n_verts = len(weight)
- threads = [ThreadVertexGroup(i, vertex_group, n_verts) for i in range(n_threads)]
- for t in threads: t.start()
- for t in threads: t.join()
- return weight
+from . import config
-def process_read_weight(id, vertex_group, n_verts):
- global weight
- global n_threads
- verts = np.arange(int(self.n_verts/8))*8 + self.id
- for v in verts:
- try:
- weight[v] = self.vertex_group.weight(v)
- except:
- pass
-
-
-def read_weight(_weight, vertex_group):
- global weight
- global n_threads
- print(n_threads)
- weight = _weight
- n_verts = len(weight)
- n_cores = multiprocessing.cpu_count()
- pool = Pool(processes=n_cores)
- multiple_results = [pool.apply_async(process_read_weight, (i, vertex_group, n_verts)) for i in range(n_cores)]
- #processes = [Process(target=process_read_weight, args=(i, vertex_group, n_verts)) for i in range(n_threads)]
- #for t in processes: t.start()
- #for t in processes: t.join()
- return weight
+def use_numba_tess():
+ tissue_addon = bpy.context.preferences.addons[__package__]
+ if 'use_numba_tess' in tissue_addon.preferences.keys():
+ return tissue_addon.preferences['use_numba_tess']
+ else:
+ return True
-#Recursively transverse layer_collection for a particular name
-def recurLayerCollection(layerColl, collName):
- found = None
- if (layerColl.name == collName):
- return layerColl
- for layer in layerColl.children:
- found = recurLayerCollection(layer, collName)
- if found:
- return found
+def tissue_time(start_time, name, levels=0):
+ tissue_addon = bpy.context.preferences.addons[__package__]
+ end_time = time.time()
+ if 'print_stats' in tissue_addon.preferences.keys():
+ ps = tissue_addon.preferences['print_stats']
+ else:
+ ps = 1
+ if levels < ps:
+ if "Tissue: " in name: head = ""
+ else: head = " "
+ if start_time:
+ print('{}{}{} in {:.4f} sec'.format(head, "| "*levels, name, end_time - start_time))
+ else:
+ print('{}{}{}'.format(head, "| "*levels, name))
+ return end_time
-def auto_layer_collection():
- # automatically change active layer collection
- layer = bpy.context.view_layer.active_layer_collection
- layer_collection = bpy.context.view_layer.layer_collection
- if layer.hide_viewport or layer.collection.hide_viewport:
- collections = bpy.context.object.users_collection
- for c in collections:
- lc = recurLayerCollection(layer_collection, c.name)
- if not c.hide_viewport and not lc.hide_viewport:
- bpy.context.view_layer.active_layer_collection = lc
+
+# ------------------------------------------------------------------
+# MATH
+# ------------------------------------------------------------------
+
+def _np_broadcast(arrays):
+ shapes = [arr.shape for arr in arrays]
+ for i in range(len(shapes[0])):
+ ish = [sh[i] for sh in shapes]
+ max_len = max(ish)
+ for j in range(len(arrays)):
+ leng = ish[j]
+ if leng == 1: arrays[j] = np.repeat(arrays[j], max_len, axis=i)
+ for arr in arrays:
+ arr = arr.flatten()
+ #vt = v0 + (v1 - v0) * t
+ return arrays
def lerp(a, b, t):
return a + (b - a) * t
@@ -94,7 +64,8 @@ def _lerp2(v1, v2, v3, v4, v):
def lerp2(v1, v2, v3, v4, v):
v12 = v1 + (v2 - v1) * v.x
v34 = v3 + (v4 - v3) * v.x
- return v12 + (v34 - v12) * v.y
+ v = v12 + (v34 - v12) * v.y
+ return v
def lerp3(v1, v2, v3, v4, v):
loc = lerp2(v1.co, v2.co, v3.co, v4.co, v)
@@ -102,38 +73,149 @@ def lerp3(v1, v2, v3, v4, v):
nor.normalize()
return loc + nor * v.z
-def _convert_object_to_mesh(ob, apply_modifiers=True, preserve_status=True):
- if not apply_modifiers:
- mod_visibility = [m.show_viewport for m in ob.modifiers]
- for m in ob.modifiers:
- m.show_viewport = False
- if preserve_status:
- # store status
- mode = bpy.context.object.mode
- selected = bpy.context.selected_objects
- active = bpy.context.object
- # change status
- bpy.ops.object.mode_set(mode='OBJECT')
- bpy.ops.object.select_all(action='DESELECT')
- new_ob = ob.copy()
- new_ob.data = ob.data.copy()
- bpy.context.collection.objects.link(new_ob)
- bpy.context.view_layer.objects.active = new_ob
- new_ob.select_set(True)
- bpy.ops.object.convert(target='MESH')
- if preserve_status:
- # restore status
- bpy.ops.object.select_all(action='DESELECT')
- for o in selected: o.select_set(True)
- bpy.context.view_layer.objects.active = active
- bpy.ops.object.mode_set(mode=mode)
- if not apply_modifiers:
- for m,vis in zip(ob.modifiers,mod_visibility):
- m.show_viewport = vis
- return new_ob
+import sys
+def np_lerp2(v00, v10, v01, v11, vx, vy, mode=''):
+ if 'numba' in sys.modules and use_numba_tess():
+ if mode == 'verts':
+ co2 = numba_interp_points(v00, v10, v01, v11, vx, vy)
+ elif mode == 'shapekeys':
+ co2 = numba_interp_points_sk(v00, v10, v01, v11, vx, vy)
+ else:
+ co2 = numba_lerp2(v00, v10, v01, v11, vx, vy)
+ else:
+ co0 = v00 + (v10 - v00) * vx
+ co1 = v01 + (v11 - v01) * vx
+ co2 = co0 + (co1 - co0) * vy
+ return co2
+
+def calc_thickness(co2,n2,vz,a,weight):
+ if 'numba' in sys.modules and use_numba_tess():
+ if len(co2.shape) == 3:
+ if type(a) != np.ndarray:
+ a = np.ones(len(co2)).reshape((-1,1,1))
+ if type(weight) != np.ndarray:
+ weight = np.ones(len(co2)).reshape((-1,1,1))
+ co3 = numba_calc_thickness_area_weight(co2,n2,vz,a,weight)
+ elif len(co2.shape) == 4:
+ n_patches = co2.shape[0]
+ n_sk = co2.shape[1]
+ n_verts = co2.shape[2]
+ if type(a) != np.ndarray:
+ a = np.ones(n_patches).reshape((n_patches,1,1,1))
+ if type(weight) != np.ndarray:
+ weight = np.ones(n_patches).reshape((n_patches,1,1,1))
+ na = a.shape[1]-1
+ nw = weight.shape[1]-1
+ co3 = np.empty((n_sk,n_patches,n_verts,3))
+ for i in range(n_sk):
+ co3[i] = numba_calc_thickness_area_weight(co2[:,i],n2[:,i],vz[:,i],a[:,min(i,na)],weight[:,min(i,nw)])
+ co3 = co3.swapaxes(0,1)
+ else:
+ use_area = type(a) == np.ndarray
+ use_weight = type(weight) == np.ndarray
+ if use_area:
+ if use_weight:
+ co3 = co2 + n2 * vz * a * weight
+ else:
+ co3 = co2 + n2 * vz * a
+ else:
+ if use_weight:
+ co3 = co2 + n2 * vz * weight
+ else:
+ co3 = co2 + n2 * vz
+ return co3
+
+def combine_and_flatten(arrays):
+ if 'numba' in sys.modules:
+ new_list = numba_combine_and_flatten(arrays)
+ else:
+ new_list = np.concatenate(arrays, axis=0)
+ new_list = new_list.flatten().tolist()
+ return new_list
+
+def np_interp2(grid, vx, vy):
+ grid_shape = grid.shape[-2:]
+ levels = len(grid.shape)-2
+ nu = grid_shape[0]
+ nv = grid_shape[1]
+ u = np.arange(nu)/(nu-1)
+ v = np.arange(nv)/(nv-1)
+ u_shape = [1]*levels + [nu]
+ v_shape = [1]*levels + [nv]
+
+ co0 = np.interp()
+ co1 = np.interp()
+ co2 = np.interp()
+ return co2
+
+def flatten_vector(vec, x, y):
+ """
+ Find planar vector according to two axes.
+ :arg vec: Input vector.
+ :type vec: :class:`mathutils.Vector`
+ :arg x: First axis.
+ :type x: :class:`mathutils.Vector`
+ :arg y: Second axis.
+ :type y: :class:`mathutils.Vector`
+ :return: Projected 2D Vector.
+ :rtype: :class:`mathutils.Vector`
+ """
+ vx = vec.project(x)
+ vy = vec.project(y)
+ mult = 1 if vx.dot(x) > 0 else -1
+ vx = mult*vx.length
+ mult = 1 if vy.dot(y) > 0 else -1
+ vy = mult*vy.length
+ return Vector((vx, vy))
+
+def vector_rotation(vec):
+ """
+ Find the vector rotation angle relative to the X axis.
+ :arg vec: Input vector.
+ :type vec: :class:`mathutils.Vector`
+ :return: Angle in radians.
+ :rtype: float
+ """
+ v0 = Vector((1,0))
+ ang = Vector.angle_signed(vec, v0)
+ if ang < 0: ang = 2*pi + ang
+ return ang
+
+# ------------------------------------------------------------------
+# SCENE
+# ------------------------------------------------------------------
+
+def set_animatable_fix_handler(self, context):
+ '''
+ Prevent Blender Crashes with handlers
+ '''
+ old_handlers = []
+ blender_handlers = bpy.app.handlers.render_init
+ for h in blender_handlers:
+ if "turn_off_animatable" in str(h):
+ old_handlers.append(h)
+ for h in old_handlers: blender_handlers.remove(h)
+ blender_handlers.append(turn_off_animatable)
+ return
+
+def turn_off_animatable(scene):
+ '''
+ Prevent Blender Crashes with handlers
+ '''
+ for o in [o for o in bpy.data.objects if o.type == 'MESH']:
+ o.tissue_tessellate.bool_run = False
+ #if not o.reaction_diffusion_settings.bool_cache:
+ # o.reaction_diffusion_settings.run = False
+ #except: pass
+ return
+
+# ------------------------------------------------------------------
+# OBJECTS
+# ------------------------------------------------------------------
def convert_object_to_mesh(ob, apply_modifiers=True, preserve_status=True):
- if not ob.name: return None
+ try: ob.name
+ except: return None
if ob.type != 'MESH':
if not apply_modifiers:
mod_visibility = [m.show_viewport for m in ob.modifiers]
@@ -150,7 +232,9 @@ def convert_object_to_mesh(ob, apply_modifiers=True, preserve_status=True):
else:
if apply_modifiers:
new_ob = ob.copy()
- new_ob.data = simple_to_mesh(ob)
+ new_me = simple_to_mesh(ob)
+ new_ob.modifiers.clear()
+ new_ob.data = new_me
else:
new_ob = ob.copy()
new_ob.data = ob.data.copy()
@@ -164,27 +248,1209 @@ def convert_object_to_mesh(ob, apply_modifiers=True, preserve_status=True):
bpy.context.view_layer.objects.active = new_ob
return new_ob
-def simple_to_mesh(ob):
- dg = bpy.context.evaluated_depsgraph_get()
+def simple_to_mesh(ob, depsgraph=None):
+ '''
+ Convert object to mesh applying Modifiers and Shape Keys
+ '''
+ #global evaluatedDepsgraph
+ if depsgraph == None:
+ if config.evaluatedDepsgraph == None:
+ dg = bpy.context.evaluated_depsgraph_get()
+ else: dg = config.evaluatedDepsgraph
+ else:
+ dg = depsgraph
ob_eval = ob.evaluated_get(dg)
me = bpy.data.meshes.new_from_object(ob_eval, preserve_all_data_layers=True, depsgraph=dg)
me.calc_normals()
return me
-# Prevent Blender Crashes with handlers
-def set_animatable_fix_handler(self, context):
- old_handlers = []
- blender_handlers = bpy.app.handlers.render_init
- for h in blender_handlers:
- if "turn_off_animatable" in str(h):
- old_handlers.append(h)
- for h in old_handlers: blender_handlers.remove(h)
- blender_handlers.append(turn_off_animatable)
- return
+def _join_objects(context, objects, link_to_scene=True, make_active=True):
+ C = context
+ bm = bmesh.new()
-def turn_off_animatable(scene):
- for o in bpy.data.objects:
- o.tissue_tessellate.bool_run = False
- o.reaction_diffusion_settings.run = False
- #except: pass
- return
+ materials = {}
+ faces_materials = []
+ if config.evaluatedDepsgraph == None:
+ dg = C.evaluated_depsgraph_get()
+ else: dg = config.evaluatedDepsgraph
+
+ for o in objects:
+ bm.from_object(o, dg)
+ # add object's material to the dictionary
+ for m in o.data.materials:
+ if m not in materials: materials[m] = len(materials)
+ for f in o.data.polygons:
+ index = f.material_index
+ mat = o.material_slots[index].material
+ new_index = materials[mat]
+ faces_materials.append(new_index)
+ bm.verts.ensure_lookup_table()
+ bm.edges.ensure_lookup_table()
+ bm.faces.ensure_lookup_table()
+ # assign new indexes
+ for index, f in zip(faces_materials, bm.faces): f.material_index = index
+ # create object
+ me = bpy.data.meshes.new('joined')
+ bm.to_mesh(me)
+ me.update()
+ ob = bpy.data.objects.new('joined', me)
+ if link_to_scene: C.collection.objects.link(ob)
+ # make active
+ if make_active:
+ for o in C.view_layer.objects: o.select_set(False)
+ ob.select_set(True)
+ C.view_layer.objects.active = ob
+ # add materials
+ for m in materials.keys(): ob.data.materials.append(m)
+
+ return ob
+
+def join_objects(context, objects):
+ generated_data = [o.data for o in objects]
+ context.view_layer.update()
+ for o in context.view_layer.objects:
+ o.select_set(o in objects)
+ bpy.ops.object.join()
+ new_ob = context.view_layer.objects.active
+ new_ob.select_set(True)
+ for me in generated_data:
+ if me != new_ob.data:
+ bpy.data.meshes.remove(me)
+ return new_ob
+
+def join_objects(objects):
+ override = bpy.context.copy()
+ new_ob = objects[0]
+ override['active_object'] = new_ob
+ override['selected_editable_objects'] = objects
+ bpy.ops.object.join(override)
+ return new_ob
+
+def repeat_mesh(me, n):
+ '''
+ Return Mesh data adding and applying an array without offset (Slower)
+ '''
+ bm = bmesh.new()
+ for i in range(n): bm.from_mesh(me)
+ new_me = me.copy()
+ bm.to_mesh(new_me)
+ bm.free()
+ return new_me
+
+def array_mesh(ob, n):
+ '''
+ Return Mesh data adding and applying an array without offset
+ '''
+ arr = ob.modifiers.new('Repeat','ARRAY')
+ arr.relative_offset_displace[0] = 0
+ arr.count = n
+ #bpy.ops.object.modifier_apply({'active_object':ob},modifier='Repeat')
+ #me = ob.data
+ ob.modifiers.update()
+
+ dg = bpy.context.evaluated_depsgraph_get()
+ me = simple_to_mesh(ob, depsgraph=dg)
+ ob.modifiers.remove(arr)
+ return me
+
+def array_mesh_object(ob, n):
+ '''
+ Return Mesh data adding and applying an array without offset
+ '''
+ arr = ob.modifiers.new('Repeat','ARRAY')
+ arr.relative_offset_displace[0] = 0
+ arr.count = n
+ ob.modifiers.update()
+ override = bpy.context.copy()
+ override['active_object'] = ob
+ override = {'active_object': ob}
+ bpy.ops.object.modifier_apply(override, modifier=arr.name)
+ return ob
+
+
+def get_mesh_before_subs(ob):
+ not_allowed = ('FLUID_SIMULATION', 'ARRAY', 'BEVEL', 'BOOLEAN', 'BUILD',
+ 'DECIMATE', 'EDGE_SPLIT', 'MASK', 'MIRROR', 'REMESH',
+ 'SCREW', 'SOLIDIFY', 'TRIANGULATE', 'WIREFRAME', 'SKIN',
+ 'EXPLODE', 'PARTICLE_INSTANCE', 'PARTICLE_SYSTEM', 'SMOKE')
+ subs = 0
+ hide_mods = []
+ mods_visibility = []
+ for m in ob.modifiers:
+ hide_mods.append(m)
+ mods_visibility.append(m.show_viewport)
+ if m.type in ('SUBSURF','MULTIRES'):
+ hide_mods = [m]
+ subs = m.levels
+ elif m.type in not_allowed:
+ subs = 0
+ hide_mods = []
+ mods_visibility = []
+ for m in hide_mods: m.show_viewport = False
+ me = simple_to_mesh(ob)
+ for m, vis in zip(hide_mods,mods_visibility): m.show_viewport = vis
+ return me, subs
+
+# ------------------------------------------------------------------
+# MESH FUNCTIONS
+# ------------------------------------------------------------------
+
+def calc_verts_area(me):
+ n_verts = len(me.vertices)
+ n_faces = len(me.polygons)
+ vareas = np.zeros(n_verts)
+ vcount = np.zeros(n_verts)
+ parea = [0]*n_faces
+ pverts = [0]*n_faces*4
+ me.polygons.foreach_get('area', parea)
+ me.polygons.foreach_get('vertices', pverts)
+ parea = np.array(parea)
+ pverts = np.array(pverts).reshape((n_faces, 4))
+ for a, verts in zip(parea,pverts):
+ vareas[verts] += a
+ vcount[verts] += 1
+ return vareas / vcount
+
+def calc_verts_area_bmesh(me):
+ bm = bmesh.new()
+ bm.from_mesh(me)
+ bm.verts.ensure_lookup_table()
+ verts_area = np.zeros(len(me.vertices))
+ for v in bm.verts:
+ area = 0
+ faces = v.link_faces
+ for f in faces:
+ area += f.calc_area()
+ verts_area[v.index] = area if area == 0 else area/len(faces)
+ bm.free()
+ return verts_area
+
+import time
+
+def get_patches____(me_low, me_high, sides, subs, bool_selection, bool_material_id, material_id):
+ nv = len(me_low.vertices) # number of vertices
+ ne = len(me_low.edges) # number of edges
+ nf = len(me_low.polygons) # number of polygons
+ n = 2**subs + 1 # number of vertices along each patch edge
+ nev = ne * n # number of vertices along the subdivided edges
+ nevi = nev - 2*ne # internal vertices along subdivided edges
+
+ n0 = 2**(subs-1) - 1
+
+ # filtered polygonal faces
+ poly_sides = np.array([len(p.vertices) for p in me_low.polygons])
+ mask = poly_sides == sides
+ if bool_material_id:
+ mask_material = [1]*nf
+ me_low.polygons.foreach_get('material_index',mask_material)
+ mask_material = np.array(mask_material) == material_id
+ mask = np.logical_and(mask,mask_material)
+ if bool_selection:
+ mask_selection = [True]*nf
+ me_low.polygons.foreach_get('select',mask_selection)
+ mask_selection = np.array(mask_selection)
+ mask = np.logical_and(mask,mask_selection)
+ polys = np.array(me_low.polygons)[mask]
+ mult = n0**2 + n0
+ ps = poly_sides * mult + 1
+ ps = np.insert(ps,0,nv + nevi, axis=0)[:-1]
+ ips = ps.cumsum()[mask] # incremental polygon sides
+ nf = len(polys)
+
+ # when subdivided quad faces follows a different pattern
+ if sides == 4:
+ n_patches = nf
+ else:
+ n_patches = nf*sides
+
+ if sides == 4:
+ patches = np.zeros((nf,n,n),dtype='int')
+ verts = [[vv for vv in p.vertices] for p in polys if len(p.vertices) == sides]
+ verts = np.array(verts).reshape((-1,sides))
+
+ # filling corners
+
+ patches[:,0,0] = verts[:,0]
+ patches[:,n-1,0] = verts[:,1]
+ patches[:,n-1,n-1] = verts[:,2]
+ patches[:,0,n-1] = verts[:,3]
+
+ if subs != 0:
+ shift_verts = np.roll(verts, -1, axis=1)[:,:,None]
+ edge_keys = np.concatenate((shift_verts, verts[:,:,None]), axis=2)
+ edge_keys.sort()
+
+ edge_verts = np.array(me_low.edge_keys) # edges keys
+ edges_index = np.zeros((ne,ne),dtype='int')
+ edges_index[edge_verts[:,0],edge_verts[:,1]] = np.arange(ne)
+
+ evi = np.arange(nevi) + nv
+ evi = evi.reshape(ne,n-2) # edges inner verts
+ straight = np.arange(n-2)+1
+ inverted = np.flip(straight)
+ inners = np.array([[j*(n-2)+i for j in range(n-2)] for i in range(n-2)])
+
+ ek1 = np.array(me_high.edge_keys) # edges keys
+ ids0 = np.arange(ne)*(n-1) # edge keys highres
+ keys0 = ek1[ids0] # first inner edge
+ keys1 = ek1[ids0 + n-2] # last inner edge
+ keys = np.concatenate((keys0,keys1))
+ pick_verts = np.array((inverted,straight))
+
+ patch_index = np.arange(nf)[:,None,None]
+
+ # edge 0
+ e0 = edge_keys[:,0] # get edge key (faces, 2)
+ edge_id = edges_index[e0[:,0],e0[:,1]] # edge index
+ edge_verts = evi[edge_id] # indexes of inner vertices
+ test = np.concatenate((verts[:,0,None], edge_verts[:,0,None]),axis=1)
+ dir = (test[:,None] == keys).all(2).any(1).astype('int8')
+ #dir = np.full(verts[:,0].shape, 0, dtype='int8')
+ ids = pick_verts[dir][:,None,:] # indexes order along the side
+ patches[patch_index,ids,0] = edge_verts[:,None,:] # assign indexes
+ #patches[:,msk] = inverted # np.flip(patches[msk])
+
+ # edge 1
+ e0 = edge_keys[:,1] # get edge key (faces, 2)
+ edge_id = edges_index[e0[:,0],e0[:,1]] # edge index
+ edge_verts = evi[edge_id] # indexes of inner vertices
+ test = np.concatenate((verts[:,1,None], edge_verts[:,0,None]),axis=1)
+ dir = (test[:,None] == keys).all(2).any(1).astype('int8')
+ ids = pick_verts[dir][:,:,None] # indexes order along the side
+ patches[patch_index,n-1,ids] = edge_verts[:,:,None] # assign indexes
+
+ # edge 2
+ e0 = edge_keys[:,2] # get edge key (faces, 2)
+ edge_id = edges_index[e0[:,0],e0[:,1]] # edge index
+ edge_verts = evi[edge_id] # indexes of inner vertices
+ test = np.concatenate((verts[:,3,None], edge_verts[:,0,None]),axis=1)
+ dir = (test[:,None] == keys).all(2).any(1).astype('int8')
+ ids = pick_verts[dir][:,None,:] # indexes order along the side
+ patches[patch_index,ids,n-1] = edge_verts[:,None,:] # assign indexes
+
+ # edge 3
+ e0 = edge_keys[:,3] # get edge key (faces, 2)
+ edge_id = edges_index[e0[:,0],e0[:,1]] # edge index
+ edge_verts = evi[edge_id] # indexes of inner vertices
+ test = np.concatenate((verts[:,0,None], edge_verts[:,0,None]),axis=1)
+ dir = (test[:,None] == keys).all(2).any(1).astype('int8')
+ ids = pick_verts[dir][:,:,None] # indexes order along the side
+ patches[patch_index,0,ids] = edge_verts[:,:,None] # assign indexes
+
+ # fill inners
+ patches[:,1:-1,1:-1] = inners[None,:,:] + ips[:,None,None]
+
+ #end_time = time.time()
+ #print('Tissue: Got Patches in {:.4f} sec'.format(end_time-start_time))
+
+ return patches, mask
+
+def tessellate_prepare_component(ob1, props):
+ mode = props['mode']
+ bounds_x = props['bounds_x']
+ bounds_y = props['bounds_y']
+ scale_mode = props['scale_mode']
+ normals_mode = props['normals_mode']
+ zscale = props['zscale']
+ offset = props['offset']
+ use_origin_offset = props['use_origin_offset']
+ bool_shapekeys = props['bool_shapekeys']
+
+ thres = 0.005
+
+ me1 = ob1.data
+
+ # Component statistics
+ n_verts = len(me1.vertices)
+
+ # Component bounding box
+ min_c = Vector((0, 0, 0))
+ max_c = Vector((0, 0, 0))
+ first = True
+ for v in me1.vertices:
+ vert = v.co
+ if vert[0] < min_c[0] or first:
+ min_c[0] = vert[0]
+ if vert[1] < min_c[1] or first:
+ min_c[1] = vert[1]
+ if vert[2] < min_c[2] or first:
+ min_c[2] = vert[2]
+ if vert[0] > max_c[0] or first:
+ max_c[0] = vert[0]
+ if vert[1] > max_c[1] or first:
+ max_c[1] = vert[1]
+ if vert[2] > max_c[2] or first:
+ max_c[2] = vert[2]
+ first = False
+ bb = max_c - min_c
+
+ # adaptive XY
+ verts1 = []
+ for v in me1.vertices:
+ if mode == 'BOUNDS':
+ vert = v.co - min_c # (ob1.matrix_world * v.co) - min_c
+ if use_origin_offset: vert[2] = v.co[2]
+ vert[0] = vert[0] / bb[0] if bb[0] != 0 else 0.5
+ vert[1] = vert[1] / bb[1] if bb[1] != 0 else 0.5
+ if scale_mode == 'CONSTANT' or normals_mode in ('OBJECT', 'SHAPEKEYS'):
+ if not use_origin_offset:
+ vert[2] = vert[2] / bb[2] if bb[2] != 0 else 0
+ vert[2] = vert[2] - 0.5 + offset * 0.5
+ else:
+ if not use_origin_offset:
+ vert[2] = vert[2] + (-0.5 + offset * 0.5) * bb[2]
+ vert[2] *= zscale
+ elif mode == 'LOCAL':
+ vert = v.co.xyz
+ vert[2] *= zscale
+ #vert[2] = (vert[2] - min_c[2] + (-0.5 + offset * 0.5) * bb[2]) * zscale
+ elif mode == 'GLOBAL':
+ vert = ob1.matrix_world @ v.co
+ vert[2] *= zscale
+ try:
+ for sk in me1.shape_keys.key_blocks:
+ sk.data[v.index].co = ob1.matrix_world @ sk.data[v.index].co
+ except: pass
+ v.co = vert
+
+ # ShapeKeys
+ if bool_shapekeys and ob1.data.shape_keys:
+ for sk in ob1.data.shape_keys.key_blocks:
+ source = sk.data
+ _sk_uv_quads = [0]*len(verts1)
+ _sk_uv = [0]*len(verts1)
+ for i, sk_v in enumerate(source):
+ if mode == 'BOUNDS':
+ sk_vert = sk_v.co - min_c
+ if use_origin_offset: sk_vert[2] = sk_v.co[2]
+ sk_vert[0] = (sk_vert[0] / bb[0] if bb[0] != 0 else 0.5)
+ sk_vert[1] = (sk_vert[1] / bb[1] if bb[1] != 0 else 0.5)
+ if scale_mode == 'CONSTANT' or normals_mode in ('OBJECT', 'SHAPEKEYS'):
+ if not use_origin_offset:
+ sk_vert[2] = (sk_vert[2] / bb[2] if bb[2] != 0 else sk_vert[2])
+ sk_vert[2] = sk_vert[2] - 0.5 + offset * 0.5
+ else:
+ if not use_origin_offset:
+ sk_vert[2] = sk_vert[2] + (- 0.5 + offset * 0.5) * bb[2]
+ sk_vert[2] *= zscale
+ elif mode == 'LOCAL':
+ sk_vert = sk_v.co
+ sk_vert[2] *= zscale
+ elif mode == 'GLOBAL':
+ sk_vert = sk_v.co
+ sk_vert[2] *= zscale
+ sk_v.co = sk_vert
+
+ if mode != 'BOUNDS' and (bounds_x != 'EXTEND' or bounds_y != 'EXTEND'):
+ ob1.active_shape_key_index = 0
+ bm = bmesh.new()
+ bm.from_mesh(me1)
+ # Bound X
+ planes_co = []
+ planes_no = []
+ bounds = []
+ if bounds_x != 'EXTEND':
+ planes_co += [(0,0,0), (1,0,0)]
+ planes_no += [(-1,0,0), (1,0,0)]
+ bounds += [bounds_x, bounds_x]
+ if bounds_y != 'EXTEND':
+ planes_co += [(0,0,0), (0,1,0)]
+ planes_no += [(0,-1,0), (0,1,0)]
+ bounds += [bounds_y, bounds_y]
+ for co, norm, bound in zip(planes_co, planes_no, bounds):
+ count = 0
+ while True:
+ moved = 0
+ original_edges = list(bm.edges)
+ geom = list(bm.verts) + list(bm.edges) + list(bm.faces)
+ bisect = bmesh.ops.bisect_plane(bm, geom=geom, dist=0,
+ plane_co=co, plane_no=norm, use_snap_center=False,
+ clear_outer=bound=='CLIP', clear_inner=False
+ )
+ geom = bisect['geom']
+ cut_edges = [g for g in bisect['geom_cut'] if type(g)==bmesh.types.BMEdge]
+ cut_verts = [g for g in bisect['geom_cut'] if type(g)==bmesh.types.BMVert]
+
+ if bound!='CLIP':
+ for e in cut_edges:
+ seam = True
+ # Prevent glitches
+ for e1 in original_edges:
+ match_00 = (e.verts[0].co-e1.verts[0].co).length < thres
+ match_11 = (e.verts[1].co-e1.verts[1].co).length < thres
+ match_01 = (e.verts[0].co-e1.verts[1].co).length < thres
+ match_10 = (e.verts[1].co-e1.verts[0].co).length < thres
+ if (match_00 and match_11) or (match_01 and match_10):
+ seam = False
+ break
+ e.seam = seam
+
+ if bound == 'CYCLIC':
+ geom_verts = []
+ if norm == (-1,0,0):
+ geom_verts = [v for v in bm.verts if v.co.x < 0]
+ if norm == (1,0,0):
+ geom_verts = [v for v in bm.verts if v.co.x > 1]
+ if norm == (0,-1,0):
+ geom_verts = [v for v in bm.verts if v.co.y < 0]
+ if norm == (0,1,0):
+ geom_verts = [v for v in bm.verts if v.co.y > 1]
+ if len(geom_verts) > 0:
+ geom = bmesh.ops.region_extend(bm, geom=geom_verts,
+ use_contract=False, use_faces=False, use_face_step=True
+ )
+ geom = bmesh.ops.split(bm, geom=geom['geom'], use_only_faces=False)
+ vec = Vector(norm)
+ move_verts = [g for g in geom['geom'] if type(g)==bmesh.types.BMVert]
+ bmesh.ops.translate(bm, vec=-vec, verts=move_verts)
+ for key in bm.verts.layers.shape.keys():
+ sk = bm.verts.layers.shape.get(key)
+ for v in move_verts:
+ v[sk] -= vec
+ moved += len(move_verts)
+ count += 1
+ if moved == 0 or count > 1000: break
+ bm.to_mesh(me1)
+
+ com_area = bb[0]*bb[1]
+ return ob1, com_area
+
+def get_quads(me, bool_selection):
+    """
+    Decompose the polygons of a mesh into 2x2 patches of vertex indices.
+
+    Triangles become degenerate quads (one corner repeated), quads map
+    directly, and n-gons are peeled into several quads from both ends of
+    the vertex ring.
+
+    :arg me: Source mesh.
+    :type me: :class:'bpy.types.Mesh'
+    :arg bool_selection: Use the face selection state as filter.
+    :type bool_selection: bool
+    :return: (verts, mask, materials) — 2x2 vertex-index patches, the
+        boolean face mask and the material index of each patch.
+    :rtype: tuple
+    """
+    nf = len(me.polygons)  # NOTE(review): unused in the visible code
+
+    verts = []
+    materials = []
+    mask = []
+    for poly in me.polygons:
+        p = list(poly.vertices)
+        sides = len(p)
+        if sides == 3:
+            # degenerate quad: the last corner appears twice
+            verts.append([[p[0], p[-1]], [p[1], p[2]]])
+            materials.append(poly.material_index)
+            mask.append(poly.select if bool_selection else True)
+        elif sides == 4:
+            verts.append([[p[0], p[3]], [p[1], p[2]]])
+            materials.append(poly.material_index)
+            mask.append(poly.select if bool_selection else True)
+        else:
+            # n-gon: repeatedly peel a quad using the two ends of the
+            # remaining vertex ring (mutates the local copy p)
+            while True:
+                new_poly = [[p[-2], p.pop(-1)], [p[1], p.pop(0)]]
+                verts.append(new_poly)
+                materials.append(poly.material_index)
+                mask.append(poly.select if bool_selection else True)
+                if len(p) < 3: break
+    mask = np.array(mask)
+    materials = np.array(materials)[mask]
+    verts = np.array(verts)[mask]
+    return verts, mask, materials
+
+def get_patches(me_low, me_high, sides, subs, bool_selection): #, bool_material_id, material_id):
+    """
+    Build (n x n) grids of vertex indices ("patches") that map the faces of
+    a low-resolution mesh onto the vertices of its subdivided counterpart.
+
+    :arg me_low: Mesh before subdivision.
+    :arg me_high: Same mesh after 'subs' levels of subdivision.
+    :arg sides: Number of sides of the faces to process (only 4 is filled).
+    :arg subs: Subdivision levels; patch side is n = 2**subs + 1.
+    :arg bool_selection: Use the face selection state as filter.
+    :return: (patches, mask, materials)
+    :rtype: tuple
+    """
+    nv = len(me_low.vertices) # number of vertices
+    ne = len(me_low.edges) # number of edges
+    nf = len(me_low.polygons) # number of polygons
+    n = 2**subs + 1
+    nev = ne * n # number of vertices along the subdivided edges
+    nevi = nev - 2*ne # internal vertices along subdivided edges
+
+    n0 = 2**(subs-1) - 1
+
+    # filtered polygonal faces
+    poly_sides = [0]*nf
+    me_low.polygons.foreach_get('loop_total',poly_sides)
+    poly_sides = np.array(poly_sides)
+    mask = poly_sides == sides
+
+    if bool_selection:
+        # NOTE(review): this REPLACES the sides-based mask with the
+        # selection mask instead of intersecting them — TODO confirm intent
+        mask_selection = [True]*nf
+        me_low.polygons.foreach_get('select',mask_selection)
+        mask = np.array(mask_selection)
+
+    materials = [1]*nf
+    me_low.polygons.foreach_get('material_index',materials)
+    materials = np.array(materials)[mask]
+
+    polys = np.array(me_low.polygons)[mask]
+    mult = n0**2 + n0
+    ps = poly_sides * mult + 1
+    ps = np.insert(ps,0,nv + nevi, axis=0)[:-1]
+    ips = ps.cumsum()[mask] # incremental polygon sides
+    nf = len(polys)
+
+    # when subdivided quad faces follow a different pattern
+    if sides == 4:
+        n_patches = nf
+    else:
+        n_patches = nf*sides
+
+    # NOTE(review): only the quad branch assigns 'patches'; calling this
+    # with sides != 4 raises NameError at the return — presumably callers
+    # only pass sides == 4, verify.
+    if sides == 4:
+        patches = np.empty((nf,n,n),dtype='int')
+        verts = [list(p.vertices) for p in polys if len(p.vertices) == sides]
+        verts = np.array(verts).reshape((-1,sides))
+
+        # filling corners
+
+        patches[:,0,0] = verts[:,0]
+        patches[:,n-1,0] = verts[:,1]
+        patches[:,n-1,n-1] = verts[:,2]
+        patches[:,0,n-1] = verts[:,3]
+
+        if subs != 0:
+            shift_verts = np.roll(verts, -1, axis=1)[:,:,None]
+            edge_keys = np.concatenate((shift_verts, verts[:,:,None]), axis=2)
+            edge_keys.sort()
+
+            edge_verts = np.array(me_low.edge_keys) # edges keys
+            # NOTE(review): indexed by vertex-id pairs but sized (ne, ne) —
+            # assumes nv <= ne, TODO confirm for open meshes
+            edges_index = np.empty((ne,ne),dtype='int')
+            edges_index[edge_verts[:,0],edge_verts[:,1]] = np.arange(ne)
+
+            evi = np.arange(nevi) + nv
+            evi = evi.reshape(ne,n-2) # edges inner verts
+            straight = np.arange(n-2)+1
+            inverted = np.flip(straight)
+            inners = np.array([[j*(n-2)+i for j in range(n-2)] for i in range(n-2)])
+
+            ek1 = me_high.edge_keys # edges keys
+            ek1 = np.array(ek1) # edge keys highres
+            keys0 = ek1[np.arange(ne)*(n-1)] # first inner edge
+            keys1 = ek1[np.arange(ne)*(n-1)+n-2] # last inner edge
+            edges_dir = np.zeros((nev,nev),dtype='bool') # Better memory usage
+            #edges_dir = np.zeros((nev,nev),dtype='int8') ### Memory usage not efficient, dictionary as alternative?
+            edges_dir[keys0[:,0], keys0[:,1]] = 1
+            edges_dir[keys1[:,0], keys1[:,1]] = 1
+            pick_verts = np.array((inverted,straight))
+
+            patch_index = np.arange(nf)[:,None,None]
+
+            # edge 0
+            e0 = edge_keys[:,0] # get edge key (faces, 2)
+            edge_id = edges_index[e0[:,0],e0[:,1]] # edge index
+            edge_verts = evi[edge_id] # indexes of inner vertices
+            dir = edges_dir[verts[:,0], edge_verts[:,0]] # check correct direction
+            ids = pick_verts[dir.astype('int8')][:,None,:] # indexes order along the side
+            patches[patch_index,ids,0] = edge_verts[:,None,:] # assign indexes
+
+            # edge 1
+            e0 = edge_keys[:,1] # get edge key (faces, 2)
+            edge_id = edges_index[e0[:,0],e0[:,1]] # edge index
+            edge_verts = evi[edge_id] # indexes of inner vertices
+            dir = edges_dir[verts[:,1], edge_verts[:,0]] # check correct direction
+            ids = pick_verts[dir.astype('int8')][:,:,None] # indexes order along the side
+            patches[patch_index,n-1,ids] = edge_verts[:,:,None] # assign indexes
+
+            # edge 2
+            e0 = edge_keys[:,2] # get edge key (faces, 2)
+            edge_id = edges_index[e0[:,0],e0[:,1]] # edge index
+            edge_verts = evi[edge_id] # indexes of inner vertices
+            dir = edges_dir[verts[:,3], edge_verts[:,0]] # check correct direction
+            ids = pick_verts[dir.astype('int8')][:,None,:] # indexes order along the side
+            patches[patch_index,ids,n-1] = edge_verts[:,None,:] # assign indexes
+
+            # edge 3
+            e0 = edge_keys[:,3] # get edge key (faces, 2)
+            edge_id = edges_index[e0[:,0],e0[:,1]] # edge index
+            edge_verts = evi[edge_id] # indexes of inner vertices
+            dir = edges_dir[verts[:,0], edge_verts[:,0]] # check correct direction
+            ids = pick_verts[dir.astype('int8')][:,:,None] # indexes order along the side
+            patches[patch_index,0,ids] = edge_verts[:,:,None] # assign indexes
+
+            # fill inners
+            patches[:,1:-1,1:-1] = inners[None,:,:] + ips[:,None,None]
+
+    return patches, mask, materials
+
+def get_vertices_numpy(mesh):
+    '''
+    Create a numpy array with the vertices of a given mesh.
+
+    :return: Array of vertex coordinates, shape (n_verts, 3).
+    :rtype: :class:'numpy.ndarray'
+    '''
+    n_verts = len(mesh.vertices)
+    verts = [0]*n_verts*3  # flat buffer: 3 floats per vertex
+    mesh.vertices.foreach_get('co', verts)
+    verts = np.array(verts).reshape((n_verts,3))
+    return verts
+
+def get_vertices_and_normals_numpy(mesh):
+    '''
+    Create two numpy arrays with the vertices and the normals of a given mesh.
+
+    :return: (verts, normals), both of shape (n_verts, 3).
+    :rtype: tuple
+    '''
+    n_verts = len(mesh.vertices)
+    verts = [0]*n_verts*3    # flat buffers: 3 floats per vertex
+    normals = [0]*n_verts*3
+    mesh.vertices.foreach_get('co', verts)
+    mesh.vertices.foreach_get('normal', normals)
+    verts = np.array(verts).reshape((n_verts,3))
+    normals = np.array(normals).reshape((n_verts,3))
+    return verts, normals
+
+def get_normals_numpy(mesh):
+    '''
+    Create a numpy array with the normals of a given mesh.
+
+    :return: Array of vertex normals, shape (n_verts, 3).
+    :rtype: :class:'numpy.ndarray'
+    '''
+    n_verts = len(mesh.vertices)
+    normals = [0]*n_verts*3  # flat buffer: 3 floats per vertex
+    mesh.vertices.foreach_get('normal', normals)
+    normals = np.array(normals).reshape((n_verts,3))
+    return normals
+
+def get_edges_numpy(mesh):
+    '''
+    Create a numpy array with the edges of a given mesh.
+
+    :return: Array of vertex-index pairs, shape (n_edges, 2).
+    :rtype: :class:'numpy.ndarray'
+    '''
+    n_edges = len(mesh.edges)
+    edges = [0]*n_edges*2  # flat buffer: 2 indices per edge
+    mesh.edges.foreach_get('vertices', edges)
+    edges = np.array(edges).reshape((n_edges,2)).astype('int')
+    return edges
+
+def get_edges_id_numpy(mesh):
+    '''
+    Create a numpy array with edge vertices and the edge index.
+
+    :return: Array of shape (n_edges, 3): vert0, vert1, edge index.
+    :rtype: :class:'numpy.ndarray'
+    '''
+    n_edges = len(mesh.edges)
+    edges = [0]*n_edges*2
+    mesh.edges.foreach_get('vertices', edges)
+    edges = np.array(edges).reshape((n_edges,2))
+    indexes = np.arange(n_edges).reshape((n_edges,1))
+    # append the edge index as a third column
+    edges = np.concatenate((edges,indexes), axis=1)
+    return edges
+
+def get_polygons_select_numpy(mesh):
+ n_polys = len(mesh.polygons)
+ selections = [0]*n_polys*2
+ mesh.polygons.foreach_get('select', selections)
+ selections = np.array(selections)
+ return selections
+
+def get_attribute_numpy(elements_list, attribute='select', mult=1):
+    '''
+    Generate a numpy array getting attribute from a list of element using
+    the foreach_get() function.
+
+    :arg elements_list: A bpy collection supporting foreach_get().
+    :arg attribute: Name of the attribute to read.
+    :arg mult: Number of values per element (e.g. 3 for 'co').
+    :return: Flat array, or shape (n_elements, mult) when mult > 1.
+    :rtype: :class:'numpy.ndarray'
+    '''
+    n_elements = len(elements_list)
+    values = [0]*n_elements*mult
+    elements_list.foreach_get(attribute, values)
+    values = np.array(values)
+    if mult > 1: values = values.reshape((n_elements,mult))
+    return values
+
+def get_vertices(mesh):
+    '''
+    Return the vertex coordinates of a mesh as a list of Vectors.
+    '''
+    n_verts = len(mesh.vertices)
+    verts = [0]*n_verts*3
+    mesh.vertices.foreach_get('co', verts)
+    verts = np.array(verts).reshape((n_verts,3))
+    # convert each row back to a mathutils Vector
+    verts = [Vector(v) for v in verts]
+    return verts
+
+def get_faces(mesh):
+    '''
+    Return the polygons of a mesh as lists of vertex indices.
+    '''
+    faces = [[v for v in f.vertices] for f in mesh.polygons]
+    return faces
+
+def get_faces_numpy(mesh):
+    '''
+    Return the polygons of a mesh as a numpy array of vertex indices.
+    (Uniform face size is assumed for a regular array — TODO confirm.)
+    '''
+    faces = [[v for v in f.vertices] for f in mesh.polygons]
+    return np.array(faces)
+
+def get_faces_edges_numpy(mesh):
+ faces = [v.edge_keys for f in mesh.polygons]
+ return np.array(faces)
+
+def find_curves(edges, n_verts):
+    """
+    Chain an edge list into polylines (lists of vertex indices).
+
+    :arg edges: Iterable of (v0, v1) vertex-index pairs.
+    :arg n_verts: Total number of vertices.
+    :return: List of curves; a closed curve repeats its first index at the end.
+    :rtype: list
+    """
+    # adjacency: vertex index -> list of neighbour vertex indices
+    verts_dict = {key:[] for key in range(n_verts)}
+    for e in edges:
+        verts_dict[e[0]].append(e[1])
+        verts_dict[e[1]].append(e[0])
+    curves = []
+    while True:
+        if len(verts_dict) == 0: break
+        # next starting point
+        v = list(verts_dict.keys())[0]
+        # neighbors
+        v01 = verts_dict[v]
+        if len(v01) == 0:
+            # isolated vertex: drop it
+            verts_dict.pop(v)
+            continue
+        curve = []
+        if len(v01) > 1: curve.append(v01[1])    # add neighbors
+        curve.append(v)    # add starting point
+        curve.append(v01[0])    # add neighbors
+        verts_dict.pop(v)
+        # start building curve
+        while True:
+            #last_point = curve[-1]
+            #if last_point not in verts_dict: break
+
+            # try to change direction if needed
+            if curve[-1] in verts_dict: pass
+            elif curve[0] in verts_dict: curve.reverse()
+            else: break
+
+            # neighbors points
+            last_point = curve[-1]
+            v01 = verts_dict[last_point]
+
+            # curve end
+            if len(v01) == 1:
+                verts_dict.pop(last_point)
+                if curve[0] in verts_dict: continue
+                else: break
+
+            # chose next point
+            new_point = None
+            if v01[0] == curve[-2]: new_point = v01[1]
+            elif v01[1] == curve[-2]: new_point = v01[0]
+            #else: break
+
+            #if new_point != curve[1]:
+            curve.append(new_point)
+            verts_dict.pop(last_point)
+            # closed loop detected: first == last
+            if curve[0] == curve[-1]:
+                verts_dict.pop(new_point)
+                break
+        curves.append(curve)
+    return curves
+
+def curve_from_points(points, name='Curve'):
+ curve = bpy.data.curves.new(name,'CURVE')
+ for c in points:
+ s = curve.splines.new('POLY')
+ s.points.add(len(c))
+ for i,p in enumerate(c): s.points[i].co = p.xyz + [1]
+ ob_curve = bpy.data.objects.new(name,curve)
+ return ob_curve
+
+def curve_from_pydata(points, radii, indexes, name='Curve', skip_open=False, merge_distance=1, set_active=True, only_data=False):
+    """
+    Create a Curve (data or linked object) from raw point/radius data.
+
+    :arg points: Sequence of 3D coordinates, indexed by 'indexes'.
+    :arg radii: Per-point radii; if indexing fails, radii are skipped.
+    :arg indexes: List of Lists of point indices, one list per spline.
+        NOTE(review): closed splines get their duplicated last index
+        popped — this mutates the caller's lists.
+    :arg skip_open: Skip non-cyclic splines.
+    :arg merge_distance: Collapse consecutive points closer than this.
+    :arg only_data: Return the Curve data instead of a linked Object.
+    :return: Curve data or the new curve object.
+    """
+    curve = bpy.data.curves.new(name,'CURVE')
+    curve.dimensions = '3D'
+    use_rad = True
+    for c in indexes:
+        bool_cyclic = c[0] == c[-1]
+        if bool_cyclic: c.pop(-1)
+        # cleanup
+        pts = np.array([points[i] for i in c])
+        try:
+            rad = np.array([radii[i] for i in c])
+        except:
+            # best-effort: no (usable) radii provided
+            use_rad = False
+            rad = 1
+        if merge_distance > 0:
+            # drop points until the accumulated distance exceeds the threshold
+            pts1 = np.roll(pts,1,axis=0)
+            dist = np.linalg.norm(pts1-pts, axis=1)
+            count = 0
+            n = len(dist)
+            mask = np.ones(n).astype('bool')
+            for i in range(n):
+                count += dist[i]
+                if count > merge_distance: count = 0
+                else: mask[i] = False
+            pts = pts[mask]
+            if use_rad: rad = rad[mask]
+
+        if skip_open and not bool_cyclic: continue
+        s = curve.splines.new('POLY')
+        n_pts = len(pts)
+        s.points.add(n_pts-1)
+        # spline points are 4D: append w = 1 to every coordinate
+        w = np.ones(n_pts).reshape((n_pts,1))
+        co = np.concatenate((pts,w),axis=1).reshape((n_pts*4))
+        s.points.foreach_set('co',co)
+        if use_rad: s.points.foreach_set('radius',rad)
+        s.use_cyclic_u = bool_cyclic
+    if only_data:
+        return curve
+    else:
+        ob_curve = bpy.data.objects.new(name,curve)
+        bpy.context.collection.objects.link(ob_curve)
+        if set_active:
+            bpy.context.view_layer.objects.active = ob_curve
+        return ob_curve
+
+def update_curve_from_pydata(curve, points, normals, radii, indexes, merge_distance=1, pattern=[1,0], depth=0.1, offset=0):
+    """
+    Rebuild the splines of an existing Curve from raw data, optionally
+    displacing points along their normals in an alternating pattern.
+
+    :arg curve: Curve data to rebuild (existing splines are cleared).
+    :arg pattern: (up_count, down_count) run lengths of the displacement.
+        NOTE(review): a zero entry triggers numpy divide-by-zero warnings
+        below; the result is an empty selection so it "works", but confirm.
+    :arg depth: Displacement amplitude.
+    :arg offset: Bias of the displacement (-1..1).
+    """
+    curve.splines.clear()
+    use_rad = True
+    for ic, c in enumerate(indexes):
+        bool_cyclic = c[0] == c[-1]
+        # NOTE(review): mutates the caller's index lists
+        if bool_cyclic: c.pop(-1)
+
+        # cleanup
+        pts = np.array([points[i] for i in c if i != None])
+        nor = np.array([normals[i] for i in c if i != None])
+        try:
+            rad = np.array([radii[i] for i in c if i != None])
+        except:
+            # best-effort: no (usable) radii provided
+            use_rad = False
+            rad = 1
+        if merge_distance > 0:
+            # drop points until the accumulated distance exceeds the threshold
+            pts1 = np.roll(pts,1,axis=0)
+            dist = np.linalg.norm(pts1-pts, axis=1)
+            count = 0
+            n = len(dist)
+            mask = np.ones(n).astype('bool')
+            for i in range(n):
+                count += dist[i]
+                if count > merge_distance: count = 0
+                else: mask[i] = False
+            pts = pts[mask]
+            nor = nor[mask]
+            if use_rad: rad = rad[mask]
+        #if skip_open and not bool_cyclic: continue
+        n_pts = len(pts)
+        # select point runs for the "up" (patt0) and "down" (patt1) strokes
+        series = np.arange(n_pts)
+        patt1 = series + (series-series%pattern[1])/pattern[1]*pattern[0]+pattern[0]
+        patt1 = patt1[patt1<n_pts].astype('int')
+        patt0 = series + (series-series%pattern[0])/pattern[0]*pattern[1]
+        patt0 = patt0[patt0<n_pts].astype('int')
+        nor[patt0] *= 0.5*depth*(1 + offset)
+        nor[patt1] *= 0.5*depth*(-1 + offset)
+        if pattern[0]*pattern[1] != 0: pts += nor
+        s = curve.splines.new('POLY')
+        s.points.add(n_pts-1)
+        # spline points are 4D: append w = 1 to every coordinate
+        w = np.ones(n_pts).reshape((n_pts,1))
+        co = np.concatenate((pts,w),axis=1).reshape((n_pts*4))
+        s.points.foreach_set('co',co)
+        if use_rad: s.points.foreach_set('radius',rad)
+        s.use_cyclic_u = bool_cyclic
+
+def loops_from_bmesh(edges):
+    """
+    Return one or more loops given some starting edges.
+    :arg edges: Edges used as seeds.
+    :type edges: List of :class:'bmesh.types.BMEdge'
+    :return: Elements in each loop (Verts, Edges), where:
+        - Verts - List of Lists of :class:'bmesh.types.BMVert'
+        - Edges - List of Lists of :class:'bmesh.types.BMEdge'
+    :rtype: tuple
+    """
+    todo_edges = list(edges)
+    #todo_edges = [e.index for e in bm.edges]
+    vert_loops = []
+    edge_loops = []
+    while len(todo_edges) > 0:
+        edge = todo_edges[0]
+        vert_loop, edge_loop = run_edge_loop(edge)
+        # remove every edge already consumed by this loop from the queue
+        for e in edge_loop:
+            try: todo_edges.remove(e)
+            except: pass
+        edge_loops.append(edge_loop)
+        vert_loops.append(vert_loop)
+        #if len(todo_edges) == 0: break
+    return vert_loops, edge_loops
+
+def run_edge_loop_direction(edge,vert):
+    """
+    Return vertices and edges along a loop in a specific direction.
+    :arg edge: Edge used as seed.
+    :type edge: :class:'bmesh.types.BMEdge'
+    :arg vert: Vertex of the Edge used for the direction.
+    :type vert: :class:'bmesh.types.BMVert'
+    :return: Elements in the loop (Verts, Edges), where:
+        - Verts - List of :class:'bmesh.types.BMVert'
+        - Edges - List of :class:'bmesh.types.BMEdge'
+    :rtype: tuple
+    """
+    edge0 = edge
+    edge_loop = [edge]
+    vert_loop = [vert]
+    while True:
+        link_edges = list(vert.link_edges)
+        link_edges.remove(edge)
+        n_edges = len(link_edges)
+        if n_edges == 1:
+            # boundary-style continuation: only one way to go
+            edge = link_edges[0]
+        elif n_edges < 4:
+            link_faces = edge.link_faces
+            if len(link_faces) == 0: break
+            edge = None
+            # pick the edge that shares no face with the current one
+            # (the "opposite" edge across the vertex)
+            for e in link_edges:
+                link_faces1 = e.link_faces
+                if len(link_faces) == len(link_faces1):
+                    common_faces = [f for f in link_faces1 if f in link_faces]
+                    if len(common_faces) == 0:
+                        edge = e
+                        break
+        else: break
+        if edge == None: break
+        edge_loop.append(edge)
+        vert = edge.other_vert(vert)
+        vert_loop.append(vert)
+        # back at the seed edge: the loop is closed
+        if edge == edge0: break
+    return vert_loop, edge_loop
+
+def run_edge_loop(edge):
+    """
+    Return vertices and edges along a loop in both directions.
+    :arg edge: Edge used as seed.
+    :type edge: :class:'bmesh.types.BMEdge'
+    :return: Elements in the loop (Verts, Edges), where:
+        - Verts - List of :class:'bmesh.types.BMVert'
+        - Edges - List of :class:'bmesh.types.BMEdge'
+    :rtype: tuple
+    """
+    vert0 = edge.verts[0]
+    vert_loop0, edge_loop0 = run_edge_loop_direction(edge, vert0)
+    if len(edge_loop0) == 1 or edge_loop0[0] != edge_loop0[-1]:
+        # open loop: walk the other direction too and stitch both halves
+        vert1 = edge.verts[1]
+        vert_loop1, edge_loop1 = run_edge_loop_direction(edge, vert1)
+        edge_loop0.reverse()
+        vert_loop0.reverse()
+        edge_loop = edge_loop0[:-1] + edge_loop1
+        vert_loop = vert_loop0 + vert_loop1
+    else:
+        # closed loop: drop the duplicated seed edge
+        edge_loop = edge_loop0[1:]
+        vert_loop = vert_loop0
+    return vert_loop, edge_loop
+
+def curve_from_vertices(indexes, verts, name='Curve'):
+ """
+ Curve data from given vertices.
+ :arg indexes: List of Lists of indexes of the vertices.
+ :type indexes: List of Lists of int
+ :arg verts: List of vertices.
+ :type verts: List of :class:'bpy.types.MeshVertex'
+ :arg name: Name of the Curve data.
+ :type name: str
+ :return: Generated Curve data
+ :rtype: :class:'bpy.types.Curve'
+ """
+ curve = bpy.data.curves.new(name,'CURVE')
+ for c in indexes:
+ s = curve.splines.new('POLY')
+ s.points.add(len(c))
+ for i,p in enumerate(c):
+ s.points[i].co = verts[p].co.xyz + [1]
+ #s.points[i].tilt = degrees(asin(verts[p].co.z))
+ ob_curve = bpy.data.objects.new(name,curve)
+ return ob_curve
+
+def nurbs_from_vertices(indexes, co, radii=[], name='Curve', set_active=True, interpolation='POLY'):
+    """
+    Create and link a bevelled curve object from vertex indices.
+
+    :arg indexes: List of Lists of indices into 'co', one list per spline.
+    :arg co: Array of 3D coordinates, indexable by integer arrays.
+    :arg radii: Optional per-point radii (read-only, so the mutable
+        default is harmless here).
+    :arg interpolation: Spline type passed to splines.new().
+    :return: The new, linked curve object.
+    :rtype: :class:'bpy.types.Object'
+    """
+    curve = bpy.data.curves.new(name,'CURVE')
+    curve.dimensions = '3D'
+    curve.resolution_u = 2
+    curve.bevel_depth = 0.01
+    curve.bevel_resolution = 0
+    for pts in indexes:
+        s = curve.splines.new(interpolation)
+        n_pts = len(pts)
+        s.points.add(n_pts-1)
+        # spline points are 4D: append w = 1 to every coordinate
+        w = np.ones(n_pts).reshape((n_pts,1))
+        curve_co = np.concatenate((co[pts],w),axis=1).reshape((n_pts*4))
+        s.points.foreach_set('co',curve_co)
+        # best-effort: skip radii when missing or not index-compatible
+        try:
+            s.points.foreach_set('radius',radii[pts])
+        except: pass
+        s.use_endpoint_u = True
+
+    ob_curve = bpy.data.objects.new(name,curve)
+    bpy.context.collection.objects.link(ob_curve)
+    if set_active:
+        bpy.context.view_layer.objects.active = ob_curve
+        ob_curve.select_set(True)
+    return ob_curve
+
+# ------------------------------------------------------------------
+# VERTEX GROUPS AND WEIGHT
+# ------------------------------------------------------------------
+
+def get_weight(vertex_group, n_verts):
+    """
+    Read weight values from given Vertex Group.
+    :arg vertex_group: Vertex Group.
+    :type vertex_group: :class:'bpy.types.VertexGroup'
+    :arg n_verts: Number of Vertices (output list size).
+    :type n_verts: int
+    :return: Read weight values; vertices not assigned to the group
+        keep weight 0.
+    :rtype: list
+    """
+    weight = [0]*n_verts
+    for i in range(n_verts):
+        # weight() raises for vertices outside the group: leave them at 0
+        try: weight[i] = vertex_group.weight(i)
+        except: pass
+    return weight
+
+def get_weight_numpy(vertex_group, n_verts):
+    """
+    Read weight values from given Vertex Group.
+    :arg vertex_group: Vertex Group.
+    :type vertex_group: :class:'bpy.types.VertexGroup'
+    :arg n_verts: Number of Vertices (output list size).
+    :type n_verts: int
+    :return: Read weight values as numpy array; vertices not assigned
+        to the group keep weight 0.
+    :rtype: :class:'numpy.ndarray'
+    """
+    weight = [0]*n_verts
+    for i in range(n_verts):
+        # weight() raises for vertices outside the group: leave them at 0
+        try: weight[i] = vertex_group.weight(i)
+        except: pass
+    return np.array(weight)
+
+def bmesh_get_weight_numpy(group_index, layer, verts):
+    """
+    Read the weights of one vertex group from a bmesh deform layer.
+
+    :arg group_index: Index of the vertex group inside the deform layer.
+    :arg layer: bm.verts.layers.deform layer.
+    :arg verts: Iterable of BMVerts.
+    :return: Per-vertex weights (0 where the vertex is not in the group).
+    :rtype: :class:'numpy.ndarray'
+    """
+    weight = np.zeros(len(verts))
+    for i, v in enumerate(verts):
+        dvert = v[layer]
+        if group_index in dvert:
+            weight[i] = dvert[group_index]
+            #dvert[group_index] = 0.5
+    return weight
+
+def bmesh_set_weight_numpy(group_index, layer, verts, weight):
+    """
+    Write per-vertex weights into a bmesh deform layer (only for
+    vertices already assigned to the group).
+
+    NOTE(review): shadowed by the redefinition of the same name just
+    below (different signature) — this version is dead code as written.
+    """
+    for i, v in enumerate(verts):
+        dvert = v[layer]
+        if group_index in dvert:
+            dvert[group_index] = weight[i]
+    return verts
+
+def bmesh_set_weight_numpy(bm, group_index, weight):
+    """
+    Write per-vertex weights into the deform layer of a bmesh,
+    assigning every vertex to the group.
+
+    NOTE(review): redefines (and replaces) the previous function of the
+    same name — only this signature is effective at import time.
+    """
+    layer = bm.verts.layers.deform.verify()
+    for i, v in enumerate(bm.verts):
+        dvert = v[layer]
+        #if group_index in dvert:
+        dvert[group_index] = weight[i]
+    return bm
+
+def set_weight_numpy(vg, weight):
+    """
+    Write per-vertex weights into a Vertex Group, one vertex at a time.
+    """
+    for i, w in enumerate(weight):
+        vg.add([i], w, 'REPLACE')
+    return vg
+
+def uv_from_bmesh(bm, uv_index=None):
+    """
+    Return one UV coordinate per vertex of a bmesh.
+
+    NOTE(review): "if uv_index" is falsy for index 0, which falls back to
+    the active layer — TODO confirm that is intended.
+    For vertices shared by several loops the last visited loop wins.
+    """
+    if uv_index:
+        uv_lay = bm.loops.layers.uv[uv_index]
+    else:
+        uv_lay = bm.loops.layers.uv.active
+    uv_co = [0]*len(bm.verts)
+
+    for face in bm.faces:
+        for vert,loop in zip(face.verts, face.loops):
+            uv_co[vert.index] = loop[uv_lay].uv
+    return uv_co
+
+def get_uv_edge_vectors(me, uv_map = 0, only_positive=False):
+    """
+    Return, for every edge of the mesh, the normalized UV-space direction
+    of that edge (taken from the given UV map).
+
+    :arg only_positive: Mirror directions into the positive quadrant.
+    :return: One 2D Vector per mesh edge, ordered like me.edges.
+    :rtype: list
+    """
+    count = 0  # running offset into the per-loop UV data
+    uv_vectors = {}
+    for i, f in enumerate(me.polygons):
+        f_verts = len(f.vertices)
+        for j0 in range(f_verts):
+            j1 = (j0+1)%f_verts  # next corner, wrapping around the face
+            uv0 = me.uv_layers[uv_map].data[count+j0].uv
+            uv1 = me.uv_layers[uv_map].data[count+j1].uv
+            delta_uv = (uv1-uv0).normalized()
+            if only_positive:
+                delta_uv.x = abs(delta_uv.x)
+                delta_uv.y = abs(delta_uv.y)
+            # key by the sorted vertex pair so it matches edge keys
+            edge_key = tuple(sorted([f.vertices[j0], f.vertices[j1]]))
+            uv_vectors[edge_key] = delta_uv
+        count += f_verts
+    uv_vectors = [uv_vectors[tuple(sorted(e.vertices))] for e in me.edges]
+    return uv_vectors
+
+def mesh_diffusion(me, values, iter, diff=0.2, uv_dir=0):
+    """
+    Diffuse per-vertex values across mesh edges (graph Laplacian smoothing),
+    optionally weighted by the UV-space direction of each edge.
+
+    :arg me: Mesh providing the edge connectivity (and UV map if uv_dir != 0).
+    :arg values: Per-vertex scalar values.
+    :arg iter: Number of diffusion steps. (Shadows the builtin; the name is
+        kept because it is part of the public signature.)
+    :arg diff: Diffusion rate per step.
+    :arg uv_dir: 0 = isotropic; otherwise weight edges by alignment with a
+        UV direction derived from uv_dir.
+    :return: Smoothed values.
+    :rtype: :class:'numpy.ndarray'
+    """
+    values = np.array(values)
+    n_verts = len(me.vertices)
+
+    n_edges = len(me.edges)
+    edge_verts = [0]*n_edges*2  # NOTE(review): overwritten below, unused
+    #me.edges.foreach_get("vertices", edge_verts)
+
+    count = 0  # running offset into the per-loop UV data
+    edge_verts = []
+    uv_factor = {}
+    uv_ang = (0.5 + uv_dir*0.5)*pi/2
+    uv_vec = Vector((cos(uv_ang), sin(uv_ang)))
+    for i, f in enumerate(me.polygons):
+        f_verts = len(f.vertices)
+        for j0 in range(f_verts):
+            j1 = (j0+1)%f_verts
+            if uv_dir != 0:
+                # weight by how much the edge's UV direction matches uv_vec
+                uv0 = me.uv_layers[0].data[count+j0].uv
+                uv1 = me.uv_layers[0].data[count+j1].uv
+                delta_uv = (uv1-uv0).normalized()
+                delta_uv.x = abs(delta_uv.x)
+                delta_uv.y = abs(delta_uv.y)
+                dir = uv_vec.dot(delta_uv)
+            else:
+                dir = 1
+            #dir = abs(dir)
+            #uv_factor.append(dir)
+            edge_key = [f.vertices[j0], f.vertices[j1]]
+            edge_key.sort()
+            uv_factor[tuple(edge_key)] = dir
+        count += f_verts
+    # flatten the edge dictionary into index/weight arrays
+    id0 = []
+    id1 = []
+    uv_mult = []
+    for ek, val in uv_factor.items():
+        id0.append(ek[0])
+        id1.append(ek[1])
+        uv_mult.append(val)
+    id0 = np.array(id0)
+    id1 = np.array(id1)
+    uv_mult = np.array(uv_mult)
+
+    #edge_verts = np.array(edge_verts)
+    #arr = np.arange(n_edges)*2
+
+    #id0 = edge_verts[arr] # first vertex indices for each edge
+    #id1 = edge_verts[arr+1] # second vertex indices for each edge
+    for ii in range(iter):
+        lap = np.zeros(n_verts)
+        if uv_dir != 0:
+            lap0 = (values[id1] - values[id0])*uv_mult # laplacian increment for first vertex of each edge
+        else:
+            lap0 = (values[id1] - values[id0])
+        # scatter-add the increments to both edge endpoints
+        np.add.at(lap, id0, lap0)
+        np.add.at(lap, id1, -lap0)
+        values += diff*lap
+    return values
+
+def mesh_diffusion_vector(me, vectors, iter, diff, uv_dir=0):
+    """
+    Diffuse per-vertex 3D vectors across mesh edges by smoothing each
+    component independently with mesh_diffusion().
+    """
+    vectors = np.array(vectors)
+    x = vectors[:,0]
+    y = vectors[:,1]
+    z = vectors[:,2]
+    x = mesh_diffusion(me, x, iter, diff, uv_dir)
+    y = mesh_diffusion(me, y, iter, diff, uv_dir)
+    z = mesh_diffusion(me, z, iter, diff, uv_dir)
+    vectors[:,0] = x
+    vectors[:,1] = y
+    vectors[:,2] = z
+    return vectors
+
+# ------------------------------------------------------------------
+# MODIFIERS
+# ------------------------------------------------------------------
+
+def mod_preserve_topology(mod):
+    """
+    Return True when the given modifier type never changes the mesh
+    topology (vertex/edge/face counts and connectivity).
+    """
+    same_topology_modifiers = ('DATA_TRANSFER','NORMAL_EDIT','WEIGHTED_NORMAL',
+        'UV_PROJECT','UV_WARP','VERTEX_WEIGHT_EDIT','VERTEX_WEIGHT_MIX',
+        'VERTEX_WEIGHT_PROXIMITY','ARMATURE','CAST','CURVE','DISPLACE','HOOK',
+        'LAPLACIANDEFORM','LATTICE','MESH_DEFORM','SHRINKWRAP','SIMPLE_DEFORM',
+        'SMOOTH','CORRECTIVE_SMOOTH','LAPLACIANSMOOTH','SURFACE_DEFORM','WARP',
+        'WAVE','CLOTH','COLLISION','DYNAMIC_PAINT','SOFT_BODY'
+        )
+    return mod.type in same_topology_modifiers
+
+def mod_preserve_shape(mod):
+    """
+    Return True when the given modifier type never moves vertices
+    (it may only affect data layers such as weights, UVs or normals).
+    """
+    same_shape_modifiers = ('DATA_TRANSFER','NORMAL_EDIT','WEIGHTED_NORMAL',
+        'UV_PROJECT','UV_WARP','VERTEX_WEIGHT_EDIT','VERTEX_WEIGHT_MIX',
+        'VERTEX_WEIGHT_PROXIMITY','DYNAMIC_PAINT'
+        )
+    return mod.type in same_shape_modifiers
+
+
+def recurLayerCollection(layerColl, collName):
+    '''
+    Recursively traverse layer_collection for a particular name.
+
+    :return: The matching LayerCollection, or None when not found.
+    '''
+    found = None
+    if (layerColl.name == collName):
+        return layerColl
+    for layer in layerColl.children:
+        found = recurLayerCollection(layer, collName)
+        if found:
+            return found
+
+def auto_layer_collection():
+    '''
+    Automatically change active layer collection.
+
+    When the active layer collection is hidden, switch to the first
+    visible collection containing the active object, so new objects can
+    be linked without errors.
+    '''
+    layer = bpy.context.view_layer.active_layer_collection
+    layer_collection = bpy.context.view_layer.layer_collection
+    if layer.hide_viewport or layer.collection.hide_viewport:
+        collections = bpy.context.object.users_collection
+        for c in collections:
+            lc = recurLayerCollection(layer_collection, c.name)
+            if not c.hide_viewport and not lc.hide_viewport:
+                bpy.context.view_layer.active_layer_collection = lc
diff --git a/mesh_tissue/utils_pip.py b/mesh_tissue/utils_pip.py
new file mode 100644
index 00000000..adfad77f
--- /dev/null
+++ b/mesh_tissue/utils_pip.py
@@ -0,0 +1,154 @@
+# -*- coding:utf-8 -*-
+
+# ##### BEGIN GPL LICENSE BLOCK #####
+#
+# This program is free software; you can redistribute it and/or
+# modify it under the terms of the GNU General Public License
+# as published by the Free Software Foundation; either version 2
+# of the License, or (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this program; if not, write to the Free Software Foundation,
+# Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
+#
+# ##### END GPL LICENSE BLOCK #####
+
+# <pep8 compliant>
+
+# ----------------------------------------------------------
+# Author: Stephen Leger (s-leger)
+#
+# ----------------------------------------------------------
+import bpy
+import subprocess
+import sys
+
+
+PYPATH = sys.executable #bpy.app.binary_path_python
+
+
+class Pip:
+    """
+    Thin wrapper around "python -m pip" run through Blender's bundled
+    Python interpreter (PYPATH). Construction ensures pip is available.
+    """
+
+    def __init__(self):
+        self._ensurepip()
+
+    @staticmethod
+    def _ensure_user_site_package():
+        # Make sure the user site-packages directory exists and is on
+        # sys.path, so "--user" installs become importable immediately.
+        import os
+        import site
+        import sys
+        site_package = site.getusersitepackages()
+        if not os.path.exists(site_package):
+            site_package = bpy.utils.user_resource('SCRIPTS', "site_package", create=True)
+            site.addsitedir(site_package)
+        if site_package not in sys.path:
+            sys.path.append(site_package)
+    '''
+    @staticmethod
+    def _ensure_user_site_package():
+        import os
+        import site
+        import sys
+        site_package = site.getusersitepackages()
+        if os.path.exists(site_package):
+            if site_package not in sys.path:
+                sys.path.append(site_package)
+        else:
+            site_package = bpy.utils.user_resource('SCRIPTS', "site_package", create=True)
+            site.addsitedir(site_package)
+    '''
+    def _cmd(self, action, options, module):
+        # Build and run: PYPATH -m pip <action> [options...] <module>
+        if options is not None and "--user" in options:
+            self._ensure_user_site_package()
+
+        cmd = [PYPATH, "-m", "pip", action]
+
+        if options is not None:
+            cmd.extend(options.split(" "))
+
+        cmd.append(module)
+        return self._run(cmd)
+
+    def _popen(self, cmd):
+        # Stream stdout line by line while the subprocess runs.
+        popen = subprocess.Popen(cmd, stdout=subprocess.PIPE, universal_newlines=True)
+        for stdout_line in iter(popen.stdout.readline, ""):
+            yield stdout_line
+        popen.stdout.close()
+        popen.wait()
+
+    def _run(self, cmd):
+        # Scan pip's output: "Successfully" means success, the last
+        # matching line becomes the reported status message.
+        res = False
+        status = ""
+        for line in self._popen(cmd):
+            if "ERROR:" in line:
+                status = line.strip()
+            if "Error:" in line:
+                status = line.strip()
+            print(line)
+            if "Successfully" in line:
+                status = line.strip()
+                res = True
+        return res, status
+
+    def _ensurepip(self):
+        # Bootstrap pip via ensurepip when it is not importable.
+        pip_not_found = False
+        try:
+            import pip
+        except ImportError:
+            pip_not_found = True
+            pass
+        if pip_not_found:
+            self._run([PYPATH, "-m", "ensurepip", "--default-pip"])
+
+    @staticmethod
+    def upgrade_pip():
+        # Upgrade pip itself.
+        return Pip()._cmd("install", "--upgrade", "pip")
+
+    @staticmethod
+    def uninstall(module, options=None):
+        """
+        :param module: string module name with requirements see:[1]
+        :param options: string command line options see:[2]
+        :return: True on uninstall, False if already removed, raise on Error
+        [1] https://pip.pypa.io/en/stable/reference/pip_install/#id29
+        [2] https://pip.pypa.io/en/stable/reference/pip_install/#id47
+        """
+        if options is None or options.strip() == "":
+            # force confirm
+            options = "-y"
+        return Pip()._cmd("uninstall", options, module)
+
+    @staticmethod
+    def install(module, options=None):
+        """
+        :param module: string module name with requirements see:[1]
+        :param options: string command line options see:[2]
+        :return: True on install, False if already there, raise on Error
+        [1] https://pip.pypa.io/en/stable/reference/pip_install/#id29
+        [2] https://pip.pypa.io/en/stable/reference/pip_install/#id47
+        """
+        if options is None or options.strip() == "":
+            # store in user writable directory, use wheel, without deps
+            options = "--user --only-binary all --no-deps"
+        return Pip()._cmd("install", options, module)
+
+    @staticmethod
+    def blender_version():
+        """
+        :return: blender version tuple
+        """
+        return bpy.app.version
+
+    @staticmethod
+    def python_version():
+        """
+        :return: python version object
+        """
+        import sys
+        # version.major, version.minor, version.micro
+        return sys.version_info
diff --git a/mesh_tissue/uv_to_mesh.py b/mesh_tissue/uv_to_mesh.py
index 9f3db3b7..13cb09e7 100644
--- a/mesh_tissue/uv_to_mesh.py
+++ b/mesh_tissue/uv_to_mesh.py
@@ -12,7 +12,7 @@
# #
# ############################################################################ #
-import bpy
+import bpy, bmesh
import math
from bpy.types import Operator
from bpy.props import BoolProperty
@@ -48,48 +48,45 @@ class uv_to_mesh(Operator):
)
def execute(self, context):
+ if context.mode == 'EDIT_MESH': on_selection = True
+ else: on_selection = False
+
bpy.ops.object.mode_set(mode='OBJECT')
- for o in bpy.data.objects and bpy.context.view_layer.objects:
- o.select_set(False)
- bpy.context.object.select_set(True)
-
- if self.apply_modifiers:
- bpy.ops.object.duplicate_move()
- bpy.ops.object.convert(target='MESH')
- ob0 = bpy.context.object
-
-# me0 = ob0.to_mesh(bpy.context.depsgraph, apply_modifiers=self.apply_modifiers)
- #if self.apply_modifiers: me0 = simple_to_mesh(ob0)
- #else: me0 = ob0.data.copy()
+ ob0 = context.object
+ for o in bpy.context.view_layer.objects: o.select_set(False)
+ ob0.select_set(True)
+
name0 = ob0.name
ob0 = convert_object_to_mesh(ob0, apply_modifiers=self.apply_modifiers, preserve_status=False)
me0 = ob0.data
area = 0
-
verts = []
faces = []
face_materials = []
- for face in me0.polygons:
+ if on_selection: polygons = [f for f in me0.polygons if f.select]
+ else: polygons = me0.polygons
+ bm = bmesh.new()
+
+ for face in polygons:
area += face.area
uv_face = []
store = False
- try:
+ if len(me0.uv_layers) > 0:
+ verts = []
for loop in face.loop_indices:
uv = me0.uv_layers.active.data[loop].uv
if uv.x != 0 and uv.y != 0:
store = True
- new_vert = Vector((uv.x, uv.y, 0))
+ new_vert = bm.verts.new((uv.x, uv.y, 0))
verts.append(new_vert)
- uv_face.append(loop)
if store:
- faces.append(uv_face)
- face_materials.append(face.material_index)
- except:
+ new_face = bm.faces.new(verts)
+ new_face.material_index = face.material_index
+ else:
self.report({'ERROR'}, "Missing UV Map")
-
return {'CANCELLED'}
- name = name0 + 'UV'
+ name = name0 + '_UV'
# Create mesh and object
me = bpy.data.meshes.new(name + 'Mesh')
ob = bpy.data.objects.new(name, me)
@@ -101,9 +98,10 @@ class uv_to_mesh(Operator):
ob.select_set(True)
# Create mesh from given verts, faces.
- me.from_pydata(verts, [], faces)
+ bm.to_mesh(me)
# Update mesh with new data
me.update()
+
if self.auto_scale:
new_area = 0
for p in me.polygons:
@@ -111,7 +109,6 @@ class uv_to_mesh(Operator):
if new_area == 0:
self.report({'ERROR'}, "Impossible to generate mesh from UV")
bpy.data.objects.remove(ob0)
-
return {'CANCELLED'}
# VERTEX GROUPS
@@ -119,7 +116,7 @@ class uv_to_mesh(Operator):
for group in ob0.vertex_groups:
index = group.index
ob.vertex_groups.new(name=group.name)
- for p in me0.polygons:
+ for p in polygons:
for vert, loop in zip(p.vertices, p.loop_indices):
try:
ob.vertex_groups[index].add([loop], group.weight(vert), 'REPLACE')
@@ -138,25 +135,12 @@ class uv_to_mesh(Operator):
# MATERIALS
if self.materials:
- try:
+ if len(ob0.material_slots) > 0:
# assign old material
uv_materials = [slot.material for slot in ob0.material_slots]
for i in range(len(uv_materials)):
bpy.ops.object.material_slot_add()
bpy.context.object.material_slots[i].material = uv_materials[i]
- for i in range(len(ob.data.polygons)):
- ob.data.polygons[i].material_index = face_materials[i]
- except:
- pass
- '''
- if self.apply_modifiers:
- bpy.ops.object.mode_set(mode='OBJECT')
- ob.select_set(False)
- ob0.select_set(True)
- bpy.ops.object.delete(use_global=False)
- ob.select_set(True)
- bpy.context.view_layer.objects.active = ob
- '''
bpy.data.objects.remove(ob0)
bpy.data.meshes.remove(me0)
diff --git a/mesh_tissue/weight_tools.py b/mesh_tissue/weight_tools.py
new file mode 100644
index 00000000..d58adc6f
--- /dev/null
+++ b/mesh_tissue/weight_tools.py
@@ -0,0 +1,4681 @@
+# ##### BEGIN GPL LICENSE BLOCK #####
+#
+# This program is free software; you can redistribute it and/or
+# modify it under the terms of the GNU General Public License
+# as published by the Free Software Foundation; either version 2
+# of the License, or (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this program; if not, write to the Free Software Foundation,
+# Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
+#
+# ##### END GPL LICENSE BLOCK #####
+
+#-------------------------- COLORS / GROUPS EXCHANGER -------------------------#
+# #
+# Vertex Color to Vertex Group allows you to convert color channels to weight #
+# maps. #
+# The main purpose is to use vertex colors to store information when importing #
+# files from other software. The script works with the active vertex color #
+# slot. #
+# To use the command "Vertex Colors to Vertex Groups", use the search bar #
+# (space bar). #
+# #
+# (c) Alessandro Zomparelli #
+# (2017) #
+# #
+# http://www.co-de-it.com/ #
+# #
+################################################################################
+
+import bpy, bmesh, os
+import numpy as np
+import math, timeit, time
+from math import pi
+from statistics import mean, stdev
+from mathutils import Vector
+from mathutils.kdtree import KDTree
+from numpy import *
+try: from .numba_functions import numba_reaction_diffusion, numba_reaction_diffusion_anisotropic, integrate_field
+except: pass
+#from .numba_functions import integrate_field
+#from .numba_functions import numba_reaction_diffusion
+try: import numexpr as ne
+except: pass
+
+# Reaction-Diffusion cache
+from pathlib import Path
+import random as rnd
+import string
+
+from bpy.types import (
+ Operator,
+ Panel,
+ PropertyGroup,
+ )
+
+from bpy.props import (
+ BoolProperty,
+ EnumProperty,
+ FloatProperty,
+ IntProperty,
+ StringProperty,
+ FloatVectorProperty,
+ IntVectorProperty
+)
+
+from .utils import *
+
def reaction_diffusion_add_handler(self, context):
    """Register the Reaction-Diffusion frame-change handler.

    Any previously registered Reaction-Diffusion handler is removed
    first, so the handler list never accumulates duplicates.
    """
    reaction_diffusion_remove_handler(self, context)
    post_handlers = bpy.app.handlers.frame_change_post
    post_handlers.append(reaction_diffusion_scene)
+
def reaction_diffusion_remove_handler(self, context):
    """Unregister every Reaction-Diffusion frame-change handler.

    Handlers are matched by name (anything whose repr contains
    "reaction_diffusion"), collected first, then removed, so the
    handler list is never mutated while being iterated.
    """
    post_handlers = bpy.app.handlers.frame_change_post
    stale = [h for h in post_handlers if "reaction_diffusion" in str(h)]
    for h in stale:
        post_handlers.remove(h)
+
class formula_prop(PropertyGroup):
    """Record of a previously applied Weight Formula.

    The ``weight_formula`` operator appends one of these to
    ``Object.formula_settings`` after baking a formula, so the formula
    and its slider values can be inspected later.
    """
    # Name of the Vertex Group the formula was baked into.
    name : StringProperty()
    # The formula string, with slider placeholders already substituted.
    formula : StringProperty()
    # Values of the five float sliders (f1..f5).
    float_var : FloatVectorProperty(name="", description="", default=(0, 0, 0, 0, 0), size=5)
    # Values of the five integer sliders (i1..i5).
    int_var : IntVectorProperty(name="", description="", default=(0, 0, 0, 0, 0), size=5)
+
class reaction_diffusion_prop(PropertyGroup):
    """Per-object settings for the Reaction-Diffusion simulation.

    Fixes over the previous revision: "Inverte"/"Tranfer" typos in the
    user-visible descriptions, and the ``bool_cache`` description which
    was a copy-paste of ``bool_mod``'s.
    """
    run : BoolProperty(default=False, update = reaction_diffusion_add_handler,
        description='Compute a new iteration on frame changes. Currently is not working during Render Animation')

    time_steps : IntProperty(
        name="Steps", default=10, min=0, soft_max=50,
        description="Number of Steps")

    # NOTE(review): default (1) exceeds soft_max (0.2); preserved as-is.
    dt : FloatProperty(
        name="dt", default=1, min=0, soft_max=0.2,
        description="Time Step")

    diff_a : FloatProperty(
        name="Diff A", default=0.1, min=0, soft_max=2, precision=3,
        description="Diffusion A")

    diff_b : FloatProperty(
        name="Diff B", default=0.05, min=0, soft_max=2, precision=3,
        description="Diffusion B")

    f : FloatProperty(
        name="f", default=0.055, soft_min=0.01, soft_max=0.06, precision=4, step=0.05,
        description="Feed Rate")

    k : FloatProperty(
        name="k", default=0.062, soft_min=0.035, soft_max=0.065, precision=4, step=0.05,
        description="Kill Rate")

    diff_mult : FloatProperty(
        name="Scale", default=1, min=0, soft_max=1, max=10, precision=2,
        description="Multiplier for the diffusion of both substances")

    # Optional vertex groups modulating the simulation parameters per vertex.
    vertex_group_diff_a : StringProperty(
        name="Diff A", default='',
        description="Vertex Group used for A diffusion")

    vertex_group_diff_b : StringProperty(
        name="Diff B", default='',
        description="Vertex Group used for B diffusion")

    vertex_group_scale : StringProperty(
        name="Scale", default='',
        description="Vertex Group used for Scale value")

    vertex_group_f : StringProperty(
        name="f", default='',
        description="Vertex Group used for Feed value (f)")

    vertex_group_k : StringProperty(
        name="k", default='',
        description="Vertex Group used for Kill value (k)")

    vertex_group_brush : StringProperty(
        name="Brush", default='',
        description="Vertex Group used for adding/removing B")

    invert_vertex_group_diff_a : BoolProperty(default=False,
        description='Invert the value of the Vertex Group Diff A')

    invert_vertex_group_diff_b : BoolProperty(default=False,
        description='Invert the value of the Vertex Group Diff B')

    invert_vertex_group_scale : BoolProperty(default=False,
        description='Invert the value of the Vertex Group Scale')

    invert_vertex_group_f : BoolProperty(default=False,
        description='Invert the value of the Vertex Group f')

    invert_vertex_group_k : BoolProperty(default=False,
        description='Invert the value of the Vertex Group k')

    # Remapping bounds applied to the modulating vertex groups above.
    min_diff_a : FloatProperty(
        name="Min Diff A", default=0.1, min=0, soft_max=2, precision=3,
        description="Min Diff A")

    max_diff_a : FloatProperty(
        name="Max Diff A", default=0.1, min=0, soft_max=2, precision=3,
        description="Max Diff A")

    min_diff_b : FloatProperty(
        name="Min Diff B", default=0.1, min=0, soft_max=2, precision=3,
        description="Min Diff B")

    max_diff_b : FloatProperty(
        name="Max Diff B", default=0.1, min=0, soft_max=2, precision=3,
        description="Max Diff B")

    min_scale : FloatProperty(
        name="Scale", default=0.35, min=0, soft_max=1, max=10, precision=2,
        description="Min Scale Value")

    max_scale : FloatProperty(
        name="Scale", default=1, min=0, soft_max=1, max=10, precision=2,
        description="Max Scale value")

    min_f : FloatProperty(
        name="Min f", default=0.02, min=0, soft_min=0.01, soft_max=0.06, max=0.1, precision=4, step=0.05,
        description="Min Feed Rate")

    max_f : FloatProperty(
        name="Max f", default=0.055, min=0, soft_min=0.01, soft_max=0.06, max=0.1, precision=4, step=0.05,
        description="Max Feed Rate")

    min_k : FloatProperty(
        name="Min k", default=0.035, min=0, soft_min=0.035, soft_max=0.065, max=0.1, precision=4, step=0.05,
        description="Min Kill Rate")

    max_k : FloatProperty(
        name="Max k", default=0.062, min=0, soft_min=0.035, soft_max=0.065, max=0.1, precision=4, step=0.05,
        description="Max Kill Rate")

    brush_mult : FloatProperty(
        name="Mult", default=0.5, min=-1, max=1, precision=3, step=0.05,
        description="Multiplier for brush value")

    bool_mod : BoolProperty(
        name="Use Modifiers", default=False,
        description="Read modifiers affect the vertex groups")

    bool_cache : BoolProperty(
        name="Use Cache", default=False,
        description="Store the simulation in cache files")

    cache_frame_start : IntProperty(
        name="Start", default=1,
        description="Frame on which the simulation starts")

    cache_frame_end : IntProperty(
        name="End", default=250,
        description="Frame on which the simulation ends")

    cache_dir : StringProperty(
        name="Cache directory", default="", subtype='FILE_PATH',
        description = 'Directory that contains Reaction-Diffusion cache files'
        )

    # Output targets: where the simulated A/B fields are written back.
    update_weight_a : BoolProperty(
        name="Update Vertex Group A", default=True,
        description="Transfer Cache to the Vertex Groups named A")

    update_weight_b : BoolProperty(
        name="Update Vertex Group B", default=True,
        description="Transfer Cache to the Vertex Groups named B")

    update_colors_a : BoolProperty(
        name="Update Vertex Color A", default=False,
        description="Transfer Cache to the Vertex Color named A")

    update_colors_b : BoolProperty(
        name="Update Vertex Color B", default=False,
        description="Transfer Cache to the Vertex Color named B")

    update_colors : BoolProperty(
        name="Update Vertex Color AB", default=False,
        description="Transfer Cache to the Vertex Color named AB")

    update_uv : BoolProperty(
        name="Update UV", default=False,
        description="Transfer Cache to the UV Map Layer named AB")

    normalize : BoolProperty(
        name="Normalize values", default=False,
        description="Normalize values from 0 to 1")

    fast_bake : BoolProperty(
        name="Fast Bake", default=True,
        description="Do not update modifiers or vertex groups while baking. Much faster!")
+
+
+from numpy import *
def compute_formula(ob=None, formula="rx", float_var=(0,0,0,0,0), int_var=(0,0,0,0,0)):
    """Evaluate a per-vertex weight formula on the given object.

    Parameters:
        ob: object whose vertices and vertex groups supply the variables.
        formula: expression string. It may reference lx/ly/lz (local
            coordinates), gx/gy/gz (global coordinates), rx/ry/rz
            (local coordinates remapped to 0..1), nx/ny/nz (normals),
            w[i] (vertex group weights), f1..f5 and i1..i5 (sliders).
        float_var: values bound to f1..f5.
        int_var: values bound to i1..i5.

    Returns:
        A numpy array (or scalar) of weights on success, or an error
        message string when the formula references a missing vertex
        group or fails to evaluate.

    Fixes over the previous revision: removed the unreachable timing
    print after the return statement, the unused ``do_groups`` flag,
    and narrowed the bare ``except`` clauses.
    """
    verts = ob.data.vertices
    n_verts = len(verts)

    f1,f2,f3,f4,f5 = float_var
    i1,i2,i3,i4,i5 = int_var

    # Only gather the per-vertex data that the formula actually references.
    do_local = "lx" in formula or "ly" in formula or "lz" in formula
    do_global = "gx" in formula or "gy" in formula or "gz" in formula
    do_relative = "rx" in formula or "ry" in formula or "rz" in formula
    do_normal = "nx" in formula or "ny" in formula or "nz" in formula
    mat = ob.matrix_world

    # Reject references to vertex groups that do not exist.
    for i in range(1000):
        if "w["+str(i)+"]" in formula and i > len(ob.vertex_groups)-1:
            return "w["+str(i)+"] not found"

    # Gather weights only for the groups the formula mentions.
    w = []
    for i in range(len(ob.vertex_groups)):
        w.append([])
        if "w["+str(i)+"]" in formula:
            vg = ob.vertex_groups[i]
            for v in verts:
                try:
                    w[i].append(vg.weight(v.index))
                except Exception:
                    # Vertex not assigned to this group: weight 0.
                    w[i].append(0)
            w[i] = array(w[i])

    # Per-vertex coordinates.
    if do_local or do_relative or do_global:
        co = [0]*n_verts*3
        verts.foreach_get('co', co)
        np_co = array(co).reshape((n_verts, 3))
        lx, ly, lz = np_co.transpose()
        if do_relative:
            rx = np.interp(lx, (lx.min(), lx.max()), (0, +1))
            ry = np.interp(ly, (ly.min(), ly.max()), (0, +1))
            rz = np.interp(lz, (lz.min(), lz.max()), (0, +1))
        if do_global:
            global_co = array([mat @ v.co for v in verts]).reshape((n_verts, 3))
            gx, gy, gz = global_co.transpose()
    # Per-vertex normals.
    if do_normal:
        normal = [0]*n_verts*3
        verts.foreach_get('normal', normal)
        nx, ny, nz = array(normal).reshape((n_verts, 3)).transpose()

    # NOTE: eval() executes arbitrary code; formulas are direct user
    # input typed in the operator dialog, not data read from files.
    try:
        weight = eval(formula)
    except Exception:
        return "There is something wrong"
    return weight
+
class weight_formula_wiki(Operator):
    """Open the Weight Formula section of the Tissue online documentation."""
    bl_idname = "scene.weight_formula_wiki"
    bl_label = "Online Documentation"
    bl_options = {'REGISTER', 'UNDO'}

    def execute(self, context):
        wiki_url = "https://github.com/alessandro-zomparelli/tissue/wiki/Weight-Tools#weight-formula"
        bpy.ops.wm.url_open(url=wiki_url)
        return {'FINISHED'}
+
class weight_formula(Operator):
    """Generate a Vertex Group from a user supplied mathematical formula.

    The formula is evaluated per vertex by :func:`compute_formula`.
    Slider values f1..f5 and i1..i5 are textually substituted into the
    formula, and the result is baked into a new Vertex Group named
    after the final formula string. The applied settings are stored in
    ``Object.formula_settings``.
    """
    bl_idname = "object.weight_formula"
    bl_label = "Weight Formula"
    bl_description = "Generate a Vertex Group according to a mathematical formula"
    bl_options = {'REGISTER', 'UNDO'}

    # Preset formulas for the Examples drop-down: (formula, label) pairs.
    ex_items = [
        ('cos(arctan(nx/ny)*i1*2 + sin(rz*i3))/i2 + cos(arctan(nx/ny)*i1*2 - sin(rz*i3))/i2 + 0.5','Vertical Spots'),
        ('cos(arctan(nx/ny)*i1*2 + sin(rz*i2))/2 + cos(arctan(nx/ny)*i1*2 - sin(rz*i2))/2','Vertical Spots'),
        ('(sin(arctan(nx/ny)*i1*2)*sin(nz*i1*2)+1)/2','Grid Spots'),
        ('cos(arctan(nx/ny)*f1)','Vertical Stripes'),
        ('cos(arctan(lx/ly)*f1 + sin(rz*f2)*f3)','Curly Stripes'),
        ('sin(rz*pi*i1+arctan2(nx,ny))/2+0.5', 'Vertical Spiral'),
        ('sin(nx*15)<sin(ny*15)','Chess'),
        ('cos(ny*rz**2*i1)','Hyperbolic'),
        ('sin(rx*30) > 0','Step Stripes'),
        ('sin(nz*i1)','Normal Stripes'),
        ('w[0]**2','Vertex Group square'),
        ('abs(0.5-rz)*2','Double vertical gradient'),
        ('rz', 'Vertical Gradient')
    ]
    # Enum entries built from ex_items plus a trailing CUSTOM entry for
    # free-form formulas.
    _ex_items = list((str(i),'{} ( {} )'.format(s[0],s[1]),s[1]) for i,s in enumerate(ex_items))
    _ex_items.append(('CUSTOM', "User Formula", ""))

    examples : EnumProperty(
        items = _ex_items, default='CUSTOM', name="Examples")

    # Last applied preset; class-level attribute used by draw()/execute()
    # to detect when the user switches to a different example.
    old_ex = ""

    formula : StringProperty(
        name="Formula", default="", description="Formula to Evaluate")

    slider_f01 : FloatProperty(
        name="f1", default=1, description="Slider Float 1")
    slider_f02 : FloatProperty(
        name="f2", default=1, description="Slider Float 2")
    slider_f03 : FloatProperty(
        name="f3", default=1, description="Slider Float 3")
    slider_f04 : FloatProperty(
        name="f4", default=1, description="Slider Float 4")
    slider_f05 : FloatProperty(
        name="f5", default=1, description="Slider Float 5")
    slider_i01 : IntProperty(
        name="i1", default=1, description="Slider Integer 1")
    slider_i02 : IntProperty(
        name="i2", default=1, description="Slider Integer 2")
    slider_i03 : IntProperty(
        name="i3", default=1, description="Slider Integer 3")
    slider_i04 : IntProperty(
        name="i4", default=1, description="Slider Integer 4")
    slider_i05 : IntProperty(
        name="i5", default=1, description="Slider Integer 5")

    def invoke(self, context, event):
        # Show the operator as a popup dialog instead of running directly.
        return context.window_manager.invoke_props_dialog(self, width=350)

    def draw(self, context):
        """Draw preset selector, formula field, and only the sliders the
        current formula actually references."""
        layout = self.layout
        #layout.label(text="Examples")
        layout.prop(self, "examples", text="Examples")
        #if self.examples == 'CUSTOM':
        layout.label(text="Formula")
        layout.prop(self, "formula", text="")
        #try: self.examples = self.formula
        #except: pass

        # Keep the formula field and the Examples enum in sync: copying
        # the preset into formula, or falling back to CUSTOM when the
        # user edits the text by hand.
        if self.examples != 'CUSTOM':
            example = self.ex_items[int(self.examples)][0]
            if example != self.old_ex:
                self.formula = example
                self.old_ex = example
            elif self.formula != example:
                self.examples = 'CUSTOM'
        formula = self.formula

        layout.separator()
        # Show a slider only if its placeholder occurs in the formula.
        if "f1" in formula: layout.prop(self, "slider_f01")
        if "f2" in formula: layout.prop(self, "slider_f02")
        if "f3" in formula: layout.prop(self, "slider_f03")
        if "f4" in formula: layout.prop(self, "slider_f04")
        if "f5" in formula: layout.prop(self, "slider_f05")
        if "i1" in formula: layout.prop(self, "slider_i01")
        if "i2" in formula: layout.prop(self, "slider_i02")
        if "i3" in formula: layout.prop(self, "slider_i03")
        if "i4" in formula: layout.prop(self, "slider_i04")
        if "i5" in formula: layout.prop(self, "slider_i05")

        layout.label(text="Variables (for each vertex):")
        layout.label(text="lx, ly, lz: Local Coordinates", icon='ORIENTATION_LOCAL')
        layout.label(text="gx, gy, gz: Global Coordinates", icon='WORLD')
        layout.label(text="rx, ry, rz: Local Coordinates (0 to 1)", icon='NORMALIZE_FCURVES')
        layout.label(text="nx, ny, nz: Normal Coordinates", icon='SNAP_NORMAL')
        layout.label(text="w[0], w[1], w[2], ... : Vertex Groups", icon="GROUP_VERTEX")
        layout.separator()
        layout.label(text="f1, f2, f3, f4, f5: Float Sliders", icon='MOD_HUE_SATURATION')#PROPERTIES
        layout.label(text="i1, i2, i3, i4, i5: Integer Sliders", icon='MOD_HUE_SATURATION')
        layout.separator()
        #layout.label(text="All mathematical functions are based on Numpy", icon='INFO')
        #layout.label(text="https://docs.scipy.org/doc/numpy-1.13.0/reference/routines.math.html", icon='INFO')
        layout.operator("scene.weight_formula_wiki", icon="HELP")
        #layout.label(text="(where 'i' is the index of the Vertex Group)")

    def execute(self, context):
        """Substitute slider values into the formula, evaluate it per
        vertex, and bake the result into a new Vertex Group."""
        ob = context.active_object
        n_verts = len(ob.data.vertices)
        #if self.examples == 'CUSTOM':
        #    formula = self.formula
        #else:
        #self.formula = self.examples
        #    formula = self.examples

        #f1, f2, f3, f4, f5 = self.slider_f01, self.slider_f02, self.slider_f03, self.slider_f04, self.slider_f05
        #i1, i2, i3, i4, i5 = self.slider_i01, self.slider_i02, self.slider_i03, self.slider_i04, self.slider_i05
        f_sliders = self.slider_f01, self.slider_f02, self.slider_f03, self.slider_f04, self.slider_f05
        i_sliders = self.slider_i01, self.slider_i02, self.slider_i03, self.slider_i04, self.slider_i05

        # Same preset/custom sync logic as draw(); execute() may run
        # without the dialog ever being drawn.
        if self.examples != 'CUSTOM':
            example = self.ex_items[int(self.examples)][0]
            if example != self.old_ex:
                self.formula = example
                self.old_ex = example
            elif self.formula != example:
                self.examples = 'CUSTOM'
        formula = self.formula

        if formula == "": return {'FINISHED'}
        # replace numeric sliders value
        for i, slider in enumerate(f_sliders):
            formula = formula.replace('f'+str(i+1),"{0:.2f}".format(slider))
        for i, slider in enumerate(i_sliders):
            formula =formula.replace('i'+str(i+1),str(slider))
        vertex_group_name = "" + formula
        ob.vertex_groups.new(name=vertex_group_name)

        weight = compute_formula(ob, formula=formula, float_var=f_sliders, int_var=i_sliders)
        # compute_formula() returns a string on failure.
        if type(weight) == str:
            self.report({'ERROR'}, weight)
            return {'CANCELLED'}

        #start_time = timeit.default_timer()
        weight = nan_to_num(weight)
        vg = ob.vertex_groups[-1]
        # A constant formula yields a scalar; otherwise one value per vertex.
        if type(weight) == int or type(weight) == float:
            for i in range(n_verts):
                vg.add([i], weight, 'REPLACE')
        elif type(weight) == ndarray:
            for i in range(n_verts):
                vg.add([i], weight[i], 'REPLACE')
        ob.data.update()
        bpy.ops.object.mode_set(mode='WEIGHT_PAINT')

        # Store formula settings
        new_formula = ob.formula_settings.add()
        new_formula.name = ob.vertex_groups[-1].name
        new_formula.formula = formula
        new_formula.int_var = i_sliders
        new_formula.float_var = f_sliders

        #for f in ob.formula_settings:
        #    print(f.name, f.formula, f.int_var, f.float_var)
        return {'FINISHED'}
+
+
class update_weight_formula(Operator):
    """Re-evaluate the active Vertex Group's name as a weight formula.

    The active Vertex Group's *name* is treated as the formula string
    (the naming convention used by the ``weight_formula`` operator) and
    the group's weights are recomputed from it in place.
    """
    bl_idname = "object.update_weight_formula"
    bl_label = "Update Weight Formula"
    bl_description = "Update an existing Vertex Group. Make sure that the name\nof the active Vertex Group is a valid formula"
    bl_options = {'REGISTER', 'UNDO'}

    @classmethod
    def poll(cls, context):
        # Requires at least one Vertex Group to act on.
        return len(context.object.vertex_groups) > 0

    def execute(self, context):
        ob = context.active_object
        n_verts = len(ob.data.vertices)

        vg = ob.vertex_groups.active
        formula = vg.name
        weight = compute_formula(ob, formula=formula)
        # compute_formula() returns a string on failure.
        if type(weight) == str:
            self.report({'ERROR'}, "The name of the active Vertex Group\nis not a valid Formula")
            return {'CANCELLED'}

        #start_time = timeit.default_timer()
        weight = nan_to_num(weight)
        # A constant formula yields a scalar; otherwise one value per vertex.
        if type(weight) == int or type(weight) == float:
            for i in range(n_verts):
                vg.add([i], weight, 'REPLACE')
        elif type(weight) == ndarray:
            for i in range(n_verts):
                vg.add([i], weight[i], 'REPLACE')
        ob.data.update()
        bpy.ops.object.mode_set(mode='WEIGHT_PAINT')
        return {'FINISHED'}
+
+
class _weight_laplacian(Operator):
    """Bake the graph Laplacian of the active Vertex Group into a new group.

    The Laplacian is accumulated per edge, optionally weighted by edge
    length, then remapped to 0..1 according to the chosen bounds mode.

    Fixes over the previous revision: removed a leftover debug print in
    the write-back loop and narrowed the bare ``except`` clauses.
    """
    bl_idname = "object._weight_laplacian"
    bl_label = "Weight Laplacian"
    bl_description = ("Compute the Vertex Group Laplacian")
    bl_options = {'REGISTER', 'UNDO'}

    bounds : EnumProperty(
        items=(('MANUAL', "Manual Bounds", ""),
            ('POSITIVE', "Positive Only", ""),
            ('NEGATIVE', "Negative Only", ""),
            ('AUTOMATIC', "Automatic Bounds", "")),
        default='AUTOMATIC', name="Bounds")

    mode : EnumProperty(
        items=(('LENGTH', "Length Weight", ""),
            ('SIMPLE', "Simple", "")),
        default='SIMPLE', name="Evaluation Mode")

    min_def : FloatProperty(
        name="Min", default=0, soft_min=-1, soft_max=0,
        description="Laplacian value with 0 weight")

    max_def : FloatProperty(
        name="Max", default=0.5, soft_min=0, soft_max=5,
        description="Laplacian value with 1 weight")

    # Human-readable summary of the bounds actually used, shown in the UI
    # and appended to the generated group's name.
    bounds_string = ""

    frame = None

    @classmethod
    def poll(cls, context):
        return len(context.object.vertex_groups) > 0

    def draw(self, context):
        layout = self.layout
        col = layout.column(align=True)
        col.label(text="Evaluation Mode")
        col.prop(self, "mode", text="")
        col.label(text="Bounds")
        col.prop(self, "bounds", text="")
        if self.bounds == 'MANUAL':
            col.label(text="Strain Rate \u03B5:")
            col.prop(self, "min_def")
            col.prop(self, "max_def")
        col.label(text="\u03B5" + ": from " + self.bounds_string)


    def execute(self, context):
        try: ob = context.object
        except Exception:
            self.report({'ERROR'}, "Please select an Object")
            return {'CANCELLED'}

        group_id = ob.vertex_groups.active_index
        input_group = ob.vertex_groups[group_id].name

        group_name = "Laplacian"
        ob.vertex_groups.new(name=group_name)
        me = ob.data
        bm = bmesh.new()
        bm.from_mesh(me)
        bm.edges.ensure_lookup_table()

        # Weights of the input group (0 for unassigned vertices).
        weight = []
        for v in me.vertices:
            try:
                weight.append(ob.vertex_groups[input_group].weight(v.index))
            except Exception:
                weight.append(0)

        # Accumulate the (optionally length-weighted) Laplacian per edge.
        n_verts = len(bm.verts)
        lap = [0]*n_verts
        for e in bm.edges:
            if self.mode == 'LENGTH':
                length = e.calc_length()
                if length == 0: continue
                id0 = e.verts[0].index
                id1 = e.verts[1].index
                lap[id0] += weight[id1]/length - weight[id0]/length
                lap[id1] += weight[id0]/length - weight[id1]/length
            else:
                id0 = e.verts[0].index
                id1 = e.verts[1].index
                lap[id0] += weight[id1] - weight[id0]
                lap[id1] += weight[id0] - weight[id1]

        # Outliers beyond 2 standard deviations are excluded from the
        # automatic bounds estimation.
        mean_lap = mean(lap)
        stdev_lap = stdev(lap)
        filter_lap = [i for i in lap if mean_lap-2*stdev_lap < i < mean_lap+2*stdev_lap]
        if self.bounds == 'MANUAL':
            min_def = self.min_def
            max_def = self.max_def
        elif self.bounds == 'AUTOMATIC':
            min_def = min(filter_lap)
            max_def = max(filter_lap)
            self.min_def = min_def
            self.max_def = max_def
        elif self.bounds == 'NEGATIVE':
            min_def = 0
            max_def = min(filter_lap)
            self.min_def = min_def
            self.max_def = max_def
        elif self.bounds == 'POSITIVE':
            min_def = 0
            max_def = max(filter_lap)
            self.min_def = min_def
            self.max_def = max_def
        delta_def = max_def - min_def

        # check undeformed errors
        if delta_def == 0: delta_def = 0.0001

        # Remap and bake into the new group.
        for i in range(len(lap)):
            val = (lap[i]-min_def)/delta_def
            ob.vertex_groups[-1].add([i], val, 'REPLACE')
        self.bounds_string = str(round(min_def,2)) + " to " + str(round(max_def,2))
        ob.vertex_groups[-1].name = group_name + " " + self.bounds_string
        ob.vertex_groups.update()
        ob.data.update()
        bpy.ops.object.mode_set(mode='WEIGHT_PAINT')
        bm.free()
        return {'FINISHED'}
+
class ok_weight_laplacian(Operator):
    """Bake a normalized, length-weighted Laplacian of the active group.

    NOTE(review): this class declares the same ``bl_idname``
    ("object.weight_laplacian") as the ``weight_laplacian`` class below,
    so only one of the two can be registered at a time.

    Fixes over the previous revision: removed an entire unused
    neighbor-scanning pass (``max_edges``/``n_neighbors``/
    ``id_neighbors``/``lap_map`` were computed but never read) and the
    surrounding commented-out code; narrowed the bare ``except`` clauses.
    """
    bl_idname = "object.weight_laplacian"
    bl_label = "Weight Laplacian"
    bl_description = ("Compute the Vertex Group Laplacian")
    bl_options = {'REGISTER', 'UNDO'}

    bounds_string = ""

    frame = None

    @classmethod
    def poll(cls, context):
        return len(context.object.vertex_groups) > 0


    def execute(self, context):
        try: ob = context.object
        except Exception:
            self.report({'ERROR'}, "Please select an Object")
            return {'CANCELLED'}

        me = ob.data
        bm = bmesh.new()
        bm.from_mesh(me)
        bm.edges.ensure_lookup_table()

        group_id = ob.vertex_groups.active_index
        input_group = ob.vertex_groups[group_id].name

        group_name = "Laplacian"
        ob.vertex_groups.new(name=group_name)

        # Weights of the input group (0 for unassigned vertices).
        a = []
        for v in me.vertices:
            try:
                a.append(ob.vertex_groups[input_group].weight(v.index))
            except Exception:
                a.append(0)
        a = array(a)

        n_verts = len(bm.verts)

        # Length-weighted Laplacian accumulated per edge, averaged by the
        # number of edges incident to each vertex, then normalized.
        lap = zeros((n_verts))
        n_records = zeros((n_verts))
        for e in bm.edges:
            id0 = e.verts[0].index
            id1 = e.verts[1].index
            length = e.calc_length()
            if length == 0: continue
            lap[id0] += (a[id1] - a[id0])/length
            lap[id1] += (a[id0] - a[id1])/length
            n_records[id0]+=1
            n_records[id1]+=1
        # NOTE(review): isolated vertices leave n_records at 0 and yield
        # NaN here (preserved from the original implementation).
        lap /= n_records
        lap /= max(lap)

        for i in range(n_verts):
            ob.vertex_groups['Laplacian'].add([i], lap[i], 'REPLACE')
        ob.vertex_groups.update()
        ob.data.update()
        bpy.ops.object.mode_set(mode='WEIGHT_PAINT')
        bm.free()
        return {'FINISHED'}
+
class weight_laplacian(Operator):
    """Bake the magnitude of the tangential weight gradient per vertex.

    For every edge, the weight difference per unit length is projected
    onto the tangent plane of each end vertex and accumulated as a
    vector; the per-vertex vector length (averaged over incident edges
    and normalized to 0..1) is written into a new "Laplacian" group.

    Fixes over the previous revision: removed the leftover
    ``print(lap)`` debug output and stale commented-out code; narrowed
    the bare ``except`` clauses.
    """
    bl_idname = "object.weight_laplacian"
    bl_label = "Weight Laplacian"
    bl_description = ("Compute the Vertex Group Laplacian")
    bl_options = {'REGISTER', 'UNDO'}

    bounds_string = ""

    frame = None

    @classmethod
    def poll(cls, context):
        return len(context.object.vertex_groups) > 0


    def execute(self, context):
        try: ob = context.object
        except Exception:
            self.report({'ERROR'}, "Please select an Object")
            return {'CANCELLED'}

        me = ob.data
        bm = bmesh.new()
        bm.from_mesh(me)
        bm.edges.ensure_lookup_table()
        n_verts = len(me.vertices)

        group_id = ob.vertex_groups.active_index
        input_group = ob.vertex_groups[group_id].name

        group_name = "Laplacian"
        vg = ob.vertex_groups.new(name=group_name)

        # Weights of the input group read through the bmesh deform layer.
        dvert_lay = bm.verts.layers.deform.active
        weight = bmesh_get_weight_numpy(group_id, dvert_lay, bm.verts)

        # Accumulate per-vertex gradient vectors over the edges.
        lap = [Vector((0,0,0)) for i in range(n_verts)]
        n_records = zeros((n_verts))
        for e in bm.edges:
            vert0 = e.verts[0]
            vert1 = e.verts[1]
            id0 = vert0.index
            id1 = vert1.index
            v0 = vert0.co
            v1 = vert1.co
            v01 = v1-v0
            v10 = -v01
            # Project the edge direction onto each vertex tangent plane.
            v01 -= v01.project(vert0.normal)
            v10 -= v10.project(vert1.normal)
            length = e.calc_length()
            if length == 0: continue
            dw = (weight[id1] - weight[id0])/length
            lap[id0] += v01.normalized() * dw
            lap[id1] -= v10.normalized() * dw
            n_records[id0]+=1
            n_records[id1]+=1
        # Average by incident-edge count, take magnitudes, normalize.
        # NOTE(review): isolated vertices (n_records 0) raise a division
        # error here (preserved from the original implementation).
        lap = [l.length/r for r,l in zip(n_records,lap)]

        lap = np.array(lap)
        lap /= np.max(lap)
        lap = list(lap)

        for i in range(n_verts):
            vg.add([i], lap[i], 'REPLACE')
        ob.vertex_groups.update()
        ob.data.update()
        bpy.ops.object.mode_set(mode='WEIGHT_PAINT')
        bm.free()
        return {'FINISHED'}
+
+
class reaction_diffusion(Operator):
    """Run a reaction-diffusion simulation on Vertex Groups "A" and "B".

    Each step applies the Gray-Scott update (feed rate f, kill rate k)
    using a simple edge-based graph Laplacian, writing the fields back
    to the groups and forcing a viewport redraw so progress is visible.
    """
    bl_idname = "object.reaction_diffusion"
    bl_label = "Reaction Diffusion"
    bl_description = ("Run a Reaction-Diffusion based on existing Vertex Groups: A and B")
    bl_options = {'REGISTER', 'UNDO'}

    steps : IntProperty(
        name="Steps", default=10, min=0, soft_max=50,
        description="Number of Steps")

    dt : FloatProperty(
        name="dt", default=0.2, min=0, soft_max=0.2,
        description="Time Step")

    diff_a : FloatProperty(
        name="Diff A", default=1, min=0, soft_max=2,
        description="Diffusion A")

    diff_b : FloatProperty(
        name="Diff B", default=0.5, min=0, soft_max=2,
        description="Diffusion B")

    f : FloatProperty(
        name="f", default=0.055, min=0, soft_min=0.01, soft_max=0.06, max=0.1, precision=4,
        description="Feed Rate")

    k : FloatProperty(
        name="k", default=0.062, min=0, soft_min=0.035, soft_max=0.065, max=0.1, precision=4,
        description="Kill Rate")

    bounds_string = ""

    frame = None

    @classmethod
    def poll(cls, context):
        return len(context.object.vertex_groups) > 0


    def execute(self, context):
        #bpy.app.handlers.frame_change_post.remove(reaction_diffusion_def)
        # Register frame-change handlers so the simulation can also be
        # advanced by the animation system.
        reaction_diffusion_add_handler(self, context)
        set_animatable_fix_handler(self, context)
        try: ob = context.object
        except:
            self.report({'ERROR'}, "Please select an Object")
            return {'CANCELLED'}

        me = ob.data
        bm = bmesh.new()
        bm.from_mesh(me)
        bm.edges.ensure_lookup_table()

        # store weight values (0 for vertices not in the A/B groups)
        a = []
        b = []
        for v in me.vertices:
            try:
                a.append(ob.vertex_groups["A"].weight(v.index))
            except:
                a.append(0)
            try:
                b.append(ob.vertex_groups["B"].weight(v.index))
            except:
                b.append(0)

        a = array(a)
        b = array(b)
        f = self.f
        k = self.k
        diff_a = self.diff_a
        diff_b = self.diff_b
        dt = self.dt
        n_verts = len(bm.verts)

        for i in range(self.steps):

            # Edge-based graph Laplacian of both fields.
            lap_a = zeros((n_verts))#[0]*n_verts
            lap_b = zeros((n_verts))#[0]*n_verts
            for e in bm.edges:
                id0 = e.verts[0].index
                id1 = e.verts[1].index
                lap_a[id0] += a[id1] - a[id0]
                lap_a[id1] += a[id0] - a[id1]
                lap_b[id0] += b[id1] - b[id0]
                lap_b[id1] += b[id0] - b[id1]
            # Gray-Scott update: A is fed at rate f, converted to B by
            # the reaction a*b^2, and B decays at rate k+f.
            ab2 = a*b**2
            a += (diff_a*lap_a - ab2 + f*(1-a))*dt
            b += (diff_b*lap_b + ab2 - (k+f)*b)*dt

            # Write the fields back and redraw each step so progress is
            # visible while the operator runs.
            for i in range(n_verts):
                ob.vertex_groups['A'].add([i], a[i], 'REPLACE')
                ob.vertex_groups['B'].add([i], b[i], 'REPLACE')
            ob.vertex_groups.update()
            ob.data.update()

            bpy.ops.wm.redraw_timer(type='DRAW_WIN_SWAP', iterations=1)

        bpy.ops.object.mode_set(mode='WEIGHT_PAINT')
        bm.free()
        return {'FINISHED'}
+
+
class edges_deformation(Operator):
    """Bake edge strain (relative length change under modifiers) as weights.

    Compares each edge's length in the base mesh against the evaluated
    (modifier-applied) mesh, gathers a per-vertex strain from the
    incident edges, remaps it to 0..1 via the chosen bounds mode and
    writes it into a new Vertex Group.
    """
    bl_idname = "object.edges_deformation"
    bl_label = "Edges Deformation"
    bl_description = ("Compute Weight based on the deformation of edges"+
        "according to visible modifiers.")
    bl_options = {'REGISTER', 'UNDO'}

    bounds : EnumProperty(
        items=(('MANUAL', "Manual Bounds", ""),
            ('COMPRESSION', "Compressed Only", ""),
            ('TENSION', "Extended Only", ""),
            ('AUTOMATIC', "Automatic Bounds", "")),
        default='AUTOMATIC', name="Bounds")

    mode : EnumProperty(
        items=(('MAX', "Max Deformation", ""),
            ('MEAN', "Average Deformation", "")),
        default='MEAN', name="Evaluation Mode")

    min_def : FloatProperty(
        name="Min", default=0, soft_min=-1, soft_max=0,
        description="Deformations with 0 weight")

    max_def : FloatProperty(
        name="Max", default=0.5, soft_min=0, soft_max=5,
        description="Deformations with 1 weight")

    # Human-readable summary of the bounds used, appended to the group name.
    bounds_string = ""

    # Frame on which a physics simulation was last evaluated, so re-running
    # the operator at frame 1 can jump back to it.
    frame = None

    @classmethod
    def poll(cls, context):
        return len(context.object.modifiers) > 0

    def draw(self, context):
        layout = self.layout
        col = layout.column(align=True)
        col.label(text="Evaluation Mode")
        col.prop(self, "mode", text="")
        col.label(text="Bounds")
        col.prop(self, "bounds", text="")
        if self.bounds == 'MANUAL':
            col.label(text="Strain Rate \u03B5:")
            col.prop(self, "min_def")
            col.prop(self, "max_def")
        col.label(text="\u03B5" + ": from " + self.bounds_string)

    def execute(self, context):
        try: ob = context.object
        except:
            self.report({'ERROR'}, "Please select an Object")
            return {'CANCELLED'}

        # check if the object is Cloth or Softbody: simulations are only
        # meaningful away from the first frame, so restore the last
        # evaluated frame when re-running at frame 1.
        physics = False
        for m in ob.modifiers:
            if m.type == 'CLOTH' or m.type == 'SOFT_BODY':
                physics = True
                if context.scene.frame_current == 1 and self.frame != None:
                    context.scene.frame_current = self.frame
                break
        if not physics: self.frame = None

        if self.mode == 'MEAN': group_name = "Average Deformation"
        elif self.mode == 'MAX': group_name = "Max Deformation"
        ob.vertex_groups.new(name=group_name)
        me0 = ob.data

        me = simple_to_mesh(ob) #ob.to_mesh(preserve_all_data_layers=True, depsgraph=bpy.context.evaluated_depsgraph_get()).copy()
        # Strain comparison requires identical topology before/after modifiers.
        if len(me.vertices) != len(me0.vertices) or len(me.edges) != len(me0.edges):
            self.report({'ERROR'}, "The topology of the object should be" +
                "unaltered")
            return {'CANCELLED'}

        bm0 = bmesh.new()
        bm0.from_mesh(me0)
        bm = bmesh.new()
        bm.from_mesh(me)
        # Per-edge engineering strain (l1 - l0) / l0.
        deformations = []
        for e0, e in zip(bm0.edges, bm.edges):
            try:
                l0 = e0.calc_length()
                l1 = e.calc_length()
                epsilon = (l1 - l0)/l0
                deformations.append(epsilon)
            except: deformations.append(1)
        # Per-vertex strain from the incident edges (mean or signed max).
        v_deformations = []
        for v in bm.verts:
            vdef = []
            for e in v.link_edges:
                vdef.append(deformations[e.index])
            if self.mode == 'MEAN': v_deformations.append(mean(vdef))
            elif self.mode == 'MAX': v_deformations.append(max(vdef, key=abs))
            #elif self.mode == 'MIN': v_deformations.append(min(vdef, key=abs))

        if self.bounds == 'MANUAL':
            min_def = self.min_def
            max_def = self.max_def
        elif self.bounds == 'AUTOMATIC':
            min_def = min(v_deformations)
            max_def = max(v_deformations)
            self.min_def = min_def
            self.max_def = max_def
        elif self.bounds == 'COMPRESSION':
            min_def = 0
            max_def = min(v_deformations)
            self.min_def = min_def
            self.max_def = max_def
        elif self.bounds == 'TENSION':
            min_def = 0
            max_def = max(v_deformations)
            self.min_def = min_def
            self.max_def = max_def
        delta_def = max_def - min_def

        # check undeformed errors
        if delta_def == 0:
            if self.bounds == 'MANUAL':
                delta_def = 0.0001
            else:
                message = "The object doesn't have deformations."
                if physics:
                    message = message + ("\nIf you are using Physics try to " +
                        "save it in the cache before.")
                self.report({'ERROR'}, message)
                return {'CANCELLED'}
        else:
            if physics:
                self.frame = context.scene.frame_current

        # Remap strain to 0..1 and bake into the new group.
        for i in range(len(v_deformations)):
            weight = (v_deformations[i] - min_def)/delta_def
            ob.vertex_groups[-1].add([i], weight, 'REPLACE')
        self.bounds_string = str(round(min_def,2)) + " to " + str(round(max_def,2))
        ob.vertex_groups[-1].name = group_name + " " + self.bounds_string
        ob.vertex_groups.update()
        ob.data.update()
        bpy.ops.object.mode_set(mode='WEIGHT_PAINT')
        bpy.data.meshes.remove(me)
        bm.free()
        bm0.free()
        return {'FINISHED'}
+
class edges_bending(Operator):
    """Bake per-vertex edge bending into a new Vertex Group.

    Measures the change of the signed face angle across every edge between
    the rest mesh and the evaluated mesh (visible modifiers applied) and
    stores the mean per-vertex value, remapped according to the bounds.
    """
    bl_idname = "object.edges_bending"
    bl_label = "Edges Bending"
    bl_description = ("Compute Weight based on the bending of edges "
                      "according to visible modifiers.")
    bl_options = {'REGISTER', 'UNDO'}

    bounds : EnumProperty(
        items=(('MANUAL', "Manual Bounds", ""),
            ('POSITIVE', "Positive Only", ""),
            ('NEGATIVE', "Negative Only", ""),
            ('UNSIGNED', "Absolute Bending", ""),
            ('AUTOMATIC', "Signed Bending", "")),
        default='AUTOMATIC', name="Bounds")

    min_def : FloatProperty(
        name="Min", default=-10, soft_min=-45, soft_max=45,
        description="Deformations with 0 weight")

    max_def : FloatProperty(
        name="Max", default=10, soft_min=-45, soft_max=45,
        description="Deformations with 1 weight")

    # remap interval, appended to the group name after execution
    bounds_string = ""
    # frame used to evaluate the physics simulation (Cloth/Softbody)
    frame = None

    @classmethod
    def poll(cls, context):
        return len(context.object.modifiers) > 0

    def draw(self, context):
        layout = self.layout
        layout.label(text="Bounds")
        layout.prop(self, "bounds", text="")
        if self.bounds == 'MANUAL':
            layout.prop(self, "min_def")
            layout.prop(self, "max_def")

    def execute(self, context):
        ob = context.object
        if ob is None:
            self.report({'ERROR'}, "Please select an Object")
            return {'CANCELLED'}

        group_name = "Edges Bending"
        ob.vertex_groups.new(name=group_name)

        # check if the object is Cloth or Softbody
        physics = False
        for m in ob.modifiers:
            if m.type in ('CLOTH', 'SOFT_BODY'):
                physics = True
                if context.scene.frame_current == 1 and self.frame is not None:
                    context.scene.frame_current = self.frame
                break
        if not physics:
            self.frame = None

        me0 = ob.data
        # evaluated copy of the mesh, with all visible modifiers applied
        me = simple_to_mesh(ob)
        if len(me.vertices) != len(me0.vertices) or len(me.edges) != len(me0.edges):
            self.report({'ERROR'}, "The topology of the object should be "
                "unaltered")
            # BUGFIX: abort here — the original fell through and kept
            # processing two meshes with mismatched topology
            return {'CANCELLED'}

        bm0 = bmesh.new()
        bm0.from_mesh(me0)
        bm = bmesh.new()
        bm.from_mesh(me)

        # change of the signed face angle across every edge
        deformations = []
        for e0, e in zip(bm0.edges, bm.edges):
            try:
                ang = e.calc_face_angle_signed()
                ang0 = e0.calc_face_angle_signed()
                if self.bounds == 'UNSIGNED':
                    deformations.append(abs(ang - ang0))
                else:
                    deformations.append(ang - ang0)
            except ValueError:
                # boundary / non-manifold edges have no face angle
                deformations.append(0)

        # mean bending of the edges linked to each vertex
        v_deformations = []
        for v in bm.verts:
            vdef = [deformations[e.index] for e in v.link_edges]
            v_deformations.append(mean(vdef))

        if self.bounds == 'MANUAL':
            min_def = radians(self.min_def)
            max_def = radians(self.max_def)
        elif self.bounds == 'AUTOMATIC':
            min_def = min(v_deformations)
            max_def = max(v_deformations)
        elif self.bounds == 'POSITIVE':
            min_def = 0
            max_def = min(v_deformations)
        elif self.bounds == 'NEGATIVE':
            min_def = 0
            max_def = max(v_deformations)
        elif self.bounds == 'UNSIGNED':
            min_def = 0
            max_def = max(v_deformations)
        delta_def = max_def - min_def

        # check undeformed errors
        if delta_def == 0:
            if self.bounds == 'MANUAL':
                delta_def = 0.0001
            else:
                message = "The object doesn't have deformations."
                if physics:
                    message = message + ("\nIf you are using Physics try to " +
                        "save it in the cache before.")
                self.report({'ERROR'}, message)
                return {'CANCELLED'}
        else:
            if physics:
                self.frame = context.scene.frame_current

        # write normalized weights into the new vertex group
        vg = ob.vertex_groups[-1]
        for i, vdef in enumerate(v_deformations):
            vg.add([i], (vdef - min_def) / delta_def, 'REPLACE')
        self.bounds_string = str(round(min_def, 2)) + " to " + str(round(max_def, 2))
        vg.name = group_name + " " + self.bounds_string
        ob.vertex_groups.update()
        ob.data.update()
        bpy.ops.object.mode_set(mode='WEIGHT_PAINT')
        bpy.data.meshes.remove(me)
        bm0.free()
        bm.free()
        return {'FINISHED'}
+
class weight_contour_displace(Operator):
    """Cut the mesh along iso-curves of the active Vertex Group and build a
    new object whose group is remapped / alternated, optionally driving a
    Displace modifier.
    """
    bl_idname = "object.weight_contour_displace"
    bl_label = "Contour Displace"
    bl_description = ("")
    bl_options = {'REGISTER', 'UNDO'}

    # operator options shown in the invoke dialog
    use_modifiers : BoolProperty(
        name="Use Modifiers", default=True,
        description="Apply all the modifiers")
    min_iso : FloatProperty(
        name="Min Iso Value", default=0.49, min=0, max=1,
        description="Threshold value")
    max_iso : FloatProperty(
        name="Max Iso Value", default=0.51, min=0, max=1,
        description="Threshold value")
    n_cuts : IntProperty(
        name="Cuts", default=2, min=1, soft_max=10,
        description="Number of cuts in the selected range of values")
    bool_displace : BoolProperty(
        name="Add Displace", default=True, description="Add Displace Modifier")
    bool_flip : BoolProperty(
        name="Flip", default=False, description="Flip Output Weight")

    weight_mode : EnumProperty(
        items=[('Remapped', 'Remapped', 'Remap values'),
            ('Alternate', 'Alternate', 'Alternate 0 and 1'),
            ('Original', 'Original', 'Keep original Vertex Group')],
        name="Weight", description="Choose how to convert vertex group",
        default="Remapped", options={'LIBRARY_EDITABLE'})

    @classmethod
    def poll(cls, context):
        return len(context.object.vertex_groups) > 0

    def invoke(self, context, event):
        return context.window_manager.invoke_props_dialog(self, width=350)

    def execute(self, context):
        """Split crossed faces at every iso value, then emit the new object."""
        start_time = timeit.default_timer()
        try:
            check = context.object.vertex_groups[0]
        except:
            self.report({'ERROR'}, "The object doesn't have Vertex Groups")
            return {'CANCELLED'}

        ob0 = context.object

        group_id = ob0.vertex_groups.active_index
        vertex_group_name = ob0.vertex_groups[group_id].name

        bpy.ops.object.mode_set(mode='EDIT')
        bpy.ops.mesh.select_all(action='SELECT')
        bpy.ops.object.mode_set(mode='OBJECT')
        if self.use_modifiers:
            #me0 = ob0.to_mesh(preserve_all_data_layers=True, depsgraph=bpy.context.evaluated_depsgraph_get()).copy()
            me0 = simple_to_mesh(ob0)
        else:
            me0 = ob0.data.copy()

        # generate new bmesh
        bm = bmesh.new()
        bm.from_mesh(me0)
        bm.verts.ensure_lookup_table()
        bm.edges.ensure_lookup_table()
        bm.faces.ensure_lookup_table()

        # store weight values
        weight = []
        ob = bpy.data.objects.new("temp", me0)
        for g in ob0.vertex_groups:
            ob.vertex_groups.new(name=g.name)
        for v in me0.vertices:
            try:
                weight.append(ob.vertex_groups[vertex_group_name].weight(v.index))
            except:
                # vertices not assigned to the group count as weight 0
                weight.append(0)

        # define iso values
        iso_values = []
        for i_cut in range(self.n_cuts):
            delta_iso = abs(self.max_iso - self.min_iso)
            min_iso = min(self.min_iso, self.max_iso)
            max_iso = max(self.min_iso, self.max_iso)
            if delta_iso == 0: iso_val = min_iso
            elif self.n_cuts > 1: iso_val = i_cut/(self.n_cuts-1)*delta_iso + min_iso
            else: iso_val = (self.max_iso + self.min_iso)/2
            iso_values.append(iso_val)

        # Start Cuts Iterations
        filtered_edges = bm.edges
        for iso_val in iso_values:
            delete_edges = []

            # faces whose weight range straddles the current iso value
            faces_mask = []
            for f in bm.faces:
                # 2 is a sentinel for "not yet initialized" (weights are 0..1)
                w_min = 2
                w_max = 2
                for v in f.verts:
                    w = weight[v.index]
                    if w_min == 2:
                        w_max = w_min = w
                    if w > w_max: w_max = w
                    if w < w_min: w_min = w
                    if w_min < iso_val and w_max > iso_val:
                        faces_mask.append(f)
                        break

            #link_faces = [[f for f in e.link_faces] for e in bm.edges]

            #faces_todo = [f.select for f in bm.faces]
            #faces_todo = [True for f in bm.faces]
            verts = []
            edges = []
            edges_id = {}
            _filtered_edges = []
            n_verts = len(bm.verts)
            count = n_verts
            for e in filtered_edges:
                #id0 = e.vertices[0]
                #id1 = e.vertices[1]
                id0 = e.verts[0].index
                id1 = e.verts[1].index
                w0 = weight[id0]
                w1 = weight[id1]

                if w0 == w1: continue
                elif w0 > iso_val and w1 > iso_val:
                    _filtered_edges.append(e)
                    continue
                elif w0 < iso_val and w1 < iso_val: continue
                elif w0 == iso_val or w1 == iso_val:
                    _filtered_edges.append(e)
                    continue
                else:
                    # edge crosses the iso value: interpolate a new vertex on it
                    v0 = bm.verts[id0].co
                    v1 = bm.verts[id1].co
                    v = v0.lerp(v1, (iso_val-w0)/(w1-w0))
                    if e not in delete_edges:
                        delete_edges.append(e)
                    verts.append(v)
                    # remember the new vertex index for both edge directions
                    edges_id[str(id0)+"_"+str(id1)] = count
                    edges_id[str(id1)+"_"+str(id0)] = count
                    count += 1
                    _filtered_edges.append(e)
            filtered_edges = _filtered_edges
            splitted_faces = []

            switch = False
            # splitting faces
            for f in faces_mask:
                # create sub-faces slots. Once a new vertex is reached it will
                # change slot, storing the next vertices for a new face.
                build_faces = [[],[]]
                #switch = False
                verts0 = [v.index for v in f.verts]
                verts1 = list(verts0)
                verts1.append(verts1.pop(0)) # shift list
                for id0, id1 in zip(verts0, verts1):

                    # add first vertex to active slot
                    build_faces[switch].append(id0)

                    # try to split edge
                    try:
                        # check if the edge must be splitted
                        new_vert = edges_id[str(id0)+"_"+str(id1)]
                        # add new vertex
                        build_faces[switch].append(new_vert)
                        # if there is an open face on the other slot
                        if len(build_faces[not switch]) > 0:
                            # store actual face
                            splitted_faces.append(build_faces[switch])
                            # reset actual faces and switch
                            build_faces[switch] = []
                        # change face slot
                        switch = not switch
                        # continue previous face
                        build_faces[switch].append(new_vert)
                    except: pass
                if len(build_faces[not switch]) == 2:
                    build_faces[not switch].append(id0)
                if len(build_faces[not switch]) > 2:
                    splitted_faces.append(build_faces[not switch])
                # add last face
                splitted_faces.append(build_faces[switch])
                #del_faces.append(f.index)

            # adding new vertices
            _new_vert = bm.verts.new
            for v in verts: new_vert = _new_vert(v)
            bm.verts.index_update()
            bm.verts.ensure_lookup_table()
            # adding new faces
            _new_face = bm.faces.new
            missed_faces = []
            added_faces = []
            for f in splitted_faces:
                try:
                    face_verts = [bm.verts[i] for i in f]
                    new_face = _new_face(face_verts)
                    for e in new_face.edges:
                        filtered_edges.append(e)
                except:
                    missed_faces.append(f)

            bm.faces.ensure_lookup_table()
            # updating weight values
            weight = weight + [iso_val]*len(verts)

            # deleting old edges/faces
            _remove_edge = bm.edges.remove
            bm.edges.ensure_lookup_table()
            for e in delete_edges:
                _remove_edge(e)
            _filtered_edges = []
            for e in filtered_edges:
                if e not in delete_edges: _filtered_edges.append(e)
            filtered_edges = _filtered_edges

        name = ob0.name + '_ContourDisp'
        me = bpy.data.meshes.new(name)
        bm.to_mesh(me)
        bm.free()
        ob = bpy.data.objects.new(name, me)

        # Link object to scene and make active
        scn = context.scene
        context.collection.objects.link(ob)
        context.view_layer.objects.active = ob
        ob.select_set(True)
        ob0.select_set(False)

        # generate new vertex group
        for g in ob0.vertex_groups:
            ob.vertex_groups.new(name=g.name)
        #ob.vertex_groups.new(name=vertex_group_name)

        # NOTE(review): weight was already extended by [iso_val]*len(verts)
        # inside the loop above, so this seems to append the last level's
        # vertices twice — verify the intended length against len(me.vertices)
        all_weight = weight + [iso_val]*len(verts)
        #mult = 1/(1-iso_val)
        for id in range(len(all_weight)):
            #if False: w = (all_weight[id]-iso_val)*mult
            w = all_weight[id]
            if self.weight_mode == 'Alternate':
                # ping-pong remap between consecutive iso values
                direction = self.bool_flip
                for i in range(len(iso_values)-1):
                    val0, val1 = iso_values[i], iso_values[i+1]
                    if val0 < w <= val1:
                        if direction: w1 = (w-val0)/(val1-val0)
                        else: w1 = (val1-w)/(val1-val0)
                    direction = not direction
                if w < iso_values[0]: w1 = not self.bool_flip
                if w > iso_values[-1]: w1 = not direction
            elif self.weight_mode == 'Remapped':
                # linear remap of the min_iso..max_iso band to 0..1
                if w < min_iso: w1 = 0
                elif w > max_iso: w1 = 1
                else: w1 = (w - min_iso)/delta_iso
            else:
                if self.bool_flip: w1 = 1-w
                else: w1 = w
            ob.vertex_groups[vertex_group_name].add([id], w1, 'REPLACE')

        ob.vertex_groups.active_index = group_id

        # align new object
        ob.matrix_world = ob0.matrix_world

        # Displace Modifier
        if self.bool_displace:
            ob.modifiers.new(type='DISPLACE', name='Displace')
            ob.modifiers["Displace"].mid_level = 0
            ob.modifiers["Displace"].strength = 0.1
            ob.modifiers['Displace'].vertex_group = vertex_group_name

        bpy.ops.object.mode_set(mode='EDIT')
        bpy.ops.object.mode_set(mode='WEIGHT_PAINT')
        print("Contour Displace time: " + str(timeit.default_timer() - start_time) + " sec")

        bpy.data.meshes.remove(me0)

        return {'FINISHED'}
+
class weight_contour_mask(Operator):
    """Generate a new object keeping only the part of the mesh whose active
    Vertex Group weight is above the Iso value; crossed faces are split
    exactly at the iso contour.
    """
    bl_idname = "object.weight_contour_mask"
    bl_label = "Contour Mask"
    bl_description = ("")
    bl_options = {'REGISTER', 'UNDO'}

    use_modifiers : BoolProperty(
        name="Use Modifiers", default=True,
        description="Apply all the modifiers")
    iso : FloatProperty(
        name="Iso Value", default=0.5, soft_min=0, soft_max=1,
        description="Threshold value")
    bool_solidify : BoolProperty(
        name="Solidify", default=True, description="Add Solidify Modifier")
    offset : FloatProperty(
        name="Offset", default=1, min=0, max=1,
        description="Offset")
    thickness : FloatProperty(
        name="Thickness", default=0.5, soft_min=0, soft_max=1,
        description="Thickness")
    normalize_weight : BoolProperty(
        name="Normalize Weight", default=True,
        description="Normalize weight of remaining vertices")

    @classmethod
    def poll(cls, context):
        return len(context.object.vertex_groups) > 0

    def invoke(self, context, event):
        return context.window_manager.invoke_props_dialog(self, width=350)

    def execute(self, context):
        """Split boundary faces at the iso contour, drop geometry below it."""
        start_time = timeit.default_timer()
        try:
            check = context.object.vertex_groups[0]
        except:
            self.report({'ERROR'}, "The object doesn't have Vertex Groups")
            return {'CANCELLED'}

        ob0 = bpy.context.object

        iso_val = self.iso
        group_id = ob0.vertex_groups.active_index
        vertex_group_name = ob0.vertex_groups[group_id].name

        bpy.ops.object.mode_set(mode='EDIT')
        bpy.ops.mesh.select_all(action='SELECT')
        bpy.ops.object.mode_set(mode='OBJECT')
        if self.use_modifiers:
            me0 = simple_to_mesh(ob0)#ob0.to_mesh(preserve_all_data_layers=True, depsgraph=bpy.context.evaluated_depsgraph_get()).copy()
        else:
            me0 = ob0.data.copy()

        # generate new bmesh
        bm = bmesh.new()
        bm.from_mesh(me0)
        bm.verts.ensure_lookup_table()
        bm.edges.ensure_lookup_table()
        bm.faces.ensure_lookup_table()

        # store weight values
        weight = []
        ob = bpy.data.objects.new("temp", me0)
        for g in ob0.vertex_groups:
            ob.vertex_groups.new(name=g.name)
        for v in me0.vertices:
            try:
                #weight.append(v.groups[vertex_group_name].weight)
                weight.append(ob.vertex_groups[vertex_group_name].weight(v.index))
            except:
                # vertices not assigned to the group count as weight 0
                weight.append(0)

        # faces whose weight range straddles the iso value
        faces_mask = []
        for f in bm.faces:
            # 2 is a sentinel for "not yet initialized" (weights are 0..1)
            w_min = 2
            w_max = 2
            for v in f.verts:
                w = weight[v.index]
                if w_min == 2:
                    w_max = w_min = w
                if w > w_max: w_max = w
                if w < w_min: w_min = w
                if w_min < iso_val and w_max > iso_val:
                    faces_mask.append(f)
                    break

        filtered_edges = bm.edges# me0.edges
        faces_todo = [f.select for f in bm.faces]
        verts = []
        edges = []
        delete_edges = []
        edges_id = {}
        _filtered_edges = []
        n_verts = len(bm.verts)
        count = n_verts
        for e in filtered_edges:
            id0 = e.verts[0].index
            id1 = e.verts[1].index
            w0 = weight[id0]
            w1 = weight[id1]

            if w0 == w1: continue
            elif w0 > iso_val and w1 > iso_val:
                continue
            elif w0 < iso_val and w1 < iso_val: continue
            elif w0 == iso_val or w1 == iso_val: continue
            else:
                # edge crosses the iso value: interpolate a new vertex on it
                v0 = me0.vertices[id0].co
                v1 = me0.vertices[id1].co
                v = v0.lerp(v1, (iso_val-w0)/(w1-w0))
                delete_edges.append(e)
                verts.append(v)
                # remember the new vertex index for both edge directions
                edges_id[str(id0)+"_"+str(id1)] = count
                edges_id[str(id1)+"_"+str(id0)] = count
                count += 1

        splitted_faces = []

        switch = False
        # splitting faces
        for f in faces_mask:
            # create sub-faces slots. Once a new vertex is reached it will
            # change slot, storing the next vertices for a new face.
            build_faces = [[],[]]
            #switch = False
            verts0 = list(me0.polygons[f.index].vertices)
            verts1 = list(verts0)
            verts1.append(verts1.pop(0)) # shift list
            for id0, id1 in zip(verts0, verts1):

                # add first vertex to active slot
                build_faces[switch].append(id0)

                # try to split edge
                try:
                    # check if the edge must be splitted
                    new_vert = edges_id[str(id0)+"_"+str(id1)]
                    # add new vertex
                    build_faces[switch].append(new_vert)
                    # if there is an open face on the other slot
                    if len(build_faces[not switch]) > 0:
                        # store actual face
                        splitted_faces.append(build_faces[switch])
                        # reset actual faces and switch
                        build_faces[switch] = []
                    # change face slot
                    switch = not switch
                    # continue previous face
                    build_faces[switch].append(new_vert)
                except: pass
            if len(build_faces[not switch]) == 2:
                build_faces[not switch].append(id0)
            if len(build_faces[not switch]) > 2:
                splitted_faces.append(build_faces[not switch])
            # add last face
            splitted_faces.append(build_faces[switch])

        # adding new vertices
        _new_vert = bm.verts.new
        for v in verts: _new_vert(v)
        bm.verts.ensure_lookup_table()

        # deleting old edges/faces
        _remove_edge = bm.edges.remove
        bm.edges.ensure_lookup_table()
        remove_edges = []
        for e in delete_edges: _remove_edge(e)

        bm.verts.ensure_lookup_table()
        # adding new faces
        _new_face = bm.faces.new
        missed_faces = []
        for f in splitted_faces:
            try:
                face_verts = [bm.verts[i] for i in f]
                _new_face(face_verts)
            except:
                missed_faces.append(f)

        # Mask geometry
        if(True):
            _remove_vert = bm.verts.remove
            # new contour vertices get iso_val+0.0001 so they survive the cut
            all_weight = weight + [iso_val+0.0001]*len(verts)
            weight = []
            for w, v in zip(all_weight, bm.verts):
                if w < iso_val: _remove_vert(v)
                else: weight.append(w)

        # Create mesh and object
        name = ob0.name + '_ContourMask_{:.3f}'.format(iso_val)
        me = bpy.data.meshes.new(name)
        bm.to_mesh(me)
        bm.free()
        ob = bpy.data.objects.new(name, me)

        # Link object to scene and make active
        scn = context.scene
        context.collection.objects.link(ob)
        context.view_layer.objects.active = ob
        ob.select_set(True)
        ob0.select_set(False)

        # generate new vertex group
        for g in ob0.vertex_groups:
            ob.vertex_groups.new(name=g.name)

        # optionally remap remaining weights from iso_val..1 to 0..1
        if iso_val != 1: mult = 1/(1-iso_val)
        else: mult = 1
        for id in range(len(weight)):
            if self.normalize_weight: w = (weight[id]-iso_val)*mult
            else: w = weight[id]
            ob.vertex_groups[vertex_group_name].add([id], w, 'REPLACE')
        ob.vertex_groups.active_index = group_id

        # align new object
        ob.matrix_world = ob0.matrix_world

        # Add Solidify
        if self.bool_solidify and True:
            ob.modifiers.new(type='SOLIDIFY', name='Solidify')
            ob.modifiers['Solidify'].thickness = self.thickness
            ob.modifiers['Solidify'].offset = self.offset
            ob.modifiers['Solidify'].vertex_group = vertex_group_name

        bpy.ops.object.mode_set(mode='EDIT')
        bpy.ops.object.mode_set(mode='WEIGHT_PAINT')
        print("Contour Mask time: " + str(timeit.default_timer() - start_time) + " sec")

        bpy.data.meshes.remove(me0)

        return {'FINISHED'}
+
+
class weight_contour_mask_wip(Operator):
    """Work-in-progress numpy rewrite of the Contour Mask operator,
    delegating the face splitting to contour_bmesh().
    """
    # NOTE(review): bl_idname duplicates weight_contour_mask above —
    # registering both classes would clash; presumably this WIP class is
    # excluded from the register list. Verify before enabling it.
    bl_idname = "object.weight_contour_mask"
    bl_label = "Contour Mask"
    bl_description = ("")
    bl_options = {'REGISTER', 'UNDO'}

    use_modifiers : BoolProperty(
        name="Use Modifiers", default=True,
        description="Apply all the modifiers")
    iso : FloatProperty(
        name="Iso Value", default=0.5, soft_min=0, soft_max=1,
        description="Threshold value")
    bool_solidify : BoolProperty(
        name="Solidify", default=True, description="Add Solidify Modifier")
    normalize_weight : BoolProperty(
        name="Normalize Weight", default=True,
        description="Normalize weight of remaining vertices")

    @classmethod
    def poll(cls, context):
        return len(context.object.vertex_groups) > 0

    def execute(self, context):
        start_time = timeit.default_timer()
        try:
            check = context.object.vertex_groups[0]
        except:
            self.report({'ERROR'}, "The object doesn't have Vertex Groups")
            return {'CANCELLED'}

        ob0 = bpy.context.object

        iso_val = self.iso
        group_id = ob0.vertex_groups.active_index
        vertex_group_name = ob0.vertex_groups[group_id].name

        #bpy.ops.object.mode_set(mode='EDIT')
        #bpy.ops.mesh.select_all(action='SELECT')
        #bpy.ops.object.mode_set(mode='OBJECT')
        if self.use_modifiers:
            me0 = simple_to_mesh(ob0)#ob0.to_mesh(preserve_all_data_layers=True, depsgraph=bpy.context.evaluated_depsgraph_get()).copy()
        else:
            me0 = ob0.data.copy()

        # generate new bmesh
        bm = bmesh.new()
        bm.from_mesh(me0)

        # store weight values
        weight = []
        ob = bpy.data.objects.new("temp", me0)
        for g in ob0.vertex_groups:
            ob.vertex_groups.new(name=g.name)
        weight = get_weight_numpy(ob.vertex_groups[vertex_group_name], len(me0.vertices))

        # split the geometry along the iso contour (numpy/bmesh helper)
        me0, bm, weight = contour_bmesh(me0, bm, weight, iso_val)

        # Mask geometry
        mask = weight >= iso_val
        weight = weight[mask]
        mask = np.logical_not(mask)
        delete_verts = np.array(bm.verts)[mask]
        # NOTE(review): vertex removal is disabled (WIP) — delete_verts is
        # currently unused, so geometry below the iso value is kept
        #for v in delete_verts: bm.verts.remove(v)

        # Create mesh and object
        name = ob0.name + '_ContourMask_{:.3f}'.format(iso_val)
        me = bpy.data.meshes.new(name)
        bm.to_mesh(me)
        bm.free()
        ob = bpy.data.objects.new(name, me)

        # Link object to scene and make active
        scn = context.scene
        context.collection.objects.link(ob)
        context.view_layer.objects.active = ob
        ob.select_set(True)
        ob0.select_set(False)

        # generate new vertex group
        for g in ob0.vertex_groups:
            ob.vertex_groups.new(name=g.name)

        # optionally remap remaining weights from iso_val..1 to 0..1
        if iso_val != 1: mult = 1/(1-iso_val)
        else: mult = 1
        for id in range(len(weight)):
            if self.normalize_weight: w = (weight[id]-iso_val)*mult
            else: w = weight[id]
            ob.vertex_groups[vertex_group_name].add([id], w, 'REPLACE')
        ob.vertex_groups.active_index = group_id

        # align new object
        ob.matrix_world = ob0.matrix_world

        # Add Solidify
        if self.bool_solidify and True:
            ob.modifiers.new(type='SOLIDIFY', name='Solidify')
            ob.modifiers['Solidify'].thickness = 0.05
            ob.modifiers['Solidify'].offset = 0
            ob.modifiers['Solidify'].vertex_group = vertex_group_name

        bpy.ops.object.mode_set(mode='EDIT')
        bpy.ops.object.mode_set(mode='WEIGHT_PAINT')
        print("Contour Mask time: " + str(timeit.default_timer() - start_time) + " sec")

        bpy.data.meshes.remove(me0)

        return {'FINISHED'}
+
+
class weight_contour_curves(Operator):
    """Generate the iso-weight contours of the active Vertex Group as a new
    Curve object, optionally with a per-level point radius.
    """
    bl_idname = "object.weight_contour_curves"
    bl_label = "Contour Curves"
    bl_description = ("")
    bl_options = {'REGISTER', 'UNDO'}

    use_modifiers : BoolProperty(
        name="Use Modifiers", default=True,
        description="Apply all the modifiers")

    min_iso : FloatProperty(
        name="Min Value", default=0., soft_min=0, soft_max=1,
        description="Minimum weight value")
    max_iso : FloatProperty(
        name="Max Value", default=1, soft_min=0, soft_max=1,
        description="Maximum weight value")
    n_curves : IntProperty(
        name="Curves", default=3, soft_min=1, soft_max=10,
        description="Number of Contour Curves")

    min_rad : FloatProperty(
        name="Min Radius", default=1, soft_min=0, soft_max=1,
        description="Change radius according to Iso Value")
    max_rad : FloatProperty(
        name="Max Radius", default=1, soft_min=0, soft_max=1,
        description="Change radius according to Iso Value")

    @classmethod
    def poll(cls, context):
        ob = context.object
        return len(ob.vertex_groups) > 0 or ob.type == 'CURVE'

    def invoke(self, context, event):
        return context.window_manager.invoke_props_dialog(self, width=350)

    def execute(self, context):
        start_time = timeit.default_timer()
        try:
            check = context.object.vertex_groups[0]
        except Exception:
            self.report({'ERROR'}, "The object doesn't have Vertex Groups")
            return {'CANCELLED'}
        ob0 = context.object

        group_id = ob0.vertex_groups.active_index
        vertex_group_name = ob0.vertex_groups[group_id].name

        bpy.ops.object.mode_set(mode='EDIT')
        bpy.ops.mesh.select_all(action='SELECT')
        bpy.ops.object.mode_set(mode='OBJECT')
        if self.use_modifiers:
            me0 = simple_to_mesh(ob0) #ob0.to_mesh(preserve_all_data_layers=True, depsgraph=bpy.context.evaluated_depsgraph_get()).copy()
        else:
            me0 = ob0.data.copy()

        # generate new bmesh
        bm = bmesh.new()
        bm.from_mesh(me0)
        bm.verts.ensure_lookup_table()
        bm.edges.ensure_lookup_table()
        bm.faces.ensure_lookup_table()

        # store weight values (numpy array, one value per vertex)
        ob = bpy.data.objects.new("temp", me0)
        for g in ob0.vertex_groups:
            ob.vertex_groups.new(name=g.name)
        weight = get_weight_numpy(ob.vertex_groups[vertex_group_name], len(bm.verts))

        total_verts = np.zeros((0,3))
        total_segments = []
        radius = []

        # start iterate contours levels
        vertices = get_vertices_numpy(me0)
        filtered_edges = get_edges_id_numpy(me0)

        # per-face weight range, used to discard faces early
        faces_weight = [np.array([weight[v] for v in p.vertices]) for p in me0.polygons]
        fw_min = np.array([np.min(fw) for fw in faces_weight])
        fw_max = np.array([np.max(fw) for fw in faces_weight])

        bm_faces = np.array(bm.faces)

        for c in range(self.n_curves):
            min_iso = min(self.min_iso, self.max_iso)
            max_iso = max(self.min_iso, self.max_iso)
            try:
                delta_iso = (max_iso-min_iso)/(self.n_curves-1)
                iso_val = c*delta_iso + min_iso
                if iso_val < 0: iso_val = (min_iso + max_iso)/2
            except ZeroDivisionError:
                # single curve: use the middle of the range
                iso_val = (min_iso + max_iso)/2

            # remove passed faces
            bool_mask = iso_val < fw_max
            bm_faces = bm_faces[bool_mask]
            fw_min = fw_min[bool_mask]
            fw_max = fw_max[bool_mask]

            # mask faces
            bool_mask = fw_min < iso_val
            faces_mask = bm_faces[bool_mask]

            n_verts = len(bm.verts)
            count = len(total_verts)

            # vertices indexes
            id0 = filtered_edges[:,0]
            id1 = filtered_edges[:,1]
            # vertices weight
            w0 = weight[id0]
            w1 = weight[id1]
            # weight condition
            bool_w0 = w0 < iso_val
            bool_w1 = w1 < iso_val

            # mask all edges that have one weight value below the iso value
            mask_new_verts = np.logical_xor(bool_w0, bool_w1)

            id0 = id0[mask_new_verts]
            id1 = id1[mask_new_verts]
            # filter arrays
            v0 = vertices[id0]
            v1 = vertices[id1]
            w0 = w0[mask_new_verts]
            w1 = w1[mask_new_verts]
            # BUGFIX: div is a numpy array, so `if div == 0:` raised
            # "truth value of an array is ambiguous" whenever more than one
            # edge crossed the level — patch zero denominators elementwise
            div = w1 - w0
            div[div == 0] = 0.000001

            # interpolate the contour vertices along the crossing edges
            param = np.expand_dims((iso_val-w0)/div,axis=1)
            verts = v0 + (v1-v0)*param

            # indexes of edges with new vertices
            edges_index = filtered_edges[mask_new_verts][:,2]
            edges_id = {}
            for i, id in enumerate(edges_index): edges_id[id] = i+len(total_verts)

            # remove all edges completely below the iso value
            mask_edges = np.logical_not(np.logical_and(bool_w0, bool_w1))
            filtered_edges = filtered_edges[mask_edges]
            if len(verts) == 0: continue

            # finding segments: pair the two new vertices of each crossed face
            segments = []
            for f in faces_mask:
                seg = []
                for e in f.edges:
                    try:
                        seg.append(edges_id[e.index])
                        if len(seg) == 2:
                            segments.append(seg)
                            seg = []
                    except KeyError: pass

            total_segments = total_segments + segments
            total_verts = np.concatenate((total_verts,verts))

            # per-level radius, interpolated between min_rad and max_rad
            if self.min_rad != self.max_rad:
                try:
                    iso_rad = c*(self.max_rad-self.min_rad)/(self.n_curves-1)+self.min_rad
                    if iso_rad < 0: iso_rad = (self.min_rad + self.max_rad)/2
                except ZeroDivisionError:
                    iso_rad = (self.min_rad + self.max_rad)/2
                radius = radius + [iso_rad]*len(verts)
        print("Contour Curves, computing time: " + str(timeit.default_timer() - start_time) + " sec")

        # rebuild a fresh bmesh holding only the contour vertices/segments
        bm.free()
        bm = bmesh.new()
        # adding new vertices _local for fast access
        _new_vert = bm.verts.new
        for v in total_verts: _new_vert(v)
        bm.verts.ensure_lookup_table()

        # adding new edges
        _new_edge = bm.edges.new
        for s in total_segments:
            try:
                pts = [bm.verts[i] for i in s]
                _new_edge(pts)
            except Exception: pass

        try:
            name = ob0.name + '_ContourCurves'
            me = bpy.data.meshes.new(name)
            bm.to_mesh(me)
            bm.free()
            ob = bpy.data.objects.new(name, me)
            # Link object to scene and make active
            scn = context.scene
            context.collection.objects.link(ob)
            context.view_layer.objects.active = ob
            ob.select_set(True)
            ob0.select_set(False)

            print("Contour Curves, bmesh time: " + str(timeit.default_timer() - start_time) + " sec")
            bpy.ops.object.convert(target='CURVE')
            ob = context.object
            if not (self.min_rad == 0 and self.max_rad == 0):
                if self.min_rad != self.max_rad:
                    count = 0
                    for s in ob.data.splines:
                        for p in s.points:
                            p.radius = radius[count]
                            count += 1
                else:
                    for s in ob.data.splines:
                        for p in s.points:
                            p.radius = self.min_rad
                ob.data.bevel_depth = 0.01
                ob.data.fill_mode = 'FULL'
                ob.data.bevel_resolution = 3
        except Exception:
            self.report({'ERROR'}, "There are no values in the chosen range")
            return {'CANCELLED'}

        # align new object
        ob.matrix_world = ob0.matrix_world
        print("Contour Curves time: " + str(timeit.default_timer() - start_time) + " sec")

        bpy.data.meshes.remove(me0)
        bpy.data.meshes.remove(me)

        return {'FINISHED'}
+
+class tissue_weight_contour_curves_pattern(Operator):
    bl_idname = "object.tissue_weight_contour_curves_pattern"
    bl_label = "Contour Curves"
    bl_description = ("")
    bl_options = {'REGISTER', 'UNDO'}

    use_modifiers : BoolProperty(
        name="Use Modifiers", default=True,
        description="Apply all the modifiers")

    auto_bevel : BoolProperty(
        name="Automatic Bevel", default=False,
        description="Bevel depends on weight density")

    min_iso : FloatProperty(
        name="Min Value", default=0., soft_min=0, soft_max=1,
        description="Minimum weight value")
    max_iso : FloatProperty(
        name="Max Value", default=1, soft_min=0, soft_max=1,
        description="Maximum weight value")
    n_curves : IntProperty(
        name="Curves", default=10, soft_min=1, soft_max=100,
        description="Number of Contour Curves")
    # fixed values, kept as plain class attributes for parity with
    # weight_contour_curves (not exposed as operator properties)
    min_rad = 1
    max_rad = 1

    in_displace : FloatProperty(
        name="Displace A", default=0, soft_min=-10, soft_max=10,
        description="Pattern displace strength")
    out_displace : FloatProperty(
        name="Displace B", default=2, soft_min=-10, soft_max=10,
        description="Pattern displace strength")

    in_steps : IntProperty(
        name="Steps A", default=1, min=0, soft_max=10,
        description="Number of layers to move inwards")
    out_steps : IntProperty(
        name="Steps B", default=1, min=0, soft_max=10,
        description="Number of layers to move outwards")
    limit_z : BoolProperty(
        name="Limit Z", default=False,
        description="Limit Pattern in Z")

    merge : BoolProperty(
        name="Merge Vertices", default=True,
        description="Merge points")
    merge_thres : FloatProperty(
        name="Merge Threshold", default=0.01, min=0, soft_max=1,
        description="Minimum Curve Radius")

    bevel_depth : FloatProperty(
        name="Bevel Depth", default=0, min=0, soft_max=1,
        description="")
    min_bevel_depth : FloatProperty(
        name="Min Bevel Depth", default=0.1, min=0, soft_max=1,
        description="")
    max_bevel_depth : FloatProperty(
        name="Max Bevel Depth", default=1, min=0, soft_max=1,
        description="")
    remove_open_curves : BoolProperty(
        name="Remove Open Curves", default=False,
        description="Remove Open Curves")

    vertex_group_pattern : StringProperty(
        name="Displace", default='',
        description="Vertex Group used for pattern displace")

    vertex_group_bevel : StringProperty(
        name="Bevel", default='',
        description="Variable Bevel depth")

    object_name : StringProperty(
        name="Active Object", default='',
        description="")

    # NOTE(review): evaluated once at class-definition (add-on import) time,
    # not per invocation — bpy.context may have no active object then, in
    # which case the default falls back to ''. Verify this is intended.
    try: vg_name = bpy.context.object.vertex_groups.active.name
    except: vg_name = ''

    vertex_group_contour : StringProperty(
        name="Contour", default=vg_name,
        description="Vertex Group used for contouring")
    clean_distance : FloatProperty(
        name="Clean Distance", default=0, min=0, soft_max=10,
        description="Remove short segments")


    spiralized: BoolProperty(
        name='Spiralized', default=False,
        description='Create a Spiral Contour. Works better with dense meshes.'
    )
    spiral_axis: FloatVectorProperty(
        name="Spiral Axis", default=(0,0,1),
        description="Axis of the Spiral (in local coordinates)"
    )
    spiral_rotation : FloatProperty(
        name="Spiral Rotation", default=0, min=0, max=2*pi,
        description=""
    )
+
+ @classmethod
+ def poll(cls, context):
+ ob = context.object
+ return ob and len(ob.vertex_groups) > 0 or ob.type == 'CURVE'
+
+ def invoke(self, context, event):
+ return context.window_manager.invoke_props_dialog(self, width=250)
+
    def draw(self, context):
        """Build the popup UI for the operator options."""
        # Remember the source mesh: on re-runs the active object may already
        # be the generated curve, in which case object_name is kept as-is.
        if not context.object.type == 'CURVE':
            self.object_name = context.object.name
        ob = bpy.data.objects[self.object_name]
        # Fall back to the active vertex group when the stored name is stale.
        if self.vertex_group_contour not in [vg.name for vg in ob.vertex_groups]:
            self.vertex_group_contour = ob.vertex_groups.active.name
        layout = self.layout
        col = layout.column(align=True)
        col.prop(self, "use_modifiers")
        col.label(text="Contour Curves:")
        col.prop_search(self, 'vertex_group_contour', ob, "vertex_groups", text='')
        row = col.row(align=True)
        row.prop(self,'min_iso')
        row.prop(self,'max_iso')
        col.prop(self,'n_curves')
        col.separator()
        col.label(text='Curves Bevel:')
        col.prop(self,'auto_bevel')
        # Manual bevel group only makes sense when auto-bevel is off;
        # min/max depth apply to any variable bevel (group or auto).
        if not self.auto_bevel:
            col.prop_search(self, 'vertex_group_bevel', ob, "vertex_groups", text='')
        if self.vertex_group_bevel != '' or self.auto_bevel:
            row = col.row(align=True)
            row.prop(self,'min_bevel_depth')
            row.prop(self,'max_bevel_depth')
        else:
            col.prop(self,'bevel_depth')
        col.separator()

        col.label(text="Displace Pattern:")
        col.prop_search(self, 'vertex_group_pattern', ob, "vertex_groups", text='')
        # Pattern options are only shown when a displace group is chosen.
        if self.vertex_group_pattern != '':
            row = col.row(align=True)
            row.prop(self,'in_steps')
            row.prop(self,'out_steps')
            row = col.row(align=True)
            row.prop(self,'in_displace')
            row.prop(self,'out_displace')
            col.prop(self,'limit_z')
        col.separator()
        row=col.row(align=True)
        row.prop(self,'spiralized')
        row.label(icon='MOD_SCREW')
        if self.spiralized:
            #row=col.row(align=True)
            #row.prop(self,'spiral_axis')
            #col.separator()
            col.prop(self,'spiral_rotation')
        col.separator()

        col.label(text='Clean Curves:')
        col.prop(self,'clean_distance')
        col.prop(self,'remove_open_curves')
+
    def execute(self, context):
        """Generate contour curves from the chosen weight vertex group.

        Walks n_curves iso-levels of the contour weight over the evaluated
        mesh, intersects the mesh edges at each level (via
        contour_edges_pattern), chains the intersections into curves
        (find_curves) and builds a Curve object (curve_from_pydata).
        """
        n_curves = self.n_curves
        start_time = timeit.default_timer()
        try:
            check = context.object.vertex_groups[0]
        except:
            self.report({'ERROR'}, "The object doesn't have Vertex Groups")
            return {'CANCELLED'}
        ob0 = bpy.data.objects[self.object_name]

        # Work on the evaluated copy so modifiers are taken into account.
        dg = context.evaluated_depsgraph_get()
        ob = ob0.evaluated_get(dg)
        me0 = ob.data

        # generate new bmesh
        bm = bmesh.new()
        bm.from_mesh(me0)
        n_verts = len(bm.verts)

        # store weight values
        try:
            weight = get_weight_numpy(ob.vertex_groups[self.vertex_group_contour], len(me0.vertices))
        except:
            bm.free()
            self.report({'ERROR'}, "Please select a Vertex Group for contouring")
            return {'CANCELLED'}

        # Optional displace-pattern weight; zeros disable the pattern.
        try:
            pattern_weight = get_weight_numpy(ob.vertex_groups[self.vertex_group_pattern], len(me0.vertices))
        except:
            #self.report({'WARNING'}, "There is no Vertex Group assigned to the pattern displace")
            pattern_weight = np.zeros(len(me0.vertices))

        # Optional per-vertex bevel weight; ones give a constant bevel.
        variable_bevel = False
        try:
            bevel_weight = get_weight_numpy(ob.vertex_groups[self.vertex_group_bevel], len(me0.vertices))
            variable_bevel = True
        except:
            bevel_weight = np.ones(len(me0.vertices))

        if self.auto_bevel:
            # calc weight density
            # Per-vertex average of edge length / weight difference,
            # remapped to 0..1: denser iso-lines get a smaller bevel.
            bevel_weight = np.ones(len(me0.vertices))*10000
            bevel_weight = np.zeros(len(me0.vertices))
            edges_length = np.array([e.calc_length() for e in bm.edges])
            edges_dw = np.array([max(abs(weight[e.verts[0].index]-weight[e.verts[1].index]),0.000001) for e in bm.edges])
            dens = edges_length/edges_dw
            n_records = np.zeros(len(me0.vertices))
            for i, e in enumerate(bm.edges):
                for v in e.verts:
                    id = v.index
                    #bevel_weight[id] = min(bevel_weight[id], dens[i])
                    bevel_weight[id] += dens[i]
                    n_records[id] += 1
            bevel_weight = bevel_weight/n_records
            bevel_weight = (bevel_weight - min(bevel_weight))/(max(bevel_weight) - min(bevel_weight))
            #bevel_weight = 1-bevel_weight
            variable_bevel = True

        #filtered_edges = bm.edges
        # Accumulators over all iso-levels.
        total_verts = np.zeros((0,3))
        total_radii = np.zeros((0,1))
        total_segments = []# np.array([])
        radius = []

        # start iterate contours levels
        vertices, normals = get_vertices_and_normals_numpy(me0)
        filtered_edges = get_edges_id_numpy(me0)


        min_iso = min(self.min_iso, self.max_iso)
        max_iso = max(self.min_iso, self.max_iso)

        # Spiral
        # Replace the weight with a periodic function of weight and normal
        # direction, collapsing all levels into a single spiral contour.
        if self.spiralized:
            nx = normals[:,0]
            ny = normals[:,1]
            ang = self.spiral_rotation + weight*pi*n_curves+arctan2(nx,ny)
            weight = sin(ang)/2+0.5
            n_curves = 1

        if n_curves > 1:
            delta_iso = (max_iso-min_iso)/(n_curves-1)

        else:
            delta_iso = None

        # Per-face weight extremes, used to skip faces an iso-level
        # cannot cross.
        faces_weight = [np.array([weight[v] for v in p.vertices]) for p in me0.polygons]
        fw_min = np.array([np.min(fw) for fw in faces_weight])
        fw_max = np.array([np.max(fw) for fw in faces_weight])

        bm_faces = np.array(bm.faces)

        #print("Contour Curves, data loaded: " + str(timeit.default_timer() - start_time) + " sec")
        step_time = timeit.default_timer()
        for c in range(n_curves):
            if delta_iso:
                iso_val = c*delta_iso + min_iso
                if iso_val < 0: iso_val = (min_iso + max_iso)/2
            else:
                iso_val = (min_iso + max_iso)/2

            #if c == 0 and self.auto_bevel:


            # remove passed faces
            # Iso-levels grow monotonically, so faces entirely below the
            # current level can never be crossed again.
            bool_mask = iso_val < fw_max
            bm_faces = bm_faces[bool_mask]
            fw_min = fw_min[bool_mask]
            fw_max = fw_max[bool_mask]

            # mask faces
            bool_mask = fw_min < iso_val
            faces_mask = bm_faces[bool_mask]

            count = len(total_verts)

            new_filtered_edges, edges_index, verts, bevel = contour_edges_pattern(self, c, len(total_verts), iso_val, vertices, normals, filtered_edges, weight, pattern_weight, bevel_weight)

            if len(edges_index) > 0:
                if self.auto_bevel and False:
                    bevel = 1-dens[edges_index]
                    bevel = bevel[:,np.newaxis]
                # Remap bevel weights into [min,max] bevel depth ratio.
                if self.max_bevel_depth != self.min_bevel_depth:
                    min_radius = self.min_bevel_depth / max(0.0001,self.max_bevel_depth)
                    radii = min_radius + bevel*(1 - min_radius)
                else:
                    radii = bevel
            else:
                continue

            # NOTE(review): element-wise '== None' on a numpy value; the
            # helper signals "no intersections" this way — kept as-is.
            if verts[0,0] == None: continue
            else: filtered_edges = new_filtered_edges
            # Map original edge index -> index of its intersection point
            # in the accumulated vertex list.
            edges_id = {}
            for i, id in enumerate(edges_index): edges_id[id] = i + count

            if len(verts) == 0: continue

            # finding segments
            # Each crossed face contributes pairs of intersected edges.
            segments = []
            for f in faces_mask:
                seg = []
                for e in f.edges:
                    try:
                        #seg.append(new_ids[np.where(edges_index == e.index)[0][0]])
                        seg.append(edges_id[e.index])
                        if len(seg) == 2:
                            segments.append(seg)
                            seg = []
                    except: pass

            total_segments = total_segments + segments
            total_verts = np.concatenate((total_verts, verts))
            total_radii = np.concatenate((total_radii, radii))

            # Per-level curve radius, interpolated between min_rad/max_rad.
            if self.min_rad != self.max_rad:
                try:
                    iso_rad = c*(self.max_rad-self.min_rad)/(self.n_curves-1)+self.min_rad
                    if iso_rad < 0: iso_rad = (self.min_rad + self.max_rad)/2
                except:
                    iso_rad = (self.min_rad + self.max_rad)/2
                radius = radius + [iso_rad]*len(verts)
        #print("Contour Curves, points computing: " + str(timeit.default_timer() - step_time) + " sec")
        step_time = timeit.default_timer()

        if len(total_segments) > 0:
            step_time = timeit.default_timer()
            ordered_points = find_curves(total_segments, len(total_verts))

            #print("Contour Curves, point ordered in: " + str(timeit.default_timer() - step_time) + " sec")
            step_time = timeit.default_timer()
            crv = curve_from_pydata(total_verts, total_radii, ordered_points, ob0.name + '_ContourCurves', self.remove_open_curves, merge_distance=self.clean_distance)
            context.view_layer.objects.active = crv
            # Per-point radii already carry the variation; the curve-level
            # depth is the max (or the constant depth when not variable).
            if variable_bevel: crv.data.bevel_depth = self.max_bevel_depth
            else: crv.data.bevel_depth = self.bevel_depth

            crv.select_set(True)
            ob0.select_set(False)
            crv.matrix_world = ob0.matrix_world
            #print("Contour Curves, curves created in: " + str(timeit.default_timer() - step_time) + " sec")
        else:
            bm.free()
            self.report({'ERROR'}, "There are no values in the chosen range")
            return {'CANCELLED'}
        bm.free()
        print("Contour Curves, total time: " + str(timeit.default_timer() - start_time) + " sec")
        return {'FINISHED'}
+
class vertex_colors_to_vertex_groups(Operator):
    """Convert channels of the active Vertex Color layer into Vertex Groups.

    One group is created per enabled channel (red, green, blue, value);
    the value channel uses the Rec.709 luminance of the color.
    """
    bl_idname = "object.vertex_colors_to_vertex_groups"
    bl_label = "Vertex Color"
    bl_options = {'REGISTER', 'UNDO'}
    bl_description = ("Convert the active Vertex Color into a Vertex Group.")

    # Channels to convert; 'value' (luminance) is the default.
    red : BoolProperty(
        name="red channel", default=False, description="convert red channel")
    green : BoolProperty(
        name="green channel", default=False,
        description="convert green channel")
    blue : BoolProperty(
        name="blue channel", default=False, description="convert blue channel")
    value : BoolProperty(
        name="value channel", default=True, description="convert value channel")
    invert : BoolProperty(
        name="invert", default=False, description="invert all color channels")

    @classmethod
    def poll(cls, context):
        # Requires an active mesh object with at least one vertex color layer.
        try:
            return len(context.object.data.vertex_colors) > 0
        except: return False

    def execute(self, context):
        obj = context.active_object
        # Index where the first newly created group will land.
        id = len(obj.vertex_groups)
        id_red = id
        id_green = id
        id_blue = id
        id_value = id

        boolCol = len(obj.data.vertex_colors)
        if(boolCol): col_name = obj.data.vertex_colors.active.name
        # Create one group per requested channel with every vertex assigned,
        # so the weights can be written directly below.
        bpy.ops.object.mode_set(mode='EDIT')
        bpy.ops.mesh.select_all(action='SELECT')

        if(self.red and boolCol):
            bpy.ops.object.vertex_group_add()
            bpy.ops.object.vertex_group_assign()
            id_red = id
            obj.vertex_groups[id_red].name = col_name + '_red'
            id+=1
        if(self.green and boolCol):
            bpy.ops.object.vertex_group_add()
            bpy.ops.object.vertex_group_assign()
            id_green = id
            obj.vertex_groups[id_green].name = col_name + '_green'
            id+=1
        if(self.blue and boolCol):
            bpy.ops.object.vertex_group_add()
            bpy.ops.object.vertex_group_assign()
            id_blue = id
            obj.vertex_groups[id_blue].name = col_name + '_blue'
            id+=1
        if(self.value and boolCol):
            bpy.ops.object.vertex_group_add()
            bpy.ops.object.vertex_group_assign()
            id_value = id
            obj.vertex_groups[id_value].name = col_name + '_value'
            id+=1

        # With invert on: weight = 1 - channel (invert is True == 1, mult -1).
        mult = 1
        if(self.invert): mult = -1
        bpy.ops.object.mode_set(mode='OBJECT')
        # Offsets from the end of each vertex's group list back to a
        # channel's group (groups were appended in red/green/blue/value order).
        sub_red = 1 + self.value + self.blue + self.green
        sub_green = 1 + self.value + self.blue
        sub_blue = 1 + self.value
        sub_value = 1

        id = len(obj.vertex_groups)
        if(id_red <= id and id_green <= id and id_blue <= id and id_value <= \
        id and boolCol):
            v_colors = obj.data.vertex_colors.active.data
            # Loop colors are stored per face corner: walk polygons in order
            # and keep a running corner index i.
            i = 0
            for f in obj.data.polygons:
                for v in f.vertices:
                    gr = obj.data.vertices[v].groups
                    if(self.red): gr[min(len(gr)-sub_red, id_red)].weight = \
                    self.invert + mult * v_colors[i].color[0]
                    if(self.green): gr[min(len(gr)-sub_green, id_green)].weight\
                    = self.invert + mult * v_colors[i].color[1]
                    if(self.blue): gr[min(len(gr)-sub_blue, id_blue)].weight = \
                    self.invert + mult * v_colors[i].color[2]
                    if(self.value):
                        # Rec.709 luminance coefficients.
                        r = v_colors[i].color[0]
                        g = v_colors[i].color[1]
                        b = v_colors[i].color[2]
                        gr[min(len(gr)-sub_value, id_value)].weight\
                        = self.invert + mult * (0.2126*r + 0.7152*g + 0.0722*b)
                    i+=1
        bpy.ops.paint.weight_paint_toggle()
        return {'FINISHED'}
+
class vertex_group_to_vertex_colors(Operator):
    """Bake the active Vertex Group into a new Vertex Color layer.

    The weight can be written to a single channel, to grayscale, or
    mapped through a blue→green→red "false colors" gradient.
    """
    bl_idname = "object.vertex_group_to_vertex_colors"
    bl_label = "Vertex Group"
    bl_options = {'REGISTER', 'UNDO'}
    bl_description = ("Convert the active Vertex Group into a Vertex Color.")

    channel : EnumProperty(
        items=[('BLUE', 'Blue Channel', 'Convert to Blue Channel'),
               ('GREEN', 'Green Channel', 'Convert to Green Channel'),
               ('RED', 'Red Channel', 'Convert to Red Channel'),
               ('VALUE', 'Value Channel', 'Convert to Grayscale'),
               ('FALSE_COLORS', 'False Colors', 'Convert to False Colors')],
        name="Convert to", description="Choose how to convert vertex group",
        default="VALUE", options={'LIBRARY_EDITABLE'})

    invert : BoolProperty(
        name="invert", default=False, description="invert color channel")

    @classmethod
    def poll(cls, context):
        return len(context.object.vertex_groups) > 0

    def execute(self, context):
        obj = context.active_object
        me = obj.data
        group_id = obj.vertex_groups.active_index
        if (group_id == -1):
            return {'FINISHED'}

        bpy.ops.object.mode_set(mode='OBJECT')
        group_name = obj.vertex_groups[group_id].name
        bpy.ops.mesh.vertex_color_add()
        colors_id = obj.data.vertex_colors.active_index

        # Name the new layer after the group and the chosen conversion.
        colors_name = group_name
        if(self.channel == 'FALSE_COLORS'): colors_name += "_false_colors"
        elif(self.channel == 'VALUE'):  colors_name += "_value"
        elif(self.channel == 'RED'):  colors_name += "_red"
        elif(self.channel == 'GREEN'):  colors_name += "_green"
        elif(self.channel == 'BLUE'):  colors_name += "_blue"
        context.object.data.vertex_colors[colors_id].name = colors_name

        v_colors = obj.data.vertex_colors.active.data

        # Read the per-vertex weights through bmesh, then expand them to
        # per-face-corner values (vertex colors are stored per corner).
        bm = bmesh.new()
        bm.from_mesh(me)
        dvert_lay = bm.verts.layers.deform.active
        weight = bmesh_get_weight_numpy(group_id,dvert_lay,bm.verts)
        if self.invert: weight = 1-weight
        loops_size = get_attribute_numpy(me.polygons, attribute='loop_total', mult=1)
        n_colors = np.sum(loops_size)
        verts = np.ones(n_colors)
        me.polygons.foreach_get('vertices',verts)
        splitted_weight = weight[verts.astype(int)][:,None]
        r = np.zeros(splitted_weight.shape)
        g = np.zeros(splitted_weight.shape)
        b = np.zeros(splitted_weight.shape)
        a = np.ones(splitted_weight.shape)
        if(self.channel == 'FALSE_COLORS'):
            # Piecewise gradient: blue (0) → cyan → green → yellow → red (1).
            mult = 0.6+0.4*splitted_weight
            mask = splitted_weight < 0.25
            g[mask] = splitted_weight[mask]*4
            b[mask] = np.ones(splitted_weight.shape)[mask]

            mask = np.where(np.logical_and(splitted_weight>=0.25, splitted_weight<0.5))
            g[mask] = np.ones(splitted_weight.shape)[mask]
            b[mask] = (1-(splitted_weight[mask]-0.25)*4)

            mask = np.where(np.logical_and(splitted_weight>=0.5, splitted_weight<0.75))
            r[mask] = (splitted_weight[mask]-0.5)*4
            g[mask] = np.ones(splitted_weight.shape)[mask]

            mask = 0.75 <= splitted_weight
            r[mask] = np.ones(splitted_weight.shape)[mask]
            g[mask] = (1-(splitted_weight[mask]-0.75)*4)
        elif(self.channel == 'VALUE'):
            r = splitted_weight
            g = splitted_weight
            b = splitted_weight
        elif(self.channel == 'RED'):
            r = splitted_weight
        elif(self.channel == 'GREEN'):
            g = splitted_weight
        elif(self.channel == 'BLUE'):
            b = splitted_weight

        # Interleave into flat RGBA and write all corners in one call.
        colors = np.concatenate((r,g,b,a),axis=1).flatten()
        v_colors.foreach_set('color',colors)

        bpy.ops.paint.vertex_paint_toggle()
        context.object.data.vertex_colors[colors_id].active_render = True
        return {'FINISHED'}
+
class vertex_group_to_uv(Operator):
    """Combine two Vertex Groups into a new UV Map layer (U and V)."""
    bl_idname = "object.vertex_group_to_uv"
    bl_label = "Vertex Group"
    bl_options = {'REGISTER', 'UNDO'}
    bl_description = ("Combine two Vertex Groups as UV Map Layer.")

    vertex_group_u : StringProperty(
        name="U", default='',
        description="Vertex Group used for the U coordinate")
    vertex_group_v : StringProperty(
        name="V", default='',
        description="Vertex Group used for the V coordinate")
    normalize_weight : BoolProperty(
        name="Normalize Weight", default=True,
        description="Normalize weight values")
    invert_u : BoolProperty(
        name="Invert U", default=False, description="Invert U")
    invert_v : BoolProperty(
        name="Invert V", default=False, description="Invert V")

    @classmethod
    def poll(cls, context):
        return len(context.object.vertex_groups) > 0

    def invoke(self, context, event):
        # Ask for the two groups in a popup before executing.
        return context.window_manager.invoke_props_dialog(self, width=250)

    def draw(self, context):
        ob = context.object
        layout = self.layout
        col = layout.column(align=True)
        row = col.row(align=True)
        row.prop_search(self, 'vertex_group_u', ob, "vertex_groups", text='')
        row.separator()
        row.prop_search(self, 'vertex_group_v', ob, "vertex_groups", text='')
        row = col.row(align=True)
        row.prop(self, "invert_u")
        row.separator()
        row.prop(self, "invert_v")
        row = col.row(align=True)
        row.prop(self, "normalize_weight")

    def execute(self, context):
        ob = context.active_object
        me = ob.data
        n_verts = len(me.vertices)
        vg_keys = ob.vertex_groups.keys()
        bool_u = self.vertex_group_u in vg_keys
        bool_v = self.vertex_group_v in vg_keys
        if bool_u or bool_v:
            # Read per-vertex weights through bmesh; a missing group
            # contributes a constant 0 coordinate.
            bm = bmesh.new()
            bm.from_mesh(me)
            dvert_lay = bm.verts.layers.deform.active
            if bool_u:
                u_index = ob.vertex_groups[self.vertex_group_u].index
                u = bmesh_get_weight_numpy(u_index, dvert_lay, bm.verts)
                if self.invert_u:
                    u = 1-u
                if self.normalize_weight:
                    # Remap to the full 0..1 range.
                    u = np.interp(u, (u.min(), u.max()), (0, 1))
            else:
                u = np.zeros(n_verts)
            if bool_v:
                v_index = ob.vertex_groups[self.vertex_group_v].index
                v = bmesh_get_weight_numpy(v_index, dvert_lay, bm.verts)
                if self.invert_v:
                    v = 1-v
                if self.normalize_weight:
                    v = np.interp(v, (v.min(), v.max()), (0, 1))
            else:
                v = np.zeros(n_verts)
        else:
            # Neither group exists: write an all-zero UV layer.
            u = v = np.zeros(n_verts)

        # UVs are stored per face corner: expand the per-vertex values
        # through the polygons' vertex indices and write them in one call.
        uv_layer = me.uv_layers.new(name='Weight_to_UV')
        loops_size = get_attribute_numpy(me.polygons, attribute='loop_total', mult=1)
        n_data = np.sum(loops_size)
        v_id = np.ones(n_data)
        me.polygons.foreach_get('vertices',v_id)
        v_id = v_id.astype(int)
        split_u = u[v_id,None]
        split_v = v[v_id,None]
        uv = np.concatenate((split_u,split_v),axis=1).flatten()
        uv_layer.data.foreach_set('uv',uv)
        me.uv_layers.update()
        return {'FINISHED'}
+
class curvature_to_vertex_groups(Operator):
    """Bake mesh curvature into a Vertex Group using Dirty Vertex Colors.

    Creates a temporary vertex color layer, runs the Dirty Vertex Color
    paint tool on it, converts the result to a vertex group and removes
    the temporary layer.
    """
    bl_idname = "object.curvature_to_vertex_groups"
    bl_label = "Curvature"
    bl_options = {'REGISTER', 'UNDO'}
    bl_description = ("Generate a Vertex Group based on the curvature of the"
                      "mesh. Is based on Dirty Vertex Color.")

    blur_strength : FloatProperty(
        name="Blur Strength", default=1, min=0.001,
        max=1, description="Blur strength per iteration")

    blur_iterations : IntProperty(
        name="Blur Iterations", default=1, min=0,
        max=40, description="Number of times to blur the values")

    min_angle : FloatProperty(
        name="Min Angle", default=0, min=0,
        max=pi/2, subtype='ANGLE', description="Minimum angle")

    max_angle : FloatProperty(
        name="Max Angle", default=pi, min=pi/2,
        max=pi, subtype='ANGLE', description="Maximum angle")

    # Fix: 'invert' was declared twice in this class; the second
    # declaration silently overrode the first, so only that one is kept.
    invert : BoolProperty(
        name="Invert", default=False,
        description="Invert the curvature map")

    def execute(self, context):
        # Temporary white vertex color layer to receive the dirt values.
        bpy.ops.object.mode_set(mode='OBJECT')
        bpy.ops.mesh.vertex_color_add()
        vertex_colors = context.active_object.data.vertex_colors
        vertex_colors[-1].active = True
        vertex_colors[-1].active_render = True
        vertex_colors[-1].name = "Curvature"
        for c in vertex_colors[-1].data: c.color = (1,1,1,1)
        # Dirty Vertex Color encodes concavity/convexity between the
        # chosen angle bounds.
        bpy.ops.object.mode_set(mode='VERTEX_PAINT')
        bpy.ops.paint.vertex_color_dirt(
            blur_strength=self.blur_strength,
            blur_iterations=self.blur_iterations, clean_angle=self.max_angle,
            dirt_angle=self.min_angle)
        # Convert the colors to a group, then drop the temporary layer.
        bpy.ops.object.vertex_colors_to_vertex_groups(invert=self.invert)
        bpy.ops.mesh.vertex_color_remove()
        return {'FINISHED'}
+
class face_area_to_vertex_groups(Operator):
    """Bake the average area of each vertex's faces into a Vertex Group.

    Weights are the per-vertex mean face area, linearly remapped between
    the chosen (or automatically computed) min/max bounds.
    """
    bl_idname = "object.face_area_to_vertex_groups"
    bl_label = "Area"
    bl_options = {'REGISTER', 'UNDO'}
    bl_description = ("Generate a Vertex Group based on the area of individual"
                      "faces.")

    invert : BoolProperty(
        name="invert", default=False, description="invert values")
    bounds : EnumProperty(
        items=(('MANUAL', "Manual Bounds", ""),
               ('AUTOMATIC', "Automatic Bounds", "")),
        default='AUTOMATIC', name="Bounds")

    min_area : FloatProperty(
        name="Min", default=0.01, soft_min=0, soft_max=1,
        description="Faces with 0 weight")

    max_area : FloatProperty(
        name="Max", default=0.1, soft_min=0, soft_max=1,
        description="Faces with 1 weight")

    def draw(self, context):
        layout = self.layout
        layout.label(text="Bounds")
        layout.prop(self, "bounds", text="")
        if self.bounds == 'MANUAL':
            layout.prop(self, "min_area")
            layout.prop(self, "max_area")

    def execute(self, context):
        ob = getattr(context, 'object', None)
        if ob is None:
            self.report({'ERROR'}, "Please select an Object")
            return {'CANCELLED'}
        vg = ob.vertex_groups.new(name="Faces Area")

        # Average area of the faces sharing each vertex.
        # NOTE(review): a loose vertex (no faces) leaves an empty list and
        # mean() would raise — same as the original behavior.
        areas = [[] for v in ob.data.vertices]
        for p in ob.data.polygons:
            for v in p.vertices:
                areas[v].append(p.area)
        for i in range(len(areas)):
            areas[i] = mean(areas[i])

        # Fix: dead 'COMPRESSION'/'TENSION' branches removed — the bounds
        # enum only declares MANUAL and AUTOMATIC, so they were unreachable.
        if self.bounds == 'MANUAL':
            min_area = self.min_area
            max_area = self.max_area
        else:  # 'AUTOMATIC'
            min_area = min(areas)
            max_area = max(areas)
        delta_area = max_area - min_area
        if delta_area == 0:
            # Avoid division by zero; with automatic bounds a zero range
            # means every face has the same area, which is worth reporting.
            delta_area = 0.0001
            if self.bounds != 'MANUAL':
                self.report({'ERROR'}, "The faces have the same areas")
                #return {'CANCELLED'}
        # Hoisted group lookup out of the per-vertex loop.
        for i in range(len(areas)):
            weight = (areas[i] - min_area)/delta_area
            vg.add([i], weight, 'REPLACE')
        ob.vertex_groups.update()
        ob.data.update()
        bpy.ops.object.mode_set(mode='WEIGHT_PAINT')
        return {'FINISHED'}
+
class random_weight(Operator):
    """Fill the active Vertex Group with uniform random weights."""
    bl_idname = "object.random_weight"
    bl_label = "Random"
    bl_options = {'REGISTER', 'UNDO'}
    bl_description = ("Generate a random Vertex Group")

    min_val : FloatProperty(
        name="Min", default=0, soft_min=0, soft_max=1,
        description="Minimum Value")

    max_val : FloatProperty(
        name="Max", default=1, soft_min=0, soft_max=1,
        description="Maximum Value")

    @classmethod
    def poll(cls, context):
        return len(context.object.vertex_groups) > 0

    def execute(self, context):
        ob = getattr(context, 'object', None)
        if ob is None:
            self.report({'ERROR'}, "Please select an Object")
            return {'CANCELLED'}
        n_verts = len(ob.data.vertices)
        # Uniform random weights, clamped to the valid 0..1 range
        # (min_val/max_val are only soft-limited).
        weight = np.random.uniform(low=self.min_val, high=self.max_val, size=(n_verts,))
        np.clip(weight, 0, 1, out=weight)

        # Hoisted group lookup out of the per-vertex loop.
        group_id = ob.vertex_groups.active_index
        vg = ob.vertex_groups[group_id]
        for i in range(n_verts):
            vg.add([i], weight[i], 'REPLACE')
        ob.vertex_groups.update()
        ob.data.update()
        bpy.ops.object.mode_set(mode='WEIGHT_PAINT')
        return {'FINISHED'}
+
+
class harmonic_weight(Operator):
    """Create a sinusoidal variation of the active Vertex Group.

    Writes weight = amp*(sin(w*freq) - midlevel)/2 + 0.5
                    + add*w*(1 - (1-w)*mult)
    into a new group named "Harmonic", where w is the source weight.
    """
    bl_idname = "object.harmonic_weight"
    bl_label = "Harmonic"
    bl_options = {'REGISTER', 'UNDO'}
    bl_description = ("Create an harmonic variation of the active Vertex Group")

    freq : FloatProperty(
        name="Frequency", default=20, soft_min=0,
        soft_max=100, description="Wave frequency")

    amp : FloatProperty(
        name="Amplitude", default=1, soft_min=0,
        soft_max=10, description="Wave amplitude")

    midlevel : FloatProperty(
        name="Midlevel", default=0, min=-1,
        max=1, description="Midlevel")

    add : FloatProperty(
        name="Add", default=0, min=-1,
        max=1, description="Add to the Weight")

    # Fix: description typo "for he Weight" -> "for the Weight".
    mult : FloatProperty(
        name="Multiply", default=0, min=0,
        max=1, description="Multiply for the Weight")

    @classmethod
    def poll(cls, context):
        return len(context.object.vertex_groups) > 0

    def execute(self, context):
        ob = context.active_object
        if len(ob.vertex_groups) > 0:
            group_id = ob.vertex_groups.active_index
            ob.vertex_groups.new(name="Harmonic")
            # Hoisted group lookups out of the per-vertex loop (done after
            # .new() so the references are taken from the final collection).
            source = ob.vertex_groups[group_id]
            harmonic = ob.vertex_groups[-1]
            for i in range(len(ob.data.vertices)):
                # Vertices not assigned to the source group count as 0.
                try: val = source.weight(i)
                except: val = 0
                weight = self.amp*(math.sin(val*self.freq) - self.midlevel)/2 + 0.5 + self.add*val*(1-(1-val)*self.mult)
                harmonic.add([i], weight, 'REPLACE')
            ob.data.update()
        else:
            self.report({'ERROR'}, "Active object doesn't have vertex groups")
            return {'CANCELLED'}
        bpy.ops.object.mode_set(mode='WEIGHT_PAINT')
        return {'FINISHED'}
+
+
class tissue_weight_distance(Operator):
    """Create a weight map from the distance to the selected vertices.

    Distance is measured along the surface (geodesic), in straight lines
    (euclidean, via a KD-tree) or in topological steps.
    """
    bl_idname = "object.tissue_weight_distance"
    bl_label = "Weight Distance"
    bl_options = {'REGISTER', 'UNDO'}
    bl_description = ("Create a weight map according to the distance from the "
                      "selected vertices along the mesh surface")

    mode : EnumProperty(
        items=(('GEOD', "Geodesic Distance", ""),
               ('EUCL', "Euclidean Distance", ""),
               ('TOPO', "Topology Distance", "")),
        default='GEOD', name="Distance Method")

    normalize : BoolProperty(
        name="Normalize", default=True,
        description="Automatically remap the distance values from 0 to 1")

    min_value : FloatProperty(
        name="Min", default=0, min=0,
        soft_max=100, description="Minimum Distance")

    max_value : FloatProperty(
        name="Max", default=10, min=0,
        soft_max=100, description="Max Distance")

    def invoke(self, context, event):
        return context.window_manager.invoke_props_dialog(self, width=250)

    def fill_neighbors(self, verts, weight):
        """Propagate distances outward from *verts* over linked faces.

        Relaxes each neighbor's distance (geodesic edge length or one
        topological step, depending on self.mode — only called for GEOD
        and TOPO) until no entry improves. Fix: the original recursed once
        per BFS ring, which could exceed Python's recursion limit on large
        meshes; this iterative version produces identical results.
        """
        frontier = list(verts)
        while frontier:
            improved = {}
            for v0 in frontier:
                for f in v0.link_faces:
                    for v1 in f.verts:
                        if self.mode == 'GEOD':
                            dist = weight[v0.index] + (v0.co-v1.co).length
                        elif self.mode == 'TOPO':
                            dist = weight[v0.index] + 1.0
                        w1 = weight[v1.index]
                        # Unvisited (None) or longer paths get relaxed.
                        if w1 is None or w1 > dist:
                            weight[v1.index] = dist
                            improved[v1] = 0
                    # dict keys keep each vertex unique within the ring
            frontier = list(improved.keys())
        return weight

    def execute(self, context):
        ob = context.object
        old_mode = ob.mode
        if old_mode != 'OBJECT':
            bpy.ops.object.mode_set(mode='OBJECT')

        me = ob.data

        # Distance per vertex; None marks "not reached yet".
        weight = [None]*len(me.vertices)

        if self.mode != 'EUCL':
            # Surface-based distances need the connectivity from bmesh.
            bm = bmesh.new()
            bm.from_mesh(me)
            bm.verts.ensure_lookup_table()
            bm.edges.ensure_lookup_table()
            bm.faces.ensure_lookup_table()
            selected = [v for v in bm.verts if v.select]
            if len(selected) == 0:
                bpy.ops.object.mode_set(mode=old_mode)
                message = "Please, select one or more vertices"
                self.report({'ERROR'}, message)
                return {'CANCELLED'}
            for v in selected: weight[v.index] = 0
            weight = self.fill_neighbors(selected, weight)
            bm.free()
        else:
            # Euclidean: nearest selected vertex through a KD-tree.
            selected = [v for v in me.vertices if v.select]
            kd = KDTree(len(selected))
            for i, v in enumerate(selected):
                kd.insert(v.co, i)
            kd.balance()
            for i,v in enumerate(me.vertices):
                co, index, dist = kd.find(v.co)
                weight[i] = dist

        # Vertices disconnected from the selection stay at distance 0.
        for i in range(len(weight)):
            if weight[i] is None: weight[i] = 0
        weight = np.array(weight)
        max_dist = np.max(weight)
        if self.normalize:
            if max_dist > 0:
                weight /= max_dist
        else:
            delta_value = self.max_value - self.min_value
            if delta_value == 0: delta_value = 0.0000001
            weight = (weight-self.min_value)/delta_value

        # The maximum distance is recorded in the group name.
        if self.mode == 'TOPO':
            vg = ob.vertex_groups.new(name='Distance: {:d}'.format(int(max_dist)))
        else:
            vg = ob.vertex_groups.new(name='Distance: {:.4f}'.format(max_dist))
        for i, w in enumerate(weight):
            vg.add([i], w, 'REPLACE')
        bpy.ops.object.mode_set(mode='WEIGHT_PAINT')
        return {'FINISHED'}
+
class TISSUE_PT_color(Panel):
    """Tissue sidebar panel shown while in Vertex Paint mode."""
    bl_label = "Tissue Tools"
    bl_category = "Tissue"
    bl_space_type = "VIEW_3D"
    bl_region_type = "UI"
    bl_context = "vertexpaint"

    def draw(self, context):
        # Single shortcut: convert the active vertex colors to weights.
        column = self.layout.column(align=True)
        column.operator(
            "object.vertex_colors_to_vertex_groups",
            icon="GROUP_VERTEX", text="Convert to Weight")
+
class TISSUE_PT_weight(Panel):
    """Tissue sidebar panel shown while in Weight Paint mode.

    Groups the weight-related operators: generation, editing, deformation
    analysis, weight-driven curves, simulations and conversions.
    """
    bl_label = "Tissue Tools"
    bl_category = "Tissue"
    bl_space_type = "VIEW_3D"
    bl_region_type = "UI"
    #bl_options = {'DEFAULT_CLOSED'}
    bl_context = "weightpaint"

    def draw(self, context):
        layout = self.layout
        col = layout.column(align=True)
        #if context.object.type == 'MESH' and context.mode == 'OBJECT':
            #col.label(text="Transform:")
            #col.separator()
        #elif bpy.context.mode == 'PAINT_WEIGHT':
        col.label(text="Weight Generate:")
        #col.operator(
        #    "object.vertex_colors_to_vertex_groups", icon="GROUP_VCOL")
        col.operator("object.face_area_to_vertex_groups", icon="FACESEL")
        col.operator("object.curvature_to_vertex_groups", icon="SMOOTHCURVE")
        col.operator("object.tissue_weight_distance", icon="TRACKING")
        # The icon may not exist in every Blender build; fall back to no icon.
        row = col.row(align=True)
        try: row.operator("object.weight_formula", icon="CON_TRANSFORM")
        except: row.operator("object.weight_formula")#, icon="CON_TRANSFORM")
        row.operator("object.update_weight_formula", icon="FILE_REFRESH", text='')#, icon="CON_TRANSFORM")
        #col.label(text="Weight Processing:")
        col.separator()

        # TO BE FIXED
        col.operator("object.weight_laplacian", icon="SMOOTHCURVE")

        col.label(text="Weight Edit:")
        col.operator("object.harmonic_weight", icon="IPO_ELASTIC")
        col.operator("object.random_weight", icon="RNDCURVE")
        col.separator()
        col.label(text="Deformation Analysis:")
        col.operator("object.edges_deformation", icon="DRIVER_DISTANCE")#FULLSCREEN_ENTER")
        col.operator("object.edges_bending", icon="DRIVER_ROTATIONAL_DIFFERENCE")#"MOD_SIMPLEDEFORM")
        col.separator()
        col.label(text="Weight Curves:")
        #col.operator("object.weight_contour_curves", icon="MOD_CURVE")
        col.operator("object.tissue_weight_streamlines", icon="ANIM")
        col.operator("object.tissue_weight_contour_curves_pattern", icon="FORCE_TURBULENCE")
        col.separator()
        col.operator("object.weight_contour_displace", icon="MOD_DISPLACE")
        col.operator("object.weight_contour_mask", icon="MOD_MASK")
        col.separator()
        col.label(text="Simulations:")
        #col.operator("object.reaction_diffusion", icon="MOD_OCEAN")
        col.operator("object.start_reaction_diffusion",
                    icon="EXPERIMENTAL",
                    text="Reaction-Diffusion")
        col.separator()
        col.label(text="Materials:")
        col.operator("object.random_materials", icon='COLOR')
        col.operator("object.weight_to_materials", icon='GROUP_VERTEX')
        col.separator()
        col.label(text="Weight Convert:")
        col.operator("object.vertex_group_to_vertex_colors", icon="GROUP_VCOL",
            text="Convert to Colors")
        col.operator("object.vertex_group_to_uv", icon="UV",
            text="Convert to UV")

        #col.prop(context.object, "reaction_diffusion_run", icon="PLAY", text="Run Simulation")
        ####col.prop(context.object, "reaction_diffusion_run")
        #col.separator()
        #col.label(text="Vertex Color from:")
        #col.operator("object.vertex_group_to_vertex_colors", icon="GROUP_VERTEX")
+
+
+
+
class start_reaction_diffusion(Operator):
    """Initialize and start a Gray-Scott reaction-diffusion simulation.

    Copies the operator options into the object's persistent
    reaction_diffusion_settings, makes sure the 'A' and 'B' vertex groups
    exist, and resets them to the initial state (A=1, B=0 everywhere).
    """
    bl_idname = "object.start_reaction_diffusion"
    bl_label = "Start Reaction Diffusion"
    bl_description = ("Run a Reaction-Diffusion based on existing Vertex Groups: A and B")
    bl_options = {'REGISTER', 'UNDO'}

    run : BoolProperty(
        name="Run Reaction-Diffusion", default=True, description="Compute a new iteration on frame changes")

    time_steps : IntProperty(
        name="Steps", default=10, min=0, soft_max=50,
        description="Number of Steps")

    dt : FloatProperty(
        name="dt", default=1, min=0, soft_max=0.2,
        description="Time Step")

    diff_a : FloatProperty(
        name="Diff A", default=0.18, min=0, soft_max=2,
        description="Diffusion A")

    diff_b : FloatProperty(
        name="Diff B", default=0.09, min=0, soft_max=2,
        description="Diffusion B")

    f : FloatProperty(
        name="f", default=0.055, min=0, soft_min=0.01, soft_max=0.06, max=0.1, precision=4,
        description="Feed Rate")

    k : FloatProperty(
        name="k", default=0.062, min=0, soft_min=0.035, soft_max=0.065, max=0.1, precision=4,
        description="Kill Rate")

    @classmethod
    def poll(cls, context):
        return context.object.type == 'MESH' and context.mode != 'EDIT_MESH'

    def execute(self, context):
        # Register the frame-change handlers that advance the simulation.
        reaction_diffusion_add_handler(self, context)
        set_animatable_fix_handler(self, context)

        ob = context.object

        # Copy the operator options into the object's persistent settings.
        props = ob.reaction_diffusion_settings
        props.run = self.run
        props.dt = self.dt
        props.time_steps = self.time_steps
        props.f = self.f
        props.k = self.k
        props.diff_a = self.diff_a
        props.diff_b = self.diff_b

        # Make sure the two simulation groups exist (membership test
        # instead of the previous bare try/except lookup).
        if 'A' not in ob.vertex_groups:
            ob.vertex_groups.new(name='A')
        if 'B' not in ob.vertex_groups:
            ob.vertex_groups.new(name='B')

        # Initial state: A = 1, B = 0 on every vertex.
        # Group lookups hoisted out of the per-vertex loop.
        vg_a = ob.vertex_groups['A']
        vg_b = ob.vertex_groups['B']
        for v in ob.data.vertices:
            vg_a.add([v.index], 1, 'REPLACE')
            vg_b.add([v.index], 0, 'REPLACE')

        ob.vertex_groups.update()
        ob.data.update()
        bpy.ops.object.mode_set(mode='WEIGHT_PAINT')

        return {'FINISHED'}
+
class reset_reaction_diffusion_weight(Operator):
    """Reset the A/B vertex groups to their default state (A = 1, B = 0),
    creating them if they do not exist, then switch to Weight Paint mode."""
    bl_idname = "object.reset_reaction_diffusion_weight"
    bl_label = "Reset Reaction Diffusion Weight"
    bl_description = ("Set A and B weight to default values")
    bl_options = {'REGISTER', 'UNDO'}

    @classmethod
    def poll(cls, context):
        # BUGFIX: guard against a missing active object, which would raise
        # AttributeError inside poll.
        ob = context.object
        return ob is not None and ob.type == 'MESH' and context.mode != 'EDIT_MESH'

    def execute(self, context):
        reaction_diffusion_add_handler(self, context)
        set_animatable_fix_handler(self, context)

        ob = context.object

        # Make sure the A and B vertex groups exist. Membership test instead
        # of the original bare try/except, which also masked unrelated errors.
        if 'A' not in ob.vertex_groups:
            ob.vertex_groups.new(name='A')
        if 'B' not in ob.vertex_groups:
            ob.vertex_groups.new(name='B')

        # Default state: substance A everywhere, no substance B.
        for v in ob.data.vertices:
            ob.vertex_groups['A'].add([v.index], 1, 'REPLACE')
            ob.vertex_groups['B'].add([v.index], 0, 'REPLACE')

        ob.vertex_groups.update()
        ob.data.update()
        bpy.ops.object.mode_set(mode='WEIGHT_PAINT')

        return {'FINISHED'}
+
class bake_reaction_diffusion(Operator):
    """Bake the Reaction-Diffusion simulation for the configured frame range
    into the cache directory, then switch the object to cached playback."""
    bl_idname = "object.bake_reaction_diffusion"
    bl_label = "Bake Data"
    bl_description = ("Bake the Reaction-Diffusion to the cache directory")
    bl_options = {'REGISTER', 'UNDO'}

    @classmethod
    def poll(cls, context):
        # BUGFIX: guard against a missing active object, which would raise
        # AttributeError inside poll.
        ob = context.object
        return ob is not None and ob.type == 'MESH' and context.mode != 'EDIT_MESH'

    def execute(self, context):
        ob = context.object
        props = ob.reaction_diffusion_settings
        if props.fast_bake:
            # fast_bake_def runs the whole range in one go; temporarily
            # disable the frame-change handler's per-frame updates.
            bool_run = props.run
            props.run = False
            context.scene.frame_current = props.cache_frame_start
            fast_bake_def(ob, frame_start=props.cache_frame_start, frame_end=props.cache_frame_end)
            #create_fast_bake_def(ob, frame_start=props.cache_frame_start, frame_end=props.cache_frame_end)
            context.scene.frame_current = props.cache_frame_end
            props.run = bool_run
        else:
            # BUGFIX: include cache_frame_end itself, matching fast_bake_def
            # (the original range() stopped one frame short, so reading the
            # cache at the last frame failed).
            for i in range(props.cache_frame_start, props.cache_frame_end + 1):
                context.scene.frame_current = i
                reaction_diffusion_def(ob, bake=True)
        props.bool_cache = True

        return {'FINISHED'}
+
class reaction_diffusion_free_data(Operator):
    """Disable cached playback and delete the baked a_####/b_#### files for
    the configured frame range from the cache directory."""
    bl_idname = "object.reaction_diffusion_free_data"
    bl_label = "Free Data"
    bl_description = ("Free Reaction-Diffusion data")
    bl_options = {'REGISTER', 'UNDO'}

    @classmethod
    def poll(cls, context):
        # BUGFIX: guard against a missing active object.
        return context.object is not None and context.object.type == 'MESH'

    def execute(self, context):
        ob = context.object
        props = ob.reaction_diffusion_settings
        props.bool_cache = False

        folder = Path(props.cache_dir)
        # BUGFIX: include cache_frame_end itself — fast_bake_def writes the
        # end frame too, and the original loop left those two files behind.
        for i in range(props.cache_frame_start, props.cache_frame_end + 1):
            for prefix in ('a', 'b'):
                data_file = folder / "{}_{:04d}".format(prefix, i)
                if os.path.exists(data_file):
                    os.remove(data_file)
        return {'FINISHED'}
+
+from bpy.app.handlers import persistent
+
def reaction_diffusion_scene(scene, bake=False):
    """Frame-change entry point: advance the Reaction-Diffusion simulation
    for every object in *scene* whose settings have `run` enabled.

    `bake` is accepted for handler-signature compatibility; the per-object
    call decides caching from the object's own settings.
    """
    running = (ob for ob in scene.objects if ob.reaction_diffusion_settings.run)
    for ob in running:
        reaction_diffusion_def(ob)
+
def reaction_diffusion_def(ob, bake=False):
    # Advance (or load from cache) one frame of the Gray-Scott
    # reaction-diffusion simulation on `ob`, then write the A/B fields back
    # to vertex groups / vertex colors / UVs as configured in
    # ob.reaction_diffusion_settings.
    # With bake=True the resulting arrays are also saved to the cache folder.

    scene = bpy.context.scene
    start = time.time()
    if type(ob) == bpy.types.Scene: return None
    props = ob.reaction_diffusion_settings

    if bake or props.bool_cache:
        if props.cache_dir == '':
            # No cache folder configured yet: create one (temp dir for
            # unsaved files, next to the .blend otherwise) with a random
            # 6-letter suffix, and remember it in the settings.
            letters = string.ascii_letters
            random_name = ''.join(rnd.choice(letters) for i in range(6))
            if bpy.context.blend_data.filepath == '':
                folder = Path(bpy.context.preferences.filepaths.temporary_directory)
                folder = folder / 'reaction_diffusion_cache' / random_name
            else:
                folder = '//' + Path(bpy.context.blend_data.filepath).stem
                folder = Path(bpy.path.abspath(folder)) / 'reaction_diffusion_cache' / random_name
            folder.mkdir(parents=True, exist_ok=True)
            props.cache_dir = str(folder)
        else:
            folder = Path(props.cache_dir)

    me = ob.data
    n_edges = len(me.edges)
    n_verts = len(me.vertices)
    a = np.zeros(n_verts)
    b = np.zeros(n_verts)

    print("{:6d} Reaction-Diffusion: {}".format(scene.frame_current, ob.name))

    if not props.bool_cache:
        # Run the simulation (as opposed to reading baked data below).

        if props.bool_mod:
            # hide deforming modifiers
            mod_visibility = []
            for m in ob.modifiers:
                mod_visibility.append(m.show_viewport)
                if not mod_preserve_shape(m): m.show_viewport = False

            # evaluated mesh
            dg = bpy.context.evaluated_depsgraph_get()
            ob_eval = ob.evaluated_get(dg)
            me = bpy.data.meshes.new_from_object(ob_eval, preserve_all_data_layers=True, depsgraph=dg)

            # set original visibility
            for v, m in zip(mod_visibility, ob.modifiers):
                m.show_viewport = v
            ob.modifiers.update()

        bm = bmesh.new()   # create an empty BMesh
        bm.from_mesh(me)   # fill it in from a Mesh
        dvert_lay = bm.verts.layers.deform.active

        dt = props.dt
        time_steps = props.time_steps
        f = props.f
        k = props.k
        diff_a = props.diff_a
        diff_b = props.diff_b
        scale = props.diff_mult

        brush_mult = props.brush_mult

        # store weight values
        if 'dB' in ob.vertex_groups: db = np.zeros(n_verts)
        if 'grad' in ob.vertex_groups: grad = np.zeros(n_verts)

        # Parameters driven by a vertex group become per-vertex arrays
        # instead of scalars; the solver handles both.
        if props.vertex_group_diff_a != '': diff_a = np.zeros(n_verts)
        if props.vertex_group_diff_b != '': diff_b = np.zeros(n_verts)
        if props.vertex_group_scale != '': scale = np.zeros(n_verts)
        if props.vertex_group_f != '': f = np.zeros(n_verts)
        if props.vertex_group_k != '': k = np.zeros(n_verts)
        if props.vertex_group_brush != '': brush = np.zeros(n_verts)
        else: brush = 0

        group_index_a = ob.vertex_groups["A"].index
        group_index_b = ob.vertex_groups["B"].index
        a = bmesh_get_weight_numpy(group_index_a, dvert_lay, bm.verts)
        b = bmesh_get_weight_numpy(group_index_b, dvert_lay, bm.verts)

        # Remap each group-driven parameter from weight [0,1] to its
        # configured min/max range (swapped when "invert" is enabled).
        if props.vertex_group_diff_a != '':
            group_index = ob.vertex_groups[props.vertex_group_diff_a].index
            diff_a = bmesh_get_weight_numpy(group_index, dvert_lay, bm.verts)
            # NOTE(review): the invert branches here are mirrored relative to
            # the Diff B block below — confirm this asymmetry is intentional.
            if props.invert_vertex_group_diff_a:
                vg_bounds = (props.min_diff_a, props.max_diff_a)
            else:
                vg_bounds = (props.max_diff_a, props.min_diff_a)
            diff_a = np.interp(diff_a, (0,1), vg_bounds)

        if props.vertex_group_diff_b != '':
            group_index = ob.vertex_groups[props.vertex_group_diff_b].index
            diff_b = bmesh_get_weight_numpy(group_index, dvert_lay, bm.verts)
            if props.invert_vertex_group_diff_b:
                vg_bounds = (props.max_diff_b, props.min_diff_b)
            else:
                vg_bounds = (props.min_diff_b, props.max_diff_b)
            diff_b = np.interp(diff_b, (0,1), vg_bounds)

        if props.vertex_group_scale != '':
            group_index = ob.vertex_groups[props.vertex_group_scale].index
            scale = bmesh_get_weight_numpy(group_index, dvert_lay, bm.verts)
            if props.invert_vertex_group_scale:
                vg_bounds = (props.max_scale, props.min_scale)
            else:
                vg_bounds = (props.min_scale, props.max_scale)
            scale = np.interp(scale, (0,1), vg_bounds)

        if props.vertex_group_f != '':
            group_index = ob.vertex_groups[props.vertex_group_f].index
            f = bmesh_get_weight_numpy(group_index, dvert_lay, bm.verts)
            if props.invert_vertex_group_f:
                vg_bounds = (props.max_f, props.min_f)
            else:
                vg_bounds = (props.min_f, props.max_f)
            f = np.interp(f, (0,1), vg_bounds, )

        if props.vertex_group_k != '':
            group_index = ob.vertex_groups[props.vertex_group_k].index
            k = bmesh_get_weight_numpy(group_index, dvert_lay, bm.verts)
            if props.invert_vertex_group_k:
                vg_bounds = (props.max_k, props.min_k)
            else:
                vg_bounds = (props.min_k, props.max_k)
            k = np.interp(k, (0,1), vg_bounds)

        if props.vertex_group_brush != '':
            group_index = ob.vertex_groups[props.vertex_group_brush].index
            brush = bmesh_get_weight_numpy(group_index, dvert_lay, bm.verts)
            brush *= brush_mult


        #timeElapsed = time.time() - start
        #print('RD - Read Vertex Groups:',timeElapsed)
        #start = time.time()

        diff_a *= scale
        diff_b *= scale

        # Edge list as a flat [v0, v1, v0, v1, ...] index array; the solver
        # diffuses along mesh edges.
        edge_verts = [0]*n_edges*2
        me.edges.foreach_get("vertices", edge_verts)
        edge_verts = np.array(edge_verts)

        # Disabled experimental anisotropic branch (note the `and False`).
        if 'gradient' in ob.vertex_groups.keys() and False:
            group_index = ob.vertex_groups['gradient'].index
            gradient = bmesh_get_weight_numpy(group_index, dvert_lay, bm.verts)

            arr = (np.arange(n_edges)*2).astype(int)
            id0 = edge_verts[arr]
            id1 = edge_verts[arr+1]

            #gradient = np.abs(gradient[id0] - gradient[id1])
            gradient = gradient[id1] - gradient[id0]
            gradient /= np.max(gradient)
            sign = np.sign(gradient)
            sign[sign==0] = 1
            gradient = (0.05*abs(gradient) + 0.95)*sign
            #gradient *= (1-abs(gradient)
            #gradient = 0.2*(1-gradient) + 0.95

            #gradient = get_uv_edge_vectors(me)
            #uv_dir = Vector((0.5,0.5,0)).normalized()
            #gradient = np.array([abs(g.dot(uv_dir.normalized())) for g in gradient])
            #gradient = (gradient + 0.5)/2
            #gradient = np.array([max(0,g.dot(uv_dir.normalized())) for g in gradient])

        timeElapsed = time.time() - start
        print(' Preparation Time:',timeElapsed)
        start = time.time()

        # Fast path: numba-compiled solver. Any failure (numba missing,
        # compile error) falls back to the pure-numpy loop below.
        try:
            _f = f if type(f) is np.ndarray else np.array((f,))
            _k = k if type(k) is np.ndarray else np.array((k,))
            _diff_a = diff_a if type(diff_a) is np.ndarray else np.array((diff_a,))
            _diff_b = diff_b if type(diff_b) is np.ndarray else np.array((diff_b,))
            _brush = brush if type(brush) is np.ndarray else np.array((brush,))

            #a, b = numba_reaction_diffusion_anisotropic(n_verts, n_edges, edge_verts, a, b, _brush, _diff_a, _diff_b, _f, _k, dt, time_steps, gradient)
            a, b = numba_reaction_diffusion(n_verts, n_edges, edge_verts, a, b, _brush, _diff_a, _diff_b, _f, _k, dt, time_steps)
        except:
            print('Not using Numba! The simulation could be slow.')
            arr = np.arange(n_edges)*2
            id0 = edge_verts[arr]     # first vertex indices for each edge
            id1 = edge_verts[arr+1]   # second vertex indices for each edge
            for i in range(time_steps):
                b += brush
                lap_a = np.zeros(n_verts)
                lap_b = np.zeros(n_verts)
                lap_a0 = a[id1] - a[id0]   # laplacian increment for first vertex of each edge
                lap_b0 = b[id1] - b[id0]   # laplacian increment for first vertex of each edge

                # Scatter-add the per-edge increments onto both endpoints
                # (graph Laplacian over the edge list).
                np.add.at(lap_a, id0, lap_a0)
                np.add.at(lap_b, id0, lap_b0)
                np.add.at(lap_a, id1, -lap_a0)
                np.add.at(lap_b, id1, -lap_b0)

                # Gray-Scott update. NOTE(review): eval() of a constant
                # string — equivalent to the commented plain expressions
                # below; presumably kept for numexpr-style experimentation.
                ab2 = a*b**2
                a += eval("(diff_a*lap_a - ab2 + f*(1-a))*dt")
                b += eval("(diff_b*lap_b + ab2 - (k+f)*b)*dt")
                #a += (diff_a*lap_a - ab2 + f*(1-a))*dt
                #b += (diff_b*lap_b + ab2 - (k+f)*b)*dt

            a = nan_to_num(a)
            b = nan_to_num(b)

        timeElapsed = time.time() - start
        print(' Simulation Time:',timeElapsed)

    if bake:
        # Save the raw A/B arrays for the current frame.
        if not(os.path.exists(folder)):
            os.mkdir(folder)
        file_name = folder / "a_{:04d}".format(scene.frame_current)
        a.tofile(file_name)
        file_name = folder / "b_{:04d}".format(scene.frame_current)
        b.tofile(file_name)
    elif props.bool_cache:
        # Cached playback: read the arrays baked for this frame.
        try:
            file_name = folder / "a_{:04d}".format(scene.frame_current)
            a = np.fromfile(file_name)
            file_name = folder / "b_{:04d}".format(scene.frame_current)
            b = np.fromfile(file_name)
        except:
            print(' Cannot read cache.')
            return

    # Write results back to the A/B vertex groups, if requested.
    if props.update_weight_a or props.update_weight_b:
        start = time.time()
        if props.update_weight_a:
            if 'A' in ob.vertex_groups.keys():
                vg_a = ob.vertex_groups['A']
            else:
                vg_a = ob.vertex_groups.new(name='A')
        else:
            vg_a = None
        if props.update_weight_b:
            if 'B' in ob.vertex_groups.keys():
                vg_b = ob.vertex_groups['B']
            else:
                vg_b = ob.vertex_groups.new(name='B')
        else:
            vg_b = None
        if vg_a == vg_b == None:
            pass
        else:
            if ob.mode == 'WEIGHT_PAINT':# or props.bool_cache:
                # slower, but prevent crashes
                for i in range(n_verts):
                    if vg_a: vg_a.add([i], a[i], 'REPLACE')
                    if vg_b: vg_b.add([i], b[i], 'REPLACE')
            else:
                if props.bool_mod or props.bool_cache:
                    # `bm` holds the evaluated copy (or was never built when
                    # reading the cache), so rebuild it from the real mesh.
                    #bm.free() # release old bmesh
                    bm = bmesh.new()   # create an empty BMesh
                    bm.from_mesh(ob.data)   # fill it in from a Mesh
                    dvert_lay = bm.verts.layers.deform.active
                # faster, but can cause crashes while painting weight
                if vg_a: index_a = vg_a.index
                if vg_b: index_b = vg_b.index
                for i, v in enumerate(bm.verts):
                    dvert = v[dvert_lay]
                    if vg_a: dvert[index_a] = a[i]
                    if vg_b: dvert[index_b] = b[i]
                bm.to_mesh(ob.data)
                bm.free()
        print(' Writing Vertex Groups Time:',time.time() - start)

    # Optionally normalize both fields to [0,1] before color/UV output.
    if props.normalize:
        min_a = np.min(a)
        max_a = np.max(a)
        min_b = np.min(b)
        max_b = np.max(b)
        a = (a - min_a)/(max_a - min_a)
        b = (b - min_b)/(max_b - min_b)

    split_a = None
    split_b = None
    splitted = False
    # Write A/B into the red/green channels of the 'AB' vertex color layer.
    if props.update_colors:#_a or props.update_colors_b:
        start = time.time()
        loops_size = get_attribute_numpy(me.polygons, attribute='loop_total', mult=1)
        n_colors = np.sum(loops_size)
        v_id = np.ones(n_colors)
        me.polygons.foreach_get('vertices',v_id)
        v_id = v_id.astype(int)
        #v_id = np.array([v for p in ob.data.polygons for v in p.vertices])
        '''
        if props.update_colors_b:
            if 'B' in ob.data.vertex_colors.keys():
                vc = ob.data.vertex_colors['B']
            else:
                vc = ob.data.vertex_colors.new(name='B')
            c_val = b[v_id]
            c_val = np.repeat(c_val, 4, axis=0)
            vc.data.foreach_set('color',c_val)

        if props.update_colors_a:
            if 'A' in ob.data.vertex_colors.keys():
                vc = ob.data.vertex_colors['A']
            else:
                vc = ob.data.vertex_colors.new(name='A')
            c_val = a[v_id]
            c_val = np.repeat(c_val, 4, axis=0)
            vc.data.foreach_set('color',c_val)
        '''
        split_a = a[v_id,None]
        split_b = b[v_id,None]
        splitted = True
        ones = np.ones((n_colors,1))
        #rgba = np.concatenate((split_a,split_b,-split_b+split_a,ones),axis=1).flatten()
        rgba = np.concatenate((split_a,split_b,ones,ones),axis=1).flatten()
        if 'AB' in ob.data.vertex_colors.keys():
            vc = ob.data.vertex_colors['AB']
        else:
            vc = ob.data.vertex_colors.new(name='AB')
        vc.data.foreach_set('color',rgba)
        ob.data.vertex_colors.update()

        print(' Writing Vertex Colors Time:',time.time() - start)

    # Write A/B into the U/V coordinates of the 'AB' UV layer.
    if props.update_uv:
        start = time.time()
        if 'AB' in me.uv_layers.keys():
            uv_layer = me.uv_layers['AB']
        else:
            uv_layer = me.uv_layers.new(name='AB')
        if not splitted:
            # per-loop values were not prepared by the colors branch above
            loops_size = get_attribute_numpy(me.polygons, attribute='loop_total', mult=1)
            n_data = np.sum(loops_size)
            v_id = np.ones(n_data)
            me.polygons.foreach_get('vertices',v_id)
            v_id = v_id.astype(int)
            split_a = a[v_id,None]
            split_b = b[v_id,None]
        uv = np.concatenate((split_a,split_b),axis=1).flatten()
        uv_layer.data.foreach_set('uv',uv)
        me.uv_layers.update()
        print(' Writing UV Map Time:',time.time() - start)

    # Toggling the flag twice leaves it unchanged — presumably forces the
    # particle systems driven by A/B to refresh; confirm.
    for ps in ob.particle_systems:
        if ps.vertex_group_density == 'B' or ps.vertex_group_density == 'A':
            ps.invert_vertex_group_density = not ps.invert_vertex_group_density
            ps.invert_vertex_group_density = not ps.invert_vertex_group_density

    # `me` is a temporary evaluated copy only in this case.
    if props.bool_mod and not props.bool_cache: bpy.data.meshes.remove(me)
+
def fast_bake_def(ob, frame_start=1, frame_end=250):
    # Bake the whole Reaction-Diffusion frame range in a single pass:
    # vertex groups are read once, the solver runs once per frame, and the
    # per-frame A/B arrays are written straight to the cache directory.
    # Unlike reaction_diffusion_def, modifiers are evaluated only once.
    scene = bpy.context.scene
    start = time.time()
    if type(ob) == bpy.types.Scene: return None
    props = ob.reaction_diffusion_settings

    # Define cache folder
    if props.cache_dir == '':
        letters = string.ascii_letters
        random_name = ''.join(rnd.choice(letters) for i in range(6))
        if bpy.context.blend_data.filepath == '':
            folder = Path(bpy.context.preferences.filepaths.temporary_directory)
            folder = folder / 'reaction_diffusion_cache' / random_name
        else:
            folder = '//' + Path(bpy.context.blend_data.filepath).stem
            folder = Path(bpy.path.abspath(folder)) / 'reaction_diffusion_cache' / random_name
        folder.mkdir(parents=True, exist_ok=True)
        props.cache_dir = str(folder)
    else:
        folder = Path(props.cache_dir)

    if props.bool_mod:
        # hide deforming modifiers
        mod_visibility = []
        for m in ob.modifiers:
            mod_visibility.append(m.show_viewport)
            if not mod_preserve_shape(m): m.show_viewport = False

        # evaluated mesh
        dg = bpy.context.evaluated_depsgraph_get()
        ob_eval = ob.evaluated_get(dg)
        me = bpy.data.meshes.new_from_object(ob_eval, preserve_all_data_layers=True, depsgraph=dg)

        # set original visibility
        for v, m in zip(mod_visibility, ob.modifiers):
            m.show_viewport = v
        ob.modifiers.update()
    else:
        me = ob.data

    bm = bmesh.new()   # create an empty BMesh
    bm.from_mesh(me)   # fill it in from a Mesh
    dvert_lay = bm.verts.layers.deform.active
    n_edges = len(me.edges)
    n_verts = len(me.vertices)
    a = np.zeros(n_verts)
    b = np.zeros(n_verts)
    group_index_a = ob.vertex_groups["A"].index
    group_index_b = ob.vertex_groups["B"].index

    dt = props.dt
    time_steps = props.time_steps
    f = props.f
    k = props.k
    diff_a = props.diff_a
    diff_b = props.diff_b
    scale = props.diff_mult

    brush_mult = props.brush_mult

    # store weight values
    if 'dB' in ob.vertex_groups: db = np.zeros(n_verts)
    if 'grad' in ob.vertex_groups: grad = np.zeros(n_verts)

    # Parameters driven by a vertex group become per-vertex arrays.
    if props.vertex_group_diff_a != '': diff_a = np.zeros(n_verts)
    if props.vertex_group_diff_b != '': diff_b = np.zeros(n_verts)
    if props.vertex_group_scale != '': scale = np.zeros(n_verts)
    if props.vertex_group_f != '': f = np.zeros(n_verts)
    if props.vertex_group_k != '': k = np.zeros(n_verts)
    if props.vertex_group_brush != '': brush = np.zeros(n_verts)
    else: brush = 0

    a = bmesh_get_weight_numpy(group_index_a, dvert_lay, bm.verts)
    b = bmesh_get_weight_numpy(group_index_b, dvert_lay, bm.verts)

    # Remap each group-driven parameter from weight [0,1] to its
    # configured min/max range (swapped when "invert" is enabled).
    if props.vertex_group_diff_a != '':
        group_index = ob.vertex_groups[props.vertex_group_diff_a].index
        diff_a = bmesh_get_weight_numpy(group_index, dvert_lay, bm.verts)
        # NOTE(review): invert branches mirrored relative to Diff B, same as
        # in reaction_diffusion_def — confirm intentional.
        if props.invert_vertex_group_diff_a:
            vg_bounds = (props.min_diff_a, props.max_diff_a)
        else:
            vg_bounds = (props.max_diff_a, props.min_diff_a)
        diff_a = np.interp(diff_a, (0,1), vg_bounds)

    if props.vertex_group_diff_b != '':
        group_index = ob.vertex_groups[props.vertex_group_diff_b].index
        diff_b = bmesh_get_weight_numpy(group_index, dvert_lay, bm.verts)
        if props.invert_vertex_group_diff_b:
            vg_bounds = (props.max_diff_b, props.min_diff_b)
        else:
            vg_bounds = (props.min_diff_b, props.max_diff_b)
        diff_b = np.interp(diff_b, (0,1), vg_bounds)

    if props.vertex_group_scale != '':
        group_index = ob.vertex_groups[props.vertex_group_scale].index
        scale = bmesh_get_weight_numpy(group_index, dvert_lay, bm.verts)
        if props.invert_vertex_group_scale:
            vg_bounds = (props.max_scale, props.min_scale)
        else:
            vg_bounds = (props.min_scale, props.max_scale)
        scale = np.interp(scale, (0,1), vg_bounds)

    if props.vertex_group_f != '':
        group_index = ob.vertex_groups[props.vertex_group_f].index
        f = bmesh_get_weight_numpy(group_index, dvert_lay, bm.verts)
        if props.invert_vertex_group_f:
            vg_bounds = (props.max_f, props.min_f)
        else:
            vg_bounds = (props.min_f, props.max_f)
        f = np.interp(f, (0,1), vg_bounds, )

    if props.vertex_group_k != '':
        group_index = ob.vertex_groups[props.vertex_group_k].index
        k = bmesh_get_weight_numpy(group_index, dvert_lay, bm.verts)
        if props.invert_vertex_group_k:
            vg_bounds = (props.max_k, props.min_k)
        else:
            vg_bounds = (props.min_k, props.max_k)
        k = np.interp(k, (0,1), vg_bounds)

    if props.vertex_group_brush != '':
        group_index = ob.vertex_groups[props.vertex_group_brush].index
        brush = bmesh_get_weight_numpy(group_index, dvert_lay, bm.verts)
        brush *= brush_mult

    diff_a *= scale
    diff_b *= scale

    # Flat [v0, v1, v0, v1, ...] edge index list for the solver.
    edge_verts = [0]*n_edges*2
    me.edges.foreach_get("vertices", edge_verts)

    # NOTE(review): `gradient` is computed but not consumed by the solver
    # below — looks like a leftover from the anisotropic experiments.
    gradient = get_uv_edge_vectors(me)
    uv_dir = Vector((0.5,0.5,0))
    #gradient = [abs(g.dot(uv_dir)) for g in gradient]
    gradient = [max(0,g.dot(uv_dir)) for g in gradient]

    timeElapsed = time.time() - start
    print(' Preparation Time:',timeElapsed)
    start = time.time()

    # Fast path: numba-compiled solver per frame; any failure falls back to
    # the pure-numpy loop in the except branch.
    try:
        edge_verts = np.array(edge_verts)
        _f = f if type(f) is np.ndarray else np.array((f,))
        _k = k if type(k) is np.ndarray else np.array((k,))
        _diff_a = diff_a if type(diff_a) is np.ndarray else np.array((diff_a,))
        _diff_b = diff_b if type(diff_b) is np.ndarray else np.array((diff_b,))
        _brush = brush if type(brush) is np.ndarray else np.array((brush,))

        # First iteration only writes the initial state (run_rd is False).
        run_rd = False
        for j in range(props.cache_frame_start, props.cache_frame_end+1):
            start2 = time.time()
            print("{:6d} Reaction-Diffusion: {}".format(j, ob.name))
            if run_rd:
                b += _brush
                a, b = numba_reaction_diffusion(n_verts, n_edges, edge_verts, a, b, _brush, _diff_a, _diff_b, _f, _k, dt, time_steps)
            else:
                run_rd = True

            if not(os.path.exists(folder)):
                os.mkdir(folder)
            file_name = folder / "a_{:04d}".format(j)
            a.tofile(file_name)
            file_name = folder / "b_{:04d}".format(j)
            b.tofile(file_name)

            timeElapsed = time.time() - start2
            print(' Simulation Time:',timeElapsed)

    except:
        print('Not using Numba! The simulation could be slow.')
        edge_verts = np.array(edge_verts)
        arr = np.arange(n_edges)*2
        id0 = edge_verts[arr]     # first vertex indices for each edge
        id1 = edge_verts[arr+1]   # second vertex indices for each edge
        for j in range(props.cache_frame_start, props.cache_frame_end):
            for i in range(time_steps):
                b += brush
                lap_a = np.zeros(n_verts)
                lap_b = np.zeros(n_verts)
                lap_a0 = a[id1] - a[id0]   # laplacian increment for first vertex of each edge
                lap_b0 = b[id1] - b[id0]   # laplacian increment for first vertex of each edge

                # Scatter-add per-edge increments onto both endpoints.
                np.add.at(lap_a, id0, lap_a0)
                np.add.at(lap_b, id0, lap_b0)
                np.add.at(lap_a, id1, -lap_a0)
                np.add.at(lap_b, id1, -lap_b0)

                # Gray-Scott update; eval() of constant strings, equivalent
                # to the plain expressions (see reaction_diffusion_def).
                ab2 = a*b**2
                a += eval("(diff_a*lap_a - ab2 + f*(1-a))*dt")
                b += eval("(diff_b*lap_b + ab2 - (k+f)*b)*dt")

            a = nan_to_num(a)
            b = nan_to_num(b)

            if not(os.path.exists(folder)):
                os.mkdir(folder)
            file_name = folder / "a_{:04d}".format(j)
            a.tofile(file_name)
            file_name = folder / "b_{:04d}".format(j)
            b.tofile(file_name)

    # Write the final state back to the A/B vertex groups.
    if ob.mode == 'WEIGHT_PAINT':
        # slower, but prevent crashes
        vg_a = ob.vertex_groups['A']
        vg_b = ob.vertex_groups['B']
        for i in range(n_verts):
            vg_a.add([i], a[i], 'REPLACE')
            vg_b.add([i], b[i], 'REPLACE')
    else:
        if props.bool_mod:
            # `bm` holds the evaluated copy; rebuild from the real mesh.
            bm.free()   # release old bmesh
            bm = bmesh.new()   # create an empty BMesh
            bm.from_mesh(ob.data)   # fill it in from a Mesh
            dvert_lay = bm.verts.layers.deform.active
        # faster, but can cause crashes while painting weight
        for i, v in enumerate(bm.verts):
            dvert = v[dvert_lay]
            dvert[group_index_a] = a[i]
            dvert[group_index_b] = b[i]
        bm.to_mesh(ob.data)

    # Update Vertex Colors
    if 'A' in ob.data.vertex_colors or 'B' in ob.data.vertex_colors:
        v_id = np.array([v for p in ob.data.polygons for v in p.vertices])

        if 'B' in ob.data.vertex_colors:
            c_val = b[v_id]
            c_val = np.repeat(c_val, 4, axis=0)
            vc = ob.data.vertex_colors['B']
            vc.data.foreach_set('color',c_val.tolist())

        if 'A' in ob.data.vertex_colors:
            c_val = a[v_id]
            c_val = np.repeat(c_val, 4, axis=0)
            vc = ob.data.vertex_colors['A']
            vc.data.foreach_set('color',c_val.tolist())

    # Double toggle leaves the flag unchanged — presumably forces particle
    # systems driven by A/B to refresh; confirm.
    for ps in ob.particle_systems:
        if ps.vertex_group_density == 'B' or ps.vertex_group_density == 'A':
            ps.invert_vertex_group_density = not ps.invert_vertex_group_density
            ps.invert_vertex_group_density = not ps.invert_vertex_group_density

    if props.bool_mod: bpy.data.meshes.remove(me)
    bm.free()
    timeElapsed = time.time() - start
    print(' Closing Time:',timeElapsed)
+
def create_fast_bake_def(ob, frame_start=1, frame_end=250):
    # Experimental/debug variant of the fast bake: advects a scalar field
    # ("values") along a vector field defined by the x/y/module/values
    # vertex groups and writes the result into the same a_####/b_#### cache
    # files. Currently unused (the call site in bake_reaction_diffusion is
    # commented out); contains diagnostic prints and dead code.
    scene = bpy.context.scene
    start = time.time()
    if type(ob) == bpy.types.Scene: return None
    props = ob.reaction_diffusion_settings

    dt = props.dt
    time_steps = props.time_steps
    scale = props.diff_mult

    # Define cache folder (same scheme as fast_bake_def).
    if props.cache_dir == '':
        letters = string.ascii_letters
        random_name = ''.join(rnd.choice(letters) for i in range(6))
        if bpy.context.blend_data.filepath == '':
            folder = Path(bpy.context.preferences.filepaths.temporary_directory)
            folder = folder / 'reaction_diffusion_cache' / random_name
        else:
            folder = '//' + Path(bpy.context.blend_data.filepath).stem
            folder = Path(bpy.path.abspath(folder)) / 'reaction_diffusion_cache' / random_name
        folder.mkdir(parents=True, exist_ok=True)
        props.cache_dir = str(folder)
    else:
        folder = Path(props.cache_dir)

    if props.bool_mod:
        # hide deforming modifiers
        mod_visibility = []
        for m in ob.modifiers:
            mod_visibility.append(m.show_viewport)
            if not mod_preserve_shape(m): m.show_viewport = False

        # evaluated mesh
        dg = bpy.context.evaluated_depsgraph_get()
        ob_eval = ob.evaluated_get(dg)
        me = bpy.data.meshes.new_from_object(ob_eval, preserve_all_data_layers=True, depsgraph=dg)

        # set original visibility
        for v, m in zip(mod_visibility, ob.modifiers):
            m.show_viewport = v
        ob.modifiers.update()
    else:
        me = ob.data

    bm = bmesh.new()   # create an empty BMesh
    bm.from_mesh(me)   # fill it in from a Mesh
    verts = get_vertices_numpy(me)
    dvert_lay = bm.verts.layers.deform.active
    n_edges = len(me.edges)
    n_verts = len(me.vertices)
    # These groups must exist on the object or a KeyError is raised.
    group_index_x = ob.vertex_groups["x"].index
    group_index_y = ob.vertex_groups["y"].index
    group_index_module = ob.vertex_groups["module"].index
    group_index_values = ob.vertex_groups["values"].index

    if not props.bool_cache:
        time_steps = props.time_steps

        # store weight values
        if 'dB' in ob.vertex_groups: db = np.zeros(n_verts)
        if 'grad' in ob.vertex_groups: grad = np.zeros(n_verts)
        vec_x = np.zeros(n_verts)
        vec_y = np.zeros(n_verts)
        vec_module = np.zeros(n_verts)
        values = np.zeros(n_verts)

        vec_x = bmesh_get_weight_numpy(group_index_x, dvert_lay, bm.verts)
        vec_y = bmesh_get_weight_numpy(group_index_y, dvert_lay, bm.verts)
        vec_module = bmesh_get_weight_numpy(group_index_module, dvert_lay, bm.verts)
        values = bmesh_get_weight_numpy(group_index_values, dvert_lay, bm.verts)
        # Build a planar vector field from the weights: remap [0,1] -> [-1,1]
        # and zero the z component.
        field = np.concatenate((vec_x[:,None],vec_y[:,None],vec_y[:,None]*0),axis=1)
        field = field*2-1
        field[:,2] = 0
        edge_verts = get_edges_numpy(me)

        id0 = edge_verts[:,0]
        id1 = edge_verts[:,1]
        vert0 = verts[id0]
        vert1 = verts[id1]
        vec = vert1-vert0
        edge_field = (field[id0] + field[id1])/2    # average vector associated to the edge
        print(vert0.shape)
        print(field.shape)
        print(edge_field.shape)
        # normalize vectors
        vec /= np.linalg.norm(vec,axis=1)[:,None]
        edge_field /= np.linalg.norm(edge_field,axis=1)[:,None]
        # Per-edge flow: projection of the field direction on the edge
        # direction (row-wise dot product).
        edge_flow = np.einsum('...j,...j', vec, edge_field)
        #sign = (edge_flow>0).astype(int)
        #values[edge_verts[sign]] += values[edge_verts[1-sign]]*
        #values[verts0] += values[verts1]*edge_flow

        timeElapsed = time.time() - start
        print(' Preparation Time:',timeElapsed)
        start = time.time()

        # Preserve energy
        mult = np.zeros(values.shape)
        #mult[id0] -= edge_flow
        #mult[id1] += edge_flow
        np.add.at(mult,id0,-edge_flow)
        np.add.at(mult,id1,edge_flow)
        print("mult")
        mult = scale/mult
        print(mult)
        print(np.sum(mult))


        #try:
        print(vec)
        print(edge_flow)
        print(edge_flow)

        # First iteration only writes the initial state (bool_run is False).
        bool_run = False
        for j in range(props.cache_frame_start, props.cache_frame_end+1):
            start2 = time.time()
            print("{:6d} Reaction-Diffusion: {}".format(j, ob.name))
            if bool_run:
                print(values)
                #for i in range(1):
                values = integrate_field(n_edges,id0,id1,values,edge_flow,mult,time_steps)
                #values0 = values
                #np.add.at(values, id0, values0[id1]*edge_flow*mult[id1])
                #np.add.at(values, id1, -values0[id0]*edge_flow*mult[id0])
                #np.add.at(values, id0, values0[id1]*edge_flow*mult)
                #np.add.at(values, id1, -values0[id0]*edge_flow*mult)
                #values[id1] += values0[id0]*edge_flow/mult[id1]*dt
                #values[id0] -= values0[id1]*edge_flow/mult[id0]*dt
                #values[id1] = edge_flow
                #values[id1] += edge_flow
                #a, b = numba_reaction_diffusion(n_verts, n_edges, edge_verts, a, b, _brush, _diff_a, _diff_b, _f, _k, dt, time_steps)

                '''
                lap_a = np.zeros(n_verts)
                lap_b = np.zeros(n_verts)
                lap_a0 = a[id1] - a[id0]   # laplacian increment for first vertex of each edge
                lap_b0 = b[id1] - b[id0]   # laplacian increment for first vertex of each edge

                np.add.at(lap_a, id0, lap_a0)
                np.add.at(lap_b, id0, lap_b0)
                np.add.at(lap_a, id1, -lap_a0)
                np.add.at(lap_b, id1, -lap_b0)
                '''
            else:
                bool_run = True

            # The same array is written to both cache slots.
            if not(os.path.exists(folder)):
                os.mkdir(folder)
            file_name = folder / "a_{:04d}".format(j)
            values.tofile(file_name)
            file_name = folder / "b_{:04d}".format(j)
            values.tofile(file_name)


            timeElapsed = time.time() - start2
            print(' Simulation Time:',timeElapsed)

    if props.bool_mod: bpy.data.meshes.remove(me)
    bm.free()
    timeElapsed = time.time() - start
    print(' Closing Time:',timeElapsed)
+
+
+
+
+
class TISSUE_PT_reaction_diffusion(Panel):
    """Object-data panel exposing the Reaction-Diffusion simulation
    parameters, cache controls and output attributes."""
    bl_space_type = 'PROPERTIES'
    bl_region_type = 'WINDOW'
    bl_context = "data"
    bl_label = "Tissue Reaction-Diffusion"
    bl_options = {'DEFAULT_CLOSED'}

    @classmethod
    def poll(cls, context):
        # BUGFIX: the original `'A' and 'B' in vertex_groups` only tested
        # membership of 'B' ('A' is a truthy constant, so the `and` returns
        # the right operand). Also guard against a missing active object.
        ob = context.object
        return ob is not None and 'A' in ob.vertex_groups and 'B' in ob.vertex_groups

    def draw(self, context):
        reaction_diffusion_add_handler(self, context)

        ob = context.object
        props = ob.reaction_diffusion_settings
        layout = self.layout
        col = layout.column(align=True)
        row = col.row(align=True)
        # BUGFIX: test membership of both groups (same precedence bug as
        # in poll).
        if not ('A' in ob.vertex_groups and 'B' in ob.vertex_groups):
            row.operator("object.start_reaction_diffusion",
                        icon="EXPERIMENTAL",
                        text="Reaction-Diffusion")
        else:
            row.operator("object.start_reaction_diffusion",
                        icon="EXPERIMENTAL",
                        text="Reset Reaction-Diffusion")
        row = col.row(align=True)
        row.prop(props, "run", text="Run Reaction-Diffusion")
        col = layout.column(align=True)
        row = col.row(align=True)
        row.prop(props, "time_steps")
        row.prop(props, "dt")
        row.enabled = not props.bool_cache
        col.separator()
        # Each parameter field is disabled while it is driven by a vertex
        # group or while cached playback is active.
        row = col.row(align=True)
        col1 = row.column(align=True)
        col1.prop(props, "diff_a")
        col1.enabled = props.vertex_group_diff_a == '' and not props.bool_cache
        col1 = row.column(align=True)
        col1.prop(props, "diff_b")
        col1.enabled = props.vertex_group_diff_b == '' and not props.bool_cache
        row = col.row(align=True)
        row.prop(props, "diff_mult")
        row.enabled = props.vertex_group_scale == '' and not props.bool_cache
        #col.separator()
        row = col.row(align=True)
        col1 = row.column(align=True)
        col1.prop(props, "f")
        col1.enabled = props.vertex_group_f == '' and not props.bool_cache
        col1 = row.column(align=True)
        col1.prop(props, "k")
        col1.enabled = props.vertex_group_k == '' and not props.bool_cache
        col.separator()
        col.label(text='Cache:')
        #col.prop(props, "bool_cache")
        col.prop(props, "cache_dir", text='')
        col.separator()
        row = col.row(align=True)
        row.prop(props, "cache_frame_start")
        row.prop(props, "cache_frame_end")
        col.separator()
        if props.bool_cache:
            col.operator("object.reaction_diffusion_free_data")
        else:
            row = col.row(align=True)
            row.operator("object.bake_reaction_diffusion")
            file = bpy.context.blend_data.filepath
            temp = bpy.context.preferences.filepaths.temporary_directory
            # Baking needs somewhere to write: a saved .blend, a temp
            # directory or an explicit cache directory.
            if file == temp == props.cache_dir == '':
                row.enabled = False
                col.label(text="Cannot use cache", icon='ERROR')
                # BUGFIX: wording of the user-facing hint ("save the Blender")
                col.label(text='please save the Blender file or set a Cache directory')
            col.prop(props, "fast_bake")

        col.separator()
        col.label(text='Output attributes:')
        row = col.row(align=True)
        col2 = row.column(align=True)
        row2 = col2.row(align=True)
        row2.prop(props, "update_weight_a", icon='GROUP_VERTEX', text='A')
        row2.prop(props, "update_weight_b", icon='GROUP_VERTEX', text='B')
        col2.enabled = props.bool_cache
        row.separator()
        #row.prop(props, "update_colors_a", icon='GROUP_VCOL', text='A')
        #row.prop(props, "update_colors_b", icon='GROUP_VCOL', text='B')
        row.prop(props, "update_colors", icon='GROUP_VCOL', text='AB')
        row.separator()
        row.prop(props, "update_uv", icon='GROUP_UVS', text='AB')
        col.prop(props,'normalize')

        #col.prop_search(props, 'vertex_group_diff_a', ob, "vertex_groups", text='Diff A')
        #col.prop_search(props, 'vertex_group_diff_b', ob, "vertex_groups", text='Diff B')
        #col.prop_search(props, 'vertex_group_scale', ob, "vertex_groups", text='Scale')
        #col.prop_search(props, 'vertex_group_f', ob, "vertex_groups", text='f')
        #col.prop_search(props, 'vertex_group_k', ob, "vertex_groups", text='k')
+
+
class TISSUE_PT_reaction_diffusion_weight(Panel):
    """Sub-panel of the Reaction-Diffusion panel: vertex-group drivers for
    the simulation parameters (brush, diffusion, scale, f, k)."""
    bl_space_type = 'PROPERTIES'
    bl_region_type = 'WINDOW'
    bl_context = "data"
    bl_parent_id = "TISSUE_PT_reaction_diffusion"
    bl_label = "Vertex Groups"
    bl_options = {'DEFAULT_CLOSED'}

    @classmethod
    def poll(cls, context):
        # BUGFIX: the original `'A' and 'B' in vertex_groups` only tested
        # membership of 'B'. Also guard against a missing active object.
        ob = context.object
        return ob is not None and 'A' in ob.vertex_groups and 'B' in ob.vertex_groups

    def draw(self, context):
        ob = context.object
        props = ob.reaction_diffusion_settings
        layout = self.layout
        #layout.use_property_split = True
        col = layout.column(align=True)
        col.prop(props, "bool_mod")
        if props.bool_mod and props.fast_bake:
            col.label(text="When Fast Bake is on, the modifiers", icon='ERROR')
            col.label(text="  are used only for the first frame")
        col.separator()
        insert_weight_parameter(col, ob, 'brush', text='Brush:')
        insert_weight_parameter(col, ob, 'diff_a', text='Diff A:')
        insert_weight_parameter(col, ob, 'diff_b', text='Diff B:')
        insert_weight_parameter(col, ob, 'scale', text='Scale:')
        insert_weight_parameter(col, ob, 'f', text='f:')
        insert_weight_parameter(col, ob, 'k', text='k:')
        # Drivers are frozen while cached playback is active.
        col.enabled = not props.bool_cache
+
+def insert_weight_parameter(col, ob, name, text=''):
+ props = ob.reaction_diffusion_settings
+ split = col.split(factor=0.25, align=True)
+ col2 = split.column(align=True)
+ col2.label(text=text)
+ col2 = split.column(align=True)
+ row2 = col2.row(align=True)
+ row2.prop_search(props, 'vertex_group_' + name, ob, "vertex_groups", text='')
+ if name != 'brush':
+ row2.prop(props, "invert_vertex_group_" + name, text="", toggle=True, icon='ARROW_LEFTRIGHT')
+ if 'vertex_group_' + name in props:
+ if props['vertex_group_' + name] != '':
+ if name == 'brush':
+ col2.prop(props, "brush_mult")
+ else:
+ row2 = col2.row(align=True)
+ row2.prop(props, "min_" + name, text="Min")
+ row2 = col2.row(align=True)
+ row2.prop(props, "max_" + name, text="Max")
+ col.separator()
+
+def contour_edges_pattern(operator, c, verts_count, iso_val, vertices, normals, filtered_edges, weight, pattern_weight, bevel_weight):
+ # vertices indexes
+ id0 = filtered_edges[:,0]
+ id1 = filtered_edges[:,1]
+ # vertices weight
+ w0 = weight[id0]
+ w1 = weight[id1]
+ # weight condition
+ bool_w0 = w0 < iso_val
+ bool_w1 = w1 < iso_val
+
+ # mask all edges that have one weight value below the iso value
+ mask_new_verts = np.logical_xor(bool_w0, bool_w1)
+ if not mask_new_verts.any():
+ return np.array([[None]]), {}, np.array([[None]]), np.array([[None]])
+
+ id0 = id0[mask_new_verts]
+ id1 = id1[mask_new_verts]
+ # filter arrays
+ v0 = vertices[id0]
+ v1 = vertices[id1]
+ n0 = normals[id0]
+ n1 = normals[id1]
+ w0 = w0[mask_new_verts]
+ w1 = w1[mask_new_verts]
+ pattern0 = pattern_weight[id0]
+ pattern1 = pattern_weight[id1]
+ try:
+ bevel0 = bevel_weight[id0]
+ bevel1 = bevel_weight[id1]
+ except: pass
+
+ ### Spiral
+ #edge_nor = (n0+n1)/2
+ #shift = np.arctan2(edge_nor[:,0], edge_nor[:,1])/2/pi*delta_iso
+
+ #param = (iso_val + shift - w0)/(w1-w0)
+ param = (iso_val - w0)/(w1-w0)
+ # pattern displace
+ #mult = 1 if c%2 == 0 else -1
+ if c%(operator.in_steps + operator.out_steps) < operator.in_steps:
+ mult = operator.in_displace
+ else:
+ mult = operator.out_displace
+ pattern_value = pattern0 + (pattern1-pattern0)*param
+ try:
+ bevel_value = bevel0 + (bevel1-bevel0)*param
+ bevel_value = np.expand_dims(bevel_value,axis=1)
+ except: bevel_value = None
+ disp = pattern_value * mult
+
+ param = np.expand_dims(param,axis=1)
+ disp = np.expand_dims(disp,axis=1)
+ verts = v0 + (v1-v0)*param
+ norm = n0 + (n1-n0)*param
+ if operator.limit_z: disp *= 1-abs(np.expand_dims(norm[:,2], axis=1))
+ verts = verts + norm*disp
+ #verts = verts[np.flip(np.argsort(shift))]
+ #verts = verts[np.argsort(shift)]
+
+ # indexes of edges with new vertices
+ edges_index = filtered_edges[mask_new_verts][:,2]
+
+ # remove all edges completely below the iso value
+ #mask_edges = np.logical_not(np.logical_and(bool_w0, bool_w1))
+ #filtered_edges = filtered_edges[mask_edges]
+ return filtered_edges, edges_index, verts, bevel_value
+
+def contour_bmesh(me, bm, weight, iso_val):
+ bm.verts.ensure_lookup_table()
+ bm.edges.ensure_lookup_table()
+ bm.faces.ensure_lookup_table()
+
+ # store weight values
+
+ vertices = get_vertices_numpy(me)
+ faces_mask = np.array(bm.faces)
+ filtered_edges = get_edges_id_numpy(me)
+ n_verts = len(bm.verts)
+
+ #############################
+
+ # vertices indexes
+ id0 = filtered_edges[:,0]
+ id1 = filtered_edges[:,1]
+ # vertices weight
+ w0 = weight[id0]
+ w1 = weight[id1]
+ # weight condition
+ bool_w0 = w0 < iso_val
+ bool_w1 = w1 < iso_val
+
+ # mask all edges that have one weight value below the iso value
+ mask_new_verts = np.logical_xor(bool_w0, bool_w1)
+ if not mask_new_verts.any(): return np.array([[None]]), {}, np.array([[None]])
+
+ id0 = id0[mask_new_verts]
+ id1 = id1[mask_new_verts]
+ # filter arrays
+ v0 = vertices[id0]
+ v1 = vertices[id1]
+ w0 = w0[mask_new_verts]
+ w1 = w1[mask_new_verts]
+ param = (iso_val-w0)/(w1-w0)
+ param = np.expand_dims(param,axis=1)
+ verts = v0 + (v1-v0)*param
+
+ # indexes of edges with new vertices
+ #edges_index = filtered_edges[mask_new_verts][:,2]
+
+ edges_id = {}
+ for i, e in enumerate(filtered_edges):
+ #edges_id[id] = i + n_verts
+ edges_id['{}_{}'.format(e[0],e[1])] = i + n_verts
+ edges_id['{}_{}'.format(e[1],e[0])] = i + n_verts
+
+
+ '''
+ for e in filtered_edges:
+ id0 = e.verts[0].index
+ id1 = e.verts[1].index
+ w0 = weight[id0]
+ w1 = weight[id1]
+
+ if w0 == w1: continue
+ elif w0 > iso_val and w1 > iso_val:
+ continue
+ elif w0 < iso_val and w1 < iso_val: continue
+ elif w0 == iso_val or w1 == iso_val: continue
+ else:
+ v0 = me0.vertices[id0].co
+ v1 = me0.vertices[id1].co
+ v = v0.lerp(v1, (iso_val-w0)/(w1-w0))
+ delete_edges.append(e)
+ verts.append(v)
+ edges_id[str(id0)+"_"+str(id1)] = count
+ edges_id[str(id1)+"_"+str(id0)] = count
+ count += 1
+ '''
+
+ splitted_faces = []
+
+ switch = False
+ # splitting faces
+ for f in faces_mask:
+ # create sub-faces slots. Once a new vertex is reached it will
+ # change slot, storing the next vertices for a new face.
+ build_faces = [[],[]]
+ #switch = False
+ verts0 = list(me.polygons[f.index].vertices)
+ verts1 = list(verts0)
+ verts1.append(verts1.pop(0)) # shift list
+ for id0, id1 in zip(verts0, verts1):
+
+ # add first vertex to active slot
+ build_faces[switch].append(id0)
+
+ # try to split edge
+ try:
+ # check if the edge must be splitted
+ new_vert = edges_id['{}_{}'.format(id0,id1)]
+ # add new vertex
+ build_faces[switch].append(new_vert)
+ # if there is an open face on the other slot
+ if len(build_faces[not switch]) > 0:
+ # store actual face
+ splitted_faces.append(build_faces[switch])
+ # reset actual faces and switch
+ build_faces[switch] = []
+ # change face slot
+ switch = not switch
+ # continue previous face
+ build_faces[switch].append(new_vert)
+ except: pass
+ if len(build_faces[not switch]) == 2:
+ build_faces[not switch].append(id0)
+ if len(build_faces[not switch]) > 2:
+ splitted_faces.append(build_faces[not switch])
+ # add last face
+ splitted_faces.append(build_faces[switch])
+
+ # adding new vertices use fast local method access
+ _new_vert = bm.verts.new
+ for v in verts: _new_vert(v)
+ bm.verts.ensure_lookup_table()
+
+ # deleting old edges/faces
+ bm.edges.ensure_lookup_table()
+ remove_edges = [bm.edges[i] for i in filtered_edges[:,2]]
+ #for e in remove_edges: bm.edges.remove(e)
+ #for e in delete_edges: bm.edges.remove(e)
+
+ bm.verts.ensure_lookup_table()
+ # adding new faces use fast local method access
+ _new_face = bm.faces.new
+ missed_faces = []
+ for f in splitted_faces:
+ try:
+ face_verts = [bm.verts[i] for i in f]
+ _new_face(face_verts)
+ except:
+ missed_faces.append(f)
+
+ #me = bpy.data.meshes.new('_tissue_tmp_')
+ bm.to_mesh(me)
+ weight = np.concatenate((weight, np.ones(len(verts))*iso_val))
+
+ return me, bm, weight
+
+
+
+
+class tissue_weight_streamlines(Operator):
+ bl_idname = "object.tissue_weight_streamlines"
+ bl_label = "Streamlines Curves"
+ bl_description = ("")
+ bl_options = {'REGISTER', 'UNDO'}
+
+ mode : EnumProperty(
+ items=(
+ ('VERTS', "Verts", "Follow vertices"),
+ ('EDGES', "Edges", "Follow Edges")
+ ),
+ default='VERTS',
+ name="Streamlines path mode"
+ )
+
+ interpolation : EnumProperty(
+ items=(
+ ('POLY', "Poly", "Generate Polylines"),
+ ('NURBS', "NURBS", "Generate Nurbs curves")
+ ),
+ default='POLY',
+ name="Interpolation mode"
+ )
+
+ use_modifiers : BoolProperty(
+ name="Use Modifiers", default=True,
+ description="Apply all the modifiers")
+
+ use_selected : BoolProperty(
+ name="Use Selected Vertices", default=False,
+ description="Use selected vertices as Seed")
+
+ same_weight : BoolProperty(
+ name="Same Weight", default=True,
+ description="Continue the streamlines when the weight is the same")
+
+ min_iso : FloatProperty(
+ name="Min Value", default=0., soft_min=0, soft_max=1,
+ description="Minimum weight value")
+ max_iso : FloatProperty(
+ name="Max Value", default=1, soft_min=0, soft_max=1,
+ description="Maximum weight value")
+
+ rand_seed : IntProperty(
+ name="Seed", default=0, min=0, soft_max=10,
+ description="Random Seed")
+ n_curves : IntProperty(
+ name="Curves", default=50, soft_min=1, soft_max=100000,
+ description="Number of Curves")
+ min_rad = 1
+ max_rad = 1
+
+ pos_steps : IntProperty(
+ name="High Steps", default=50, min=0, soft_max=100,
+ description="Number of steps in the direction of high weight")
+ neg_steps : IntProperty(
+ name="Low Steps", default=50, min=0, soft_max=100,
+ description="Number of steps in the direction of low weight")
+
+ bevel_depth : FloatProperty(
+ name="Bevel Depth", default=0, min=0, soft_max=1,
+ description="")
+ min_bevel_depth : FloatProperty(
+ name="Min Bevel Depth", default=0.1, min=0, soft_max=1,
+ description="")
+ max_bevel_depth : FloatProperty(
+ name="Max Bevel Depth", default=1, min=0, soft_max=1,
+ description="")
+
+ rand_dir : FloatProperty(
+ name="Randomize", default=0, min=0, max=1,
+ description="Randomize streamlines directions (Slower)")
+
+ vertex_group_seeds : StringProperty(
+ name="Displace", default='',
+ description="Vertex Group used for pattern displace")
+
+ vertex_group_bevel : StringProperty(
+ name="Bevel", default='',
+ description="Variable Bevel depth")
+
+ object_name : StringProperty(
+ name="Active Object", default='',
+ description="")
+
+ try: vg_name = bpy.context.object.vertex_groups.active.name
+ except: vg_name = ''
+
+ vertex_group_streamlines : StringProperty(
+ name="Flow", default=vg_name,
+ description="Vertex Group used for streamlines")
+
+ @classmethod
+ def poll(cls, context):
+ ob = context.object
+ return ob and len(ob.vertex_groups) > 0 or ob.type == 'CURVE'
+
+ def invoke(self, context, event):
+ return context.window_manager.invoke_props_dialog(self, width=250)
+
+ def draw(self, context):
+ if not context.object.type == 'CURVE':
+ self.object_name = context.object.name
+ ob = bpy.data.objects[self.object_name]
+ if self.vertex_group_streamlines not in [vg.name for vg in ob.vertex_groups]:
+ self.vertex_group_streamlines = ob.vertex_groups.active.name
+ layout = self.layout
+ col = layout.column(align=True)
+ row = col.row(align=True)
+ row.prop(self, 'mode', expand=True,
+ slider=True, toggle=False, icon_only=False, event=False,
+ full_event=False, emboss=True, index=-1)
+ col.prop(self, "use_modifiers")
+ col.label(text="Streamlines Curves:")
+ row = col.row(align=True)
+ row.prop(self, 'interpolation', expand=True,
+ slider=True, toggle=False, icon_only=False, event=False,
+ full_event=False, emboss=True, index=-1)
+ col.separator()
+ col.prop_search(self, 'vertex_group_streamlines', ob, "vertex_groups", text='')
+ if not (self.use_selected or context.mode == 'EDIT_MESH'):
+ row = col.row(align=True)
+ row.prop(self,'n_curves')
+ #row.enabled = context.mode != 'EDIT_MESH'
+ row = col.row(align=True)
+ row.prop(self,'rand_seed')
+ #row.enabled = context.mode != 'EDIT_MESH'
+ row = col.row(align=True)
+ row.prop(self,'neg_steps')
+ row.prop(self,'pos_steps')
+ #row = col.row(align=True)
+ #row.prop(self,'min_iso')
+ #row.prop(self,'max_iso')
+ col.prop(self, "same_weight")
+ col.separator()
+ col.label(text='Curves Bevel:')
+ col.prop_search(self, 'vertex_group_bevel', ob, "vertex_groups", text='')
+ if self.vertex_group_bevel != '':
+ row = col.row(align=True)
+ row.prop(self,'min_bevel_depth')
+ row.prop(self,'max_bevel_depth')
+ else:
+ col.prop(self,'bevel_depth')
+ col.separator()
+ col.prop(self, "rand_dir")
+
+ def execute(self, context):
+ start_time = timeit.default_timer()
+ try:
+ check = context.object.vertex_groups[0]
+ except:
+ self.report({'ERROR'}, "The object doesn't have Vertex Groups")
+ return {'CANCELLED'}
+ ob = bpy.data.objects[self.object_name]
+ ob.select_set(False)
+
+
+ seeds = []
+
+ if bpy.context.mode == 'EDIT_MESH':
+ self.use_selected = True
+ bpy.ops.object.mode_set(mode='OBJECT')
+ #ob = bpy.context.object
+ #me = simple_to_mesh(ob)
+ ob = convert_object_to_mesh(ob, apply_modifiers=self.use_modifiers)
+ #dg = context.evaluated_depsgraph_get()
+ #ob = ob.evaluated_get(dg)
+ me = ob.data
+
+ if self.use_selected:
+ # generate new bmesh
+ bm = bmesh.new()
+ bm.from_mesh(me)
+ print(len(me.vertices))
+ #for v in me.vertices:
+ # if v.select: seeds.append(v.index)
+ for v in bm.verts:
+ if v.select: seeds.append(v.index)
+ bm.free()
+ n_verts = len(me.vertices)
+ n_edges = len(me.edges)
+ n_faces = len(me.polygons)
+
+ # store weight values
+ try:
+ weight = get_weight_numpy(ob.vertex_groups[self.vertex_group_streamlines], n_verts)
+ except:
+ bpy.data.objects.remove(ob)
+ self.report({'ERROR'}, "Please select a Vertex Group for streamlines")
+ return {'CANCELLED'}
+
+ variable_bevel = False
+ bevel_weight = None
+ bevel_depth = self.bevel_depth
+ try:
+ if self.min_bevel_depth == self.max_bevel_depth:
+ #bevel_weight = np.ones((n_verts))
+ bevel_depth = self.min_bevel_depth
+ else:
+ b0 = min(self.min_bevel_depth, self.max_bevel_depth)
+ b1 = max(self.min_bevel_depth, self.max_bevel_depth)
+ bevel_weight = get_weight_numpy(ob.vertex_groups[self.vertex_group_bevel], n_verts)
+ if self.min_bevel_depth > self.max_bevel_depth:
+ bevel_weight = 1-bevel_weight
+ bevel_weight = b0/b1 + bevel_weight*((b1-b0)/b1)
+ bevel_depth = b1
+ variable_bevel = True
+ except:
+ pass#bevel_weight = np.ones((n_verts))
+
+
+ if not seeds:
+ np.random.seed(self.rand_seed)
+ seeds = np.random.randint(n_verts, size=self.n_curves)
+
+ #weight = np.array(get_weight(ob.vertex_groups.active, n_verts))
+
+ curves_pts = []
+ curves_weight = []
+
+ neigh = [[] for i in range(n_verts)]
+ if self.mode == 'EDGES':
+ # store neighbors
+ for e in me.edges:
+ ev = e.vertices
+ neigh[ev[0]].append(ev[1])
+ neigh[ev[1]].append(ev[0])
+
+ elif self.mode == 'VERTS':
+ # store neighbors
+ for p in me.polygons:
+ face_verts = [v for v in p.vertices]
+ n_face_verts = len(face_verts)
+ for i in range(n_face_verts):
+ fv = face_verts.copy()
+ neigh[fv.pop(i)] += fv
+
+ neigh_weight = [weight[n].tolist() for n in neigh]
+
+ # evaluate direction
+ next_vert = [-1]*n_verts
+
+ if self.rand_dir > 0:
+ for i in range(n_verts):
+ n = neigh[i]
+ nw = neigh_weight[i]
+ sorted_nw = neigh_weight[i].copy()
+ sorted_nw.sort()
+ for w in sorted_nw:
+ neigh[i] = [n[nw.index(w)] for w in sorted_nw]
+ else:
+ if self.pos_steps > 0:
+ for i in range(n_verts):
+ n = neigh[i]
+ nw = neigh_weight[i]
+ max_w = max(nw)
+ if self.same_weight:
+ if max_w >= weight[i]:
+ next_vert[i] = n[nw.index(max(nw))]
+ else:
+ if max_w > weight[i]:
+ next_vert[i] = n[nw.index(max(nw))]
+
+ if self.neg_steps > 0:
+ prev_vert = [-1]*n_verts
+ for i in range(n_verts):
+ n = neigh[i]
+ nw = neigh_weight[i]
+ min_w = min(nw)
+ if self.same_weight:
+ if min_w <= weight[i]:
+ prev_vert[i] = n[nw.index(min(nw))]
+ else:
+ if min_w < weight[i]:
+ prev_vert[i] = n[nw.index(min(nw))]
+
+ co = [0]*3*n_verts
+ me.vertices.foreach_get('co', co)
+ co = np.array(co).reshape((-1,3))
+
+ # create streamlines
+ curves = []
+ for i in seeds:
+ next_pts = [i]
+ for j in range(self.pos_steps):
+ if self.rand_dir > 0:
+ n = neigh[next_pts[-1]]
+ next = n[int(len(n) * (1-random.random() * self.rand_dir))]
+ else:
+ next = next_vert[next_pts[-1]]
+ if next > 0:
+ if next not in next_pts: next_pts.append(next)
+ else: break
+
+ prev_pts = [i]
+ for j in range(self.neg_steps):
+ if self.rand_dir > 0:
+ n = neigh[prev_pts[-1]]
+ prev = n[int(len(n) * random.random() * self.rand_dir)]
+ else:
+ prev = prev_vert[prev_pts[-1]]
+ if prev > 0:
+ if prev not in prev_pts:
+ prev_pts.append(prev)
+ else: break
+
+ next_pts = np.array(next_pts).astype('int')
+ prev_pts = np.flip(prev_pts[1:]).astype('int')
+ all_pts = np.concatenate((prev_pts, next_pts))
+ if len(all_pts) > 1:
+ curves.append(all_pts)
+ crv = nurbs_from_vertices(curves, co, bevel_weight, ob.name + '_Streamlines', True, self.interpolation)
+ crv.data.bevel_depth = bevel_depth
+ crv.matrix_world = ob.matrix_world
+ bpy.data.objects.remove(ob)
+
+ print("Streamlines Curves, total time: " + str(timeit.default_timer() - start_time) + " sec")
+ return {'FINISHED'}
diff --git a/node_wrangler.py b/node_wrangler.py
index ddc7ad50..19ccf0c5 100644
--- a/node_wrangler.py
+++ b/node_wrangler.py
@@ -716,6 +716,11 @@ def autolink(node1, node2, links):
print("Could not make a link from " + node1.name + " to " + node2.name)
return link_made
+def abs_node_location(node):
+ abs_location = node.location
+ if node.parent is None:
+ return abs_location
+ return abs_location + abs_node_location(node.parent)
def node_at_pos(nodes, context, event):
nodes_under_mouse = []
@@ -730,23 +735,10 @@ def node_at_pos(nodes, context, event):
for node in nodes:
skipnode = False
if node.type != 'FRAME': # no point trying to link to a frame node
- locx = node.location.x
- locy = node.location.y
dimx = node.dimensions.x/dpifac()
dimy = node.dimensions.y/dpifac()
- if node.parent:
- locx += node.parent.location.x
- locy += node.parent.location.y
- if node.parent.parent:
- locx += node.parent.parent.location.x
- locy += node.parent.parent.location.y
- if node.parent.parent.parent:
- locx += node.parent.parent.parent.location.x
- locy += node.parent.parent.parent.location.y
- if node.parent.parent.parent.parent:
- # Support three levels or parenting
- # There's got to be a better way to do this...
- skipnode = True
+ locx, locy = abs_node_location(node)
+
if not skipnode:
node_points_with_dist.append([node, hypot(x - locx, y - locy)]) # Top Left
node_points_with_dist.append([node, hypot(x - (locx + dimx), y - locy)]) # Top Right
@@ -762,13 +754,9 @@ def node_at_pos(nodes, context, event):
for node in nodes:
if node.type != 'FRAME' and skipnode == False:
- locx = node.location.x
- locy = node.location.y
+ locx, locy = abs_node_location(node)
dimx = node.dimensions.x/dpifac()
dimy = node.dimensions.y/dpifac()
- if node.parent:
- locx += node.parent.location.x
- locy += node.parent.location.y
if (locx <= x <= locx + dimx) and \
(locy - dimy <= y <= locy):
nodes_under_mouse.append(node)
@@ -823,26 +811,19 @@ def draw_circle_2d_filled(shader, mx, my, radius, colour=(1.0, 1.0, 1.0, 0.7)):
shader.uniform_float("color", colour)
batch.draw(shader)
+
def draw_rounded_node_border(shader, node, radius=8, colour=(1.0, 1.0, 1.0, 0.7)):
area_width = bpy.context.area.width - (16*dpifac()) - 1
bottom_bar = (16*dpifac()) + 1
sides = 16
radius = radius*dpifac()
- nlocx = (node.location.x+1)*dpifac()
- nlocy = (node.location.y+1)*dpifac()
+ nlocx, nlocy = abs_node_location(node)
+
+ nlocx = (nlocx+1)*dpifac()
+ nlocy = (nlocy+1)*dpifac()
ndimx = node.dimensions.x
ndimy = node.dimensions.y
- # This is a stupid way to do this... TODO use while loop
- if node.parent:
- nlocx += node.parent.location.x
- nlocy += node.parent.location.y
- if node.parent.parent:
- nlocx += node.parent.parent.location.x
- nlocy += node.parent.parent.location.y
- if node.parent.parent.parent:
- nlocx += node.parent.parent.parent.location.x
- nlocy += node.parent.parent.parent.location.y
if node.hide:
nlocx += -1
diff --git a/precision_drawing_tools/__init__.py b/precision_drawing_tools/__init__.py
index 9d3ae175..c1769062 100644
--- a/precision_drawing_tools/__init__.py
+++ b/precision_drawing_tools/__init__.py
@@ -18,7 +18,7 @@ bl_info = {
"location": "View3D > UI > PDT",
"description": "Precision Drawing Tools for Accurate Modelling",
"warning": "",
- "doc_url": "https://github.com/Clockmender/Precision-Drawing-Tools/wiki",
+ "doc_url": "{BLENDER_MANUAL_URL}/addons/3d_view/precision_drawing_tools/index.html",
"category": "3D View",
}
diff --git a/real_snow.py b/real_snow.py
index d7290b3b..a61268db 100644
--- a/real_snow.py
+++ b/real_snow.py
@@ -7,7 +7,7 @@ bl_info = {
"version": (1, 2),
"blender": (2, 83, 0),
"location": "View 3D > Properties Panel",
- "doc_url": "https://github.com/marcopavanello/real-snow",
+ "doc_url": "{BLENDER_MANUAL_URL}/addons/object/real_snow.html",
"tracker_url": "https://github.com/marcopavanello/real-snow/issues",
"support": "COMMUNITY",
"category": "Object",
diff --git a/viewport_vr_preview/action_map_io.py b/viewport_vr_preview/action_map_io.py
index 809a2906..92b5b676 100644
--- a/viewport_vr_preview/action_map_io.py
+++ b/viewport_vr_preview/action_map_io.py
@@ -69,7 +69,7 @@ def ami_args_as_data(ami):
def ami_data_from_args(ami, args):
ami.type = args["type"]
-
+
for path in args["user_paths"]:
ami.user_paths.new(path)
diff --git a/viewport_vr_preview/defaults.py b/viewport_vr_preview/defaults.py
index fb793554..aa940c2d 100644
--- a/viewport_vr_preview/defaults.py
+++ b/viewport_vr_preview/defaults.py
@@ -100,7 +100,7 @@ def vr_defaults_action_add(am,
ami = am.actionmap_items.new(name, True)
if ami:
- ami.type = 'FLOAT'
+ ami.type = 'FLOAT'
for path in user_paths:
ami.user_paths.new(path)
ami.op = op