git.blender.org/blender-addons.git
author     Antonio Vazquez <blendergit@gmail.com>   2020-08-14 19:02:03 +0300
committer  Antonio Vazquez <blendergit@gmail.com>   2020-08-14 19:02:03 +0300
commit     39f51712fb57631bccb69883c39a8807a34319d7 (patch)
tree       ba93044850be2bae955e8c0512f454aca25ad77e
parent     d7e2005d22435e104507ac129cd15d2eb6a10258 (diff)
parent     abeef11a77ab5b05f4ce2c71b65c341bdcb7303d (diff)

Merge branch 'master' into greasepencil-addon
-rw-r--r--  add_curve_extra_objects/add_curve_spirals.py | 26
-rw-r--r--  add_curve_sapling/__init__.py | 2
-rw-r--r--  add_curve_sapling/presets/quaking_aspen.py | 2
-rw-r--r--  add_curve_sapling/presets/willow.py | 2
-rw-r--r--  add_mesh_BoltFactory/Boltfactory.py | 8
-rw-r--r--  add_mesh_BoltFactory/createMesh.py | 154
-rw-r--r--  add_mesh_extra_objects/add_mesh_round_brilliant.py | 2
-rw-r--r--  archipack/archipack_object.py | 2
-rw-r--r--  archipack/archipack_reference_point.py | 3
-rw-r--r--  archipack/archipack_wall2.py | 2
-rw-r--r--  blenderkit/__init__.py | 87
-rw-r--r--  blenderkit/append_link.py | 47
-rw-r--r--  blenderkit/bkit_oauth.py | 26
-rw-r--r--  blenderkit/data/categories.json | 1509
-rw-r--r--  blenderkit/download.py | 108
-rw-r--r--  blenderkit/paths.py | 24
-rw-r--r--  blenderkit/ratings.py | 268
-rw-r--r--  blenderkit/rerequests.py | 9
-rw-r--r--  blenderkit/search.py | 152
-rw-r--r--  blenderkit/tasks_queue.py | 14
-rw-r--r--  blenderkit/ui.py | 99
-rw-r--r--  blenderkit/ui_panels.py | 322
-rw-r--r--  blenderkit/upload.py | 4
-rw-r--r--  blenderkit/utils.py | 118
-rw-r--r--  bone_selection_sets.py | 14
-rw-r--r--  io_coat3D/__init__.py | 193
-rw-r--r--  io_coat3D/data.json | 9
-rw-r--r--  io_coat3D/tex.py | 326
-rw-r--r--  io_coat3D/texVR.py | 846
-rw-r--r--  io_mesh_ply/__init__.py | 72
-rw-r--r--  io_mesh_ply/export_ply.py | 154
-rw-r--r--  io_mesh_ply/import_ply.py | 76
-rw-r--r--  io_mesh_uv_layout/export_uv_png.py | 1
-rwxr-xr-x  io_scene_gltf2/__init__.py | 46
-rwxr-xr-x  io_scene_gltf2/blender/com/gltf2_blender_math.py | 13
-rwxr-xr-x  io_scene_gltf2/blender/exp/gltf2_blender_export.py | 8
-rwxr-xr-x  io_scene_gltf2/blender/exp/gltf2_blender_extract.py | 814
-rwxr-xr-x  io_scene_gltf2/blender/exp/gltf2_blender_gather_animation_sampler_keyframes.py | 10
-rwxr-xr-x  io_scene_gltf2/blender/exp/gltf2_blender_gather_animation_samplers.py | 8
-rwxr-xr-x  io_scene_gltf2/blender/exp/gltf2_blender_gather_cameras.py | 6
-rwxr-xr-x  io_scene_gltf2/blender/exp/gltf2_blender_gather_joints.py | 22
-rwxr-xr-x  io_scene_gltf2/blender/exp/gltf2_blender_gather_materials_pbr_metallic_roughness.py | 7
-rwxr-xr-x  io_scene_gltf2/blender/exp/gltf2_blender_gather_nodes.py | 52
-rwxr-xr-x  io_scene_gltf2/blender/exp/gltf2_blender_gather_primitive_attributes.py | 190
-rwxr-xr-x  io_scene_gltf2/blender/exp/gltf2_blender_gather_primitives.py | 58
-rwxr-xr-x  io_scene_gltf2/blender/exp/gltf2_blender_gather_skins.py | 9
-rwxr-xr-x  io_scene_gltf2/blender/exp/gltf2_blender_utils.py | 67
-rwxr-xr-x  io_scene_gltf2/blender/imp/gltf2_blender_KHR_materials_pbrSpecularGlossiness.py | 3
-rwxr-xr-x  io_scene_gltf2/blender/imp/gltf2_blender_gltf.py | 4
-rwxr-xr-x  io_scene_gltf2/blender/imp/gltf2_blender_mesh.py | 699
-rwxr-xr-x  io_scene_gltf2/blender/imp/gltf2_blender_pbrMetallicRoughness.py | 9
-rwxr-xr-x  io_scene_gltf2/blender/imp/gltf2_blender_primitive.py | 344
-rwxr-xr-x  io_scene_gltf2/io/com/gltf2_io_constants.py | 12
-rwxr-xr-x  io_scene_gltf2/io/imp/gltf2_io_binary.py | 136
-rwxr-xr-x  io_scene_gltf2/io/imp/gltf2_io_gltf.py | 2
-rw-r--r--  magic_uv/__init__.py | 10
-rw-r--r--  magic_uv/common.py | 220
-rw-r--r--  magic_uv/lib/__init__.py | 4
-rw-r--r--  magic_uv/op/__init__.py | 6
-rw-r--r--  magic_uv/op/align_uv.py | 10
-rw-r--r--  magic_uv/op/align_uv_cursor.py | 4
-rw-r--r--  magic_uv/op/clip_uv.py | 227
-rw-r--r--  magic_uv/op/copy_paste_uv.py | 11
-rw-r--r--  magic_uv/op/copy_paste_uv_object.py | 4
-rw-r--r--  magic_uv/op/copy_paste_uv_uvedit.py | 4
-rw-r--r--  magic_uv/op/flip_rotate_uv.py | 4
-rw-r--r--  magic_uv/op/mirror_uv.py | 4
-rw-r--r--  magic_uv/op/move_uv.py | 63
-rw-r--r--  magic_uv/op/pack_uv.py | 4
-rw-r--r--  magic_uv/op/preserve_uv_aspect.py | 4
-rw-r--r--  magic_uv/op/select_uv.py | 83
-rw-r--r--  magic_uv/op/smooth_uv.py | 10
-rw-r--r--  magic_uv/op/texture_lock.py | 6
-rw-r--r--  magic_uv/op/texture_projection.py | 4
-rw-r--r--  magic_uv/op/texture_wrap.py | 4
-rw-r--r--  magic_uv/op/transfer_uv.py | 4
-rw-r--r--  magic_uv/op/unwrap_constraint.py | 4
-rw-r--r--  magic_uv/op/uv_bounding_box.py | 10
-rw-r--r--  magic_uv/op/uv_inspection.py | 249
-rw-r--r--  magic_uv/op/uv_sculpt.py | 24
-rw-r--r--  magic_uv/op/uvw.py | 64
-rw-r--r--  magic_uv/op/world_scale_uv.py | 394
-rw-r--r--  magic_uv/preferences.py | 15
-rw-r--r--  magic_uv/properites.py | 4
-rw-r--r--  magic_uv/ui/IMAGE_MT_uvs.py | 14
-rw-r--r--  magic_uv/ui/VIEW3D_MT_object.py | 4
-rw-r--r--  magic_uv/ui/VIEW3D_MT_uv_map.py | 17
-rw-r--r--  magic_uv/ui/__init__.py | 4
-rw-r--r--  magic_uv/ui/uvedit_copy_paste_uv.py | 4
-rw-r--r--  magic_uv/ui/uvedit_editor_enhancement.py | 7
-rw-r--r--  magic_uv/ui/uvedit_uv_manipulation.py | 18
-rw-r--r--  magic_uv/ui/view3d_copy_paste_uv_editmode.py | 4
-rw-r--r--  magic_uv/ui/view3d_copy_paste_uv_objectmode.py | 4
-rw-r--r--  magic_uv/ui/view3d_uv_manipulation.py | 114
-rw-r--r--  magic_uv/ui/view3d_uv_mapping.py | 4
-rw-r--r--  magic_uv/updater.py | 4
-rw-r--r--  magic_uv/utils/__init__.py | 4
-rw-r--r--  magic_uv/utils/addon_updater.py | 8
-rw-r--r--  magic_uv/utils/bl_class_registry.py | 4
-rw-r--r--  magic_uv/utils/compatibility.py | 4
-rw-r--r--  magic_uv/utils/property_class_registry.py | 4
-rw-r--r--  measureit/measureit_geometry.py | 4
-rw-r--r--  mesh_auto_mirror.py | 3
-rw-r--r--  mesh_bsurfaces.py | 15
-rw-r--r--  mesh_tissue/dual_mesh.py | 4
-rw-r--r--  object_carver/carver_operator.py | 6
-rw-r--r--  object_carver/carver_utils.py | 8
-rw-r--r--  object_collection_manager/__init__.py | 30
-rw-r--r--  object_collection_manager/internals.py | 93
-rw-r--r--  object_collection_manager/operator_utils.py | 260
-rw-r--r--  object_collection_manager/operators.py | 391
-rw-r--r--  object_collection_manager/qcd_move_widget.py | 51
-rw-r--r--  object_collection_manager/qcd_operators.py | 53
-rw-r--r--  object_collection_manager/ui.py | 178
-rw-r--r--  object_skinify.py | 4
-rw-r--r--  oscurart_tools/__init__.py | 4
-rw-r--r--  oscurart_tools/mesh/overlap_uvs.py | 94
-rw-r--r--  oscurart_tools/mesh/print_uv_stats.py | 28
-rw-r--r--  oscurart_tools/render/material_overrides.py | 56
-rw-r--r--  oscurart_tools/render/render_tokens.py | 12
-rw-r--r--  power_sequencer/operators/scene_create_from_selection.py | 10
-rw-r--r--  power_sequencer/scripts/BPSRender/bpsrender/__init__.py | 1
-rw-r--r--  presets/interface_theme/deep_grey.xml | 16
-rw-r--r--  render_povray/__init__.py | 358
-rw-r--r--  render_povray/nodes.py | 33
-rw-r--r--  render_povray/primitives.py | 55
-rw-r--r--  render_povray/render.py | 362
-rw-r--r--  render_povray/shading.py | 144
-rw-r--r--  render_povray/ui.py | 749
-rw-r--r--  rigify/utils/bones.py | 2
-rw-r--r--  rigify/utils/mechanism.py | 3
-rw-r--r--  space_view3d_copy_attributes.py | 6
-rw-r--r--  space_view3d_modifier_tools.py | 2
-rw-r--r--  viewport_vr_preview.py | 325

134 files changed, 7549 insertions(+), 4687 deletions(-)
diff --git a/add_curve_extra_objects/add_curve_spirals.py b/add_curve_extra_objects/add_curve_spirals.py
index 2ccdc51a..aa879007 100644
--- a/add_curve_extra_objects/add_curve_spirals.py
+++ b/add_curve_extra_objects/add_curve_spirals.py
@@ -245,19 +245,6 @@ def draw_curve(props, context, align_matrix):
Curve.rotation_euler = props.rotation_euler
Curve.select_set(True)
- # set curveOptions
- Curve.data.dimensions = props.shape
- Curve.data.use_path = True
- if props.shape == '3D':
- Curve.data.fill_mode = 'FULL'
- else:
- Curve.data.fill_mode = 'BOTH'
-
- # set curveOptions
- newSpline.use_cyclic_u = props.use_cyclic_u
- newSpline.use_endpoint_u = props.endp_u
- newSpline.order_u = props.order_u
-
# turn verts into array
vertArray = vertsToPoints(verts, splineType)
@@ -288,6 +275,19 @@ def draw_curve(props, context, align_matrix):
for point in newSpline.points:
point.select = True
+ # set curveOptions
+ newSpline.use_cyclic_u = props.use_cyclic_u
+ newSpline.use_endpoint_u = props.endp_u
+ newSpline.order_u = props.order_u
+
+ # set curveOptions
+ Curve.data.dimensions = props.shape
+ Curve.data.use_path = True
+ if props.shape == '3D':
+ Curve.data.fill_mode = 'FULL'
+ else:
+ Curve.data.fill_mode = 'BOTH'
+
# move and rotate spline in edit mode
if bpy.context.mode == 'EDIT_CURVE':
bpy.ops.transform.translate(value = props.startlocation)
diff --git a/add_curve_sapling/__init__.py b/add_curve_sapling/__init__.py
index b12beada..e5b88157 100644
--- a/add_curve_sapling/__init__.py
+++ b/add_curve_sapling/__init__.py
@@ -701,7 +701,6 @@ class AddTree(Operator):
items=objectList,
update=update_leaves
)
- """
bend = FloatProperty(
name='Leaf Bend',
description='The proportion of bending applied to the leaf (Bend)',
@@ -709,7 +708,6 @@ class AddTree(Operator):
max=1.0,
default=0.0, update=update_leaves
)
- """
leafangle: FloatProperty(
name='Leaf Angle',
description='Leaf vertical attraction',
diff --git a/add_curve_sapling/presets/quaking_aspen.py b/add_curve_sapling/presets/quaking_aspen.py
index 6cab7386..70e0a80b 100644
--- a/add_curve_sapling/presets/quaking_aspen.py
+++ b/add_curve_sapling/presets/quaking_aspen.py
@@ -1 +1 @@
-{'leafScale': 0.17000000178813934, 'autoTaper': True, 'customShape': (0.5, 1.0, 0.30000001192092896, 0.5), 'leafShape': 'hex', 'curve': (0.0, -40.0, -40.0, 0.0), 'ratio': 0.014999999664723873, 'splitBias': 0.0, 'pruneWidth': 0.4000000059604645, 'downAngleV': (0.0, 80.0, 10.0, 10.0), 'rotate': (99.5, 137.5, 137.5, 137.5), 'pruneRatio': 1.0, 'leafDownAngle': 45.0, 'makeMesh': False, 'radiusTweak': (1.0, 1.0, 1.0, 1.0), 'rMode': 'rotate', 'splitAngleV': (0.0, 0.0, 0.0, 0.0), 'branchDist': 1.0, 'bevel': False, 'minRadius': 0.001500000013038516, 'prune': False, 'leafRotateV': 0.0, 'splitAngle': (0.0, 0.0, 0.0, 0.0), 'armAnim': False, 'boneStep': (1, 1, 1, 1), 'pruneBase': 0.30000001192092896, 'taperCrown': 0.0, 'baseSplits': 0, 'baseSize_s': 0.25, 'handleType': '0', 'baseSize': 0.4000000059604645, 'af1': 1.0, 'levels': 2, 'leafScaleV': 0.0, 'resU': 4, 'seed': 0, 'downAngle': (90.0, 110.0, 45.0, 45.0), 'leafangle': 0.0, 'scaleV0': 0.10000000149011612, 'prunePowerHigh': 0.5, 'splitByLen': True, 'wind': 1.0, 'shape': '7', 'prunePowerLow': 0.0010000000474974513, 'scale': 13.0, 'leafAnim': False, 'curveBack': (0.0, 0.0, 0.0, 0.0), 'leafScaleX': 1.0, 'horzLeaves': True, 'splitHeight': 0.20000000298023224, 'leafScaleT': 0.0, 'scaleV': 3.0, 'leafDist': '6', 'nrings': 0, 'curveRes': (8, 5, 3, 1), 'shapeS': '4', 'bevelRes': 0, 'useOldDownAngle': False, 'useParentAngle': True, 'armLevels': 2, 'scale0': 1.0, 'taper': (1.0, 1.0, 1.0, 1.0), 'pruneWidthPeak': 0.6000000238418579, 'previewArm': False, 'leaves': 25, 'ratioPower': 1.100000023841858, 'gustF': 0.07500000298023224, 'curveV': (20.0, 50.0, 75.0, 0.0), 'showLeaves': False, 'frameRate': 1.0, 'length': (1.0, 0.30000001192092896, 0.6000000238418579, 0.44999998807907104), 'branches': (0, 50, 30, 10), 'useArm': False, 'loopFrames': 0, 'gust': 1.0, 'af3': 4.0, 'closeTip': False, 'leafRotate': 137.5, 'attractUp': (0.0, 0.0, 0.5, 0.5), 'leafDownAngleV': 10.0, 'rootFlare': 1.0, 'af2': 1.0, 'lengthV': (0.0, 0.0, 0.0, 0.0), 'rotateV': (15.0, 0.0, 0.0, 0.0), 'attractOut': (0.0, 0.0, 0.0, 0.0), 'segSplits': (0.0, 0.0, 0.0, 0.0)}
+{'leafScale': 0.17000000178813934, 'autoTaper': True, 'customShape': (0.5, 1.0, 0.30000001192092896, 0.5), 'leafShape': 'hex', 'curve': (0.0, -40.0, -40.0, 0.0), 'ratio': 0.014999999664723873, 'splitBias': 0.0, 'pruneWidth': 0.4000000059604645, 'downAngleV': (0.0, 80.0, 10.0, 10.0), 'rotate': (99.5, 137.5, 137.5, 137.5), 'pruneRatio': 1.0, 'leafDownAngle': 45.0, 'makeMesh': False, 'radiusTweak': (1.0, 1.0, 1.0, 1.0), 'rMode': 'rotate', 'splitAngleV': (0.0, 0.0, 0.0, 0.0), 'branchDist': 1.0, 'bevel': True, 'minRadius': 0.001500000013038516, 'prune': False, 'leafRotateV': 0.0, 'splitAngle': (0.0, 0.0, 0.0, 0.0), 'armAnim': False, 'boneStep': (1, 1, 1, 1), 'pruneBase': 0.30000001192092896, 'taperCrown': 0.0, 'baseSplits': 0, 'baseSize_s': 0.25, 'handleType': '0', 'baseSize': 0.4000000059604645, 'af1': 1.0, 'levels': 2, 'leafScaleV': 0.0, 'resU': 4, 'seed': 0, 'downAngle': (90.0, 110.0, 45.0, 45.0), 'leafangle': 0.0, 'scaleV0': 0.10000000149011612, 'prunePowerHigh': 0.5, 'splitByLen': True, 'wind': 1.0, 'shape': '7', 'prunePowerLow': 0.0010000000474974513, 'scale': 13.0, 'leafAnim': False, 'curveBack': (0.0, 0.0, 0.0, 0.0), 'leafScaleX': 1.0, 'horzLeaves': True, 'splitHeight': 0.20000000298023224, 'leafScaleT': 0.0, 'scaleV': 3.0, 'leafDist': '6', 'nrings': 0, 'curveRes': (8, 5, 3, 1), 'shapeS': '4', 'bevelRes': 0, 'useOldDownAngle': False, 'useParentAngle': True, 'armLevels': 2, 'scale0': 1.0, 'taper': (1.0, 1.0, 1.0, 1.0), 'pruneWidthPeak': 0.6000000238418579, 'previewArm': False, 'leaves': 25, 'ratioPower': 1.100000023841858, 'gustF': 0.07500000298023224, 'curveV': (20.0, 50.0, 75.0, 0.0), 'showLeaves': False, 'frameRate': 1.0, 'length': (1.0, 0.30000001192092896, 0.6000000238418579, 0.44999998807907104), 'branches': (0, 50, 30, 10), 'useArm': False, 'loopFrames': 0, 'gust': 1.0, 'af3': 4.0, 'closeTip': False, 'leafRotate': 137.5, 'attractUp': (0.0, 0.0, 0.5, 0.5), 'leafDownAngleV': 10.0, 'rootFlare': 1.0, 'af2': 1.0, 'lengthV': (0.0, 0.0, 0.0, 0.0), 'rotateV': (15.0, 0.0, 0.0, 0.0), 'attractOut': (0.0, 0.0, 0.0, 0.0), 'segSplits': (0.0, 0.0, 0.0, 0.0)}
diff --git a/add_curve_sapling/presets/willow.py b/add_curve_sapling/presets/willow.py
index 5c60b0ff..3384eb19 100644
--- a/add_curve_sapling/presets/willow.py
+++ b/add_curve_sapling/presets/willow.py
@@ -1 +1 @@
-{'curveRes': (8, 16, 12, 1), 'scaleV0': 0.0, 'pruneRatio': 1.0, 'rotate': (0.0, -120.0, -120.0, 140.0), 'resU': 4, 'levels': 2, 'frameRate': 1.0, 'ratioPower': 2.0, 'windGust': 0.0, 'branches': (0, 25, 10, 300), 'attractUp': -3.0, 'bevel': False, 'windSpeed': 2.0, 'rotateV': (0.0, 30.0, 30.0, 0.0), 'segSplits': (0.10000000149011612, 0.20000000298023224, 0.20000000298023224, 0.0), 'handleType': '1', 'shape': '3', 'curveV': (120.0, 90.0, 0.0, 0.0), 'scale': 15.0, 'leafShape': 'hex', 'showLeaves': False, 'ratio': 0.029999999329447746, 'leaves': 15.0, 'armAnim': False, 'leafScale': 0.11999999731779099, 'leafDist': '4', 'useArm': False, 'splitAngle': (3.0, 30.0, 45.0, 0.0), 'lengthV': (0.0, 0.10000000149011612, 0.0, 0.0), 'seed': 0, 'scaleV': 5.0, 'startCurv': 0.0, 'downAngle': (0.0, 20.0, 30.0, 20.0), 'pruneWidth': 0.4000000059604645, 'baseSize': 0.05000000074505806, 'bevelRes': 0, 'length': (0.800000011920929, 0.5, 1.5, 0.10000000149011612), 'downAngleV': (0.0, 10.0, 10.0, 10.0), 'prune': False, 'curve': (0.0, 40.0, 0.0, 0.0), 'taper': (1.0, 1.0, 1.0, 1.0), 'prunePowerHigh': 0.5, 'leafScaleX': 0.20000000298023224, 'curveBack': (20.0, 80.0, 0.0, 0.0), 'bend': 0.0, 'scale0': 1.0, 'prunePowerLow': 0.0010000000474974513, 'splitAngleV': (0.0, 10.0, 20.0, 0.0), 'baseSplits': 2, 'pruneWidthPeak': 0.6000000238418579}
+{'curveRes': (8, 16, 12, 1), 'scaleV0': 0.0, 'pruneRatio': 1.0, 'rotate': (0.0, -120.0, -120.0, 140.0), 'resU': 4, 'levels': 2, 'frameRate': 1.0, 'ratioPower': 2.0, 'windGust': 0.0, 'branches': (0, 25, 10, 300), 'attractUp': -3.0, 'bevel': True, 'windSpeed': 2.0, 'rotateV': (0.0, 30.0, 30.0, 0.0), 'segSplits': (0.10000000149011612, 0.20000000298023224, 0.20000000298023224, 0.0), 'handleType': '1', 'shape': '3', 'curveV': (120.0, 90.0, 0.0, 0.0), 'scale': 15.0, 'leafShape': 'hex', 'showLeaves': False, 'ratio': 0.029999999329447746, 'leaves': 15.0, 'armAnim': False, 'leafScale': 0.11999999731779099, 'leafDist': '4', 'useArm': False, 'splitAngle': (3.0, 30.0, 45.0, 0.0), 'lengthV': (0.0, 0.10000000149011612, 0.0, 0.0), 'seed': 0, 'scaleV': 5.0, 'startCurv': 0.0, 'downAngle': (0.0, 20.0, 30.0, 20.0), 'pruneWidth': 0.4000000059604645, 'baseSize': 0.05000000074505806, 'bevelRes': 0, 'length': (0.800000011920929, 0.5, 1.5, 0.10000000149011612), 'downAngleV': (0.0, 10.0, 10.0, 10.0), 'prune': False, 'curve': (0.0, 40.0, 0.0, 0.0), 'taper': (1.0, 1.0, 1.0, 1.0), 'prunePowerHigh': 0.5, 'leafScaleX': 0.20000000298023224, 'curveBack': (20.0, 80.0, 0.0, 0.0), 'bend': 0.0, 'scale0': 1.0, 'prunePowerLow': 0.0010000000474974513, 'splitAngleV': (0.0, 10.0, 20.0, 0.0), 'baseSplits': 2, 'pruneWidthPeak': 0.6000000238418579}
diff --git a/add_mesh_BoltFactory/Boltfactory.py b/add_mesh_BoltFactory/Boltfactory.py
index acfe5886..244d9720 100644
--- a/add_mesh_BoltFactory/Boltfactory.py
+++ b/add_mesh_BoltFactory/Boltfactory.py
@@ -141,7 +141,7 @@ class add_mesh_bolt(Operator, AddObjectHelper):
('bf_Torx_T50', 'T50', 'T50'),
('bf_Torx_T55', 'T55', 'T55'),
]
-
+
bf_Torx_Size_Type: EnumProperty(
attr='bf_Torx_Size_Type',
name='Torx Size',
@@ -323,7 +323,7 @@ class add_mesh_bolt(Operator, AddObjectHelper):
description='Height of the 12 Point Nut',
unit='LENGTH',
)
-
+
bf_12_Point_Nut_Flat_Distance: FloatProperty(
attr='bf_12_Point_Nut_Flat_Distance',
name='12 Point Nut Flat Dist', default=3.0,
@@ -400,8 +400,8 @@ class add_mesh_bolt(Operator, AddObjectHelper):
col.prop(self, 'bf_Hex_Nut_Height')
col.prop(self, 'bf_Hex_Nut_Flat_Distance')
-
-
+
+
# Thread
col.label(text='Thread')
if self.bf_Model_Type == 'bf_Model_Bolt':
diff --git a/add_mesh_BoltFactory/createMesh.py b/add_mesh_BoltFactory/createMesh.py
index 96284012..e19f15ba 100644
--- a/add_mesh_BoltFactory/createMesh.py
+++ b/add_mesh_BoltFactory/createMesh.py
@@ -291,14 +291,14 @@ def Fill_Fan_Face(OFFSET, NUM, FACE_DOWN=0):
Ret = []
Face = [NUM-1,0,1]
TempFace = [0, 0, 0]
- A = 0
+ A = 0
#B = 1 unsed
C = 2
if NUM < 3:
return None
for _i in range(NUM - 2):
TempFace[0] = Face[A]
- TempFace[1] = Face[C]
+ TempFace[1] = Face[C]
TempFace[2] = Face[C]+1
if FACE_DOWN:
Ret.append([OFFSET + Face[2], OFFSET + Face[1], OFFSET + Face[0]])
@@ -429,18 +429,18 @@ def Torx_Fill(OFFSET, FLIP=0):
Lookup = [[0,10,11],
[0,11, 12],
[0,12,1],
-
+
[1, 12, 13],
[1, 13, 14],
[1, 14, 15],
[1, 15, 2],
-
+
[2, 15, 16],
[2, 16, 17],
[2, 17, 18],
[2, 18, 19],
[2, 19, 3],
-
+
[3, 19, 20],
[3, 20, 21],
[3, 21, 22],
@@ -448,8 +448,8 @@ def Torx_Fill(OFFSET, FLIP=0):
[3, 23, 24],
[3, 24, 25],
[3, 25, 4],
-
-
+
+
[4, 25, 26],
[4, 26, 27],
[4, 27, 28],
@@ -457,25 +457,25 @@ def Torx_Fill(OFFSET, FLIP=0):
[4, 29, 30],
[4, 30, 31],
[4, 31, 5],
-
+
[5, 31, 32],
[5, 32, 33],
[5, 33, 34],
[5, 34, 35],
[5, 35, 36],
[5, 36, 6],
-
+
[6, 36, 37],
[6, 37, 38],
[6, 38, 39],
[6, 39, 7],
-
+
[7, 39, 40],
[7, 40, 41],
[7, 41, 42],
[7, 42, 43],
[7, 43, 8],
-
+
[8, 43, 44],
[8, 44, 45],
[8, 45, 46],
@@ -505,40 +505,40 @@ def Create_Torx_Bit(Point_Distance, HEIGHT):
OUTTER_RADIUS = POINT_RADIUS * 1.05
POINT_1_Y = POINT_RADIUS * 0.816592592592593
- POINT_2_X = POINT_RADIUS * 0.511111111111111
- POINT_2_Y = POINT_RADIUS * 0.885274074074074
- POINT_3_X = POINT_RADIUS * 0.7072
- POINT_3_Y = POINT_RADIUS * 0.408296296296296
- POINT_4_X = POINT_RADIUS * 1.02222222222222
- SMALL_RADIUS = POINT_RADIUS * 0.183407407407407
- BIG_RADIUS = POINT_RADIUS * 0.333333333333333
+ POINT_2_X = POINT_RADIUS * 0.511111111111111
+ POINT_2_Y = POINT_RADIUS * 0.885274074074074
+ POINT_3_X = POINT_RADIUS * 0.7072
+ POINT_3_Y = POINT_RADIUS * 0.408296296296296
+ POINT_4_X = POINT_RADIUS * 1.02222222222222
+ SMALL_RADIUS = POINT_RADIUS * 0.183407407407407
+ BIG_RADIUS = POINT_RADIUS * 0.333333333333333
# Values for T40 # POINT_1_Y = 2.756
# POINT_2_X = 1.725
-# POINT_2_Y = 2.9878
+# POINT_2_Y = 2.9878
# POINT_3_X = 2.3868
# POINT_3_Y = 1.378
# POINT_4_X = 3.45
-#
+#
# SMALL_RADIUS = 0.619
# BIG_RADIUS = 1.125
def Do_Curve(Curve_Height):
- for i in range(0, 90, 10):
+ for i in range(0, 90, 10):
x = sin(radians(i)) * SMALL_RADIUS
y = cos(radians(i)) * SMALL_RADIUS
verts.append([x, POINT_1_Y + y, Curve_Height])
-
- for i in range(260, 150, -10):
+
+ for i in range(260, 150, -10):
x = sin(radians(i)) * BIG_RADIUS
y = cos(radians(i)) * BIG_RADIUS
verts.append([POINT_2_X + x, POINT_2_Y + y, Curve_Height])
-
+
for i in range(340, 150 + 360, 10):
x = sin(radians(i%360)) * SMALL_RADIUS
y = cos(radians(i%360)) * SMALL_RADIUS
verts.append([POINT_3_X + x, POINT_3_Y + y, Curve_Height])
-
- for i in range(320, 260, -10):
+
+ for i in range(320, 260, -10):
x = sin(radians(i)) * BIG_RADIUS
y = cos(radians(i)) * BIG_RADIUS
verts.append([POINT_4_X + x, y, Curve_Height])
@@ -553,19 +553,19 @@ def Create_Torx_Bit(Point_Distance, HEIGHT):
FaceStart_Top_Curve= len(verts)
Do_Curve(0)
faces.extend(Torx_Fill(FaceStart_Outside, 0))
-
+
FaceStart_Bottom_Curve= len(verts)
Do_Curve(0 - HEIGHT)
-
+
faces.extend(Build_Face_List_Quads(FaceStart_Top_Curve,42 ,1 , True))
-
+
verts.append([0,0,0 - HEIGHT]) # add center point for fill Fan
faces.extend(Fill_Fan_Face(FaceStart_Bottom_Curve, 44))
-
+
M_Verts, M_Faces = Mirror_Verts_Faces(verts, faces, 'x')
verts.extend(M_Verts)
faces.extend(M_Faces)
-
+
M_Verts, M_Faces = Mirror_Verts_Faces(verts, faces, 'y')
verts.extend(M_Verts)
faces.extend(M_Faces)
@@ -1078,12 +1078,12 @@ def Create_12_Point(FLAT, HOLE_DIA, SHANK_DIA, HEIGHT,FLANGE_DIA):
FLANGE_HEIGHT = (1.89/8.0)*HEIGHT
FLAT_HEIGHT = (4.18/8.0)*HEIGHT
# FLANGE_DIA = (13.27/8.0)*FLAT
-
+
FLANGE_RADIUS = FLANGE_DIA * 0.5
- FLANGE_TAPPER_HEIGHT = HEIGHT - FLANGE_HEIGHT - FLAT_HEIGHT
-
+ FLANGE_TAPPER_HEIGHT = HEIGHT - FLANGE_HEIGHT - FLAT_HEIGHT
+
# HOLE_DIA = 0.0
-
+
verts = []
faces = []
HOLE_RADIUS = HOLE_DIA / 2
@@ -1098,7 +1098,7 @@ def Create_12_Point(FLAT, HOLE_DIA, SHANK_DIA, HEIGHT,FLANGE_DIA):
verts.append([0.0, 0.0, 0.0])
-# print("HOLE_RADIUS" + str(HOLE_RADIUS))
+# print("HOLE_RADIUS" + str(HOLE_RADIUS))
# print("TopBevelRadius" + str(TopBevelRadius))
FaceStart = len(verts)
@@ -1123,15 +1123,15 @@ def Create_12_Point(FLAT, HOLE_DIA, SHANK_DIA, HEIGHT,FLANGE_DIA):
x = sin(radians(20)) * HOLE_RADIUS
y = cos(radians(20)) * HOLE_RADIUS
verts.append([x, y, 0.0])
-
+
x = sin(radians(25)) * HOLE_RADIUS
y = cos(radians(25)) * HOLE_RADIUS
verts.append([x, y, 0.0])
-
+
x = sin(radians(30)) * HOLE_RADIUS
y = cos(radians(30)) * HOLE_RADIUS
verts.append([x, y, 0.0])
-
+
Row += 1
@@ -1156,17 +1156,17 @@ def Create_12_Point(FLAT, HOLE_DIA, SHANK_DIA, HEIGHT,FLANGE_DIA):
y = cos(radians(15)) * TopBevelRadius
vec4 = Vector([x, y, 0.0])
verts.append([x, y, 0.0])
-
+
x = sin(radians(20)) * TopBevelRadius
y = cos(radians(20)) * TopBevelRadius
vec5 = Vector([x, y, 0.0])
verts.append([x, y, 0.0])
-
+
x = sin(radians(25)) * TopBevelRadius
y = cos(radians(25)) * TopBevelRadius
vec6 = Vector([x, y, 0.0])
verts.append([x, y, 0.0])
-
+
x = sin(radians(30)) * TopBevelRadius
y = cos(radians(30)) * TopBevelRadius
vec7 = Vector([x, y, 0.0])
@@ -1176,11 +1176,11 @@ def Create_12_Point(FLAT, HOLE_DIA, SHANK_DIA, HEIGHT,FLANGE_DIA):
#45Deg bevel on the top
-
+
#First we work out how far up the Y axis the vert is
v_origin = Vector([0.0,0.0,0.0]) # center of the model
- v_15Deg_Point = Vector([tan(radians(15)) * Half_Flat,Half_Flat,0.0]) #Is a know point to work back from
-
+ v_15Deg_Point = Vector([tan(radians(15)) * Half_Flat,Half_Flat,0.0]) #Is a know point to work back from
+
x = tan(radians(0)) * Half_Flat
Point_Distance =(tan(radians(30)) * v_15Deg_Point.x)+Half_Flat
dvec = vec1 - Vector([x, Point_Distance, 0.0])
@@ -1188,18 +1188,18 @@ def Create_12_Point(FLAT, HOLE_DIA, SHANK_DIA, HEIGHT,FLANGE_DIA):
v_0_Deg_Top_Point = Vector([x, Point_Distance, -dvec.length])
v_0_Deg_Point = Vector([x, Point_Distance,0.0])
-
+
v_5Deg_Line = Vector([tan(radians(5)) * Half_Flat, Half_Flat, 0.0])
v_5Deg_Line.length *= 2 # extende out the line on a 5 deg angle
#We cross 2 lines. One from the origin to the 0 Deg point
#and the second is from the orign extended out past the first line
- # This gives the cross point of the
+ # This gives the cross point of the
v_Cross = geometry.intersect_line_line_2d(v_0_Deg_Point,v_15Deg_Point,v_origin,v_5Deg_Line)
dvec = vec2 - Vector([v_Cross.x,v_Cross.y,0.0])
verts.append([v_Cross.x,v_Cross.y,-dvec.length])
v_5_Deg_Top_Point = Vector([v_Cross.x,v_Cross.y,-dvec.length])
-
+
v_10Deg_Line = Vector([tan(radians(10)) * Half_Flat, Half_Flat, 0.0])
v_10Deg_Line.length *= 2 # extende out the line
@@ -1207,14 +1207,14 @@ def Create_12_Point(FLAT, HOLE_DIA, SHANK_DIA, HEIGHT,FLANGE_DIA):
dvec = vec3 - Vector([v_Cross.x,v_Cross.y,0.0])
verts.append([v_Cross.x,v_Cross.y,-dvec.length])
v_10_Deg_Top_Point = Vector([v_Cross.x,v_Cross.y,-dvec.length])
-
- #The remain points are stright forward because y is all the same y height (Half_Flat)
+
+ #The remain points are stright forward because y is all the same y height (Half_Flat)
x = tan(radians(15)) * Half_Flat
dvec = vec4 - Vector([x, Half_Flat, 0.0])
Lowest_Point = -dvec.length
verts.append([x, Half_Flat, -dvec.length])
v_15_Deg_Top_Point = Vector([x, Half_Flat, -dvec.length])
-
+
x = tan(radians(20)) * Half_Flat
dvec = vec5 - Vector([x, Half_Flat, 0.0])
Lowest_Point = -dvec.length
@@ -1234,43 +1234,43 @@ def Create_12_Point(FLAT, HOLE_DIA, SHANK_DIA, HEIGHT,FLANGE_DIA):
v_30_Deg_Top_Point = Vector([x, Half_Flat, -dvec.length])
Row += 1
-
+
#Down Bits
# print ("Point_Distance")
# print (Point_Distance)
-
+
Flange_Adjacent = FLANGE_RADIUS - Point_Distance
if (Flange_Adjacent == 0.0):
Flange_Adjacent = 0.000001
- Flange_Opposite = FLANGE_TAPPER_HEIGHT
-
+ Flange_Opposite = FLANGE_TAPPER_HEIGHT
+
# print ("Flange_Opposite")
# print (Flange_Opposite)
# print ("Flange_Adjacent")
# print (Flange_Adjacent)
-
+
FLANGE_ANGLE_RAD = atan(Flange_Opposite/Flange_Adjacent )
# FLANGE_ANGLE_RAD = radians(45)
# print("FLANGE_ANGLE_RAD")
-# print (degrees (FLANGE_ANGLE_RAD))
-
-
+# print (degrees (FLANGE_ANGLE_RAD))
+
+
v_Extended_Flange_Edge = Vector([0.0,0.0,-HEIGHT + FLANGE_HEIGHT + (tan(FLANGE_ANGLE_RAD)* FLANGE_RADIUS) ])
# print("v_Extended_Flange_Edge")
-# print (v_Extended_Flange_Edge)
+# print (v_Extended_Flange_Edge)
#0deg
v_Flange_Edge = Vector([sin(radians(0)) * FLANGE_RADIUS,cos(radians(0)) * FLANGE_RADIUS,-HEIGHT + FLANGE_HEIGHT ])
v_Cross = geometry.intersect_line_line(v_0_Deg_Top_Point,Vector([v_0_Deg_Top_Point.x,v_0_Deg_Top_Point.y,-HEIGHT]),v_Flange_Edge,v_Extended_Flange_Edge)
verts.append(v_Cross[0])
-
+
#5deg
v_Flange_Edge = Vector([sin(radians(5)) * FLANGE_RADIUS,cos(radians(5)) * FLANGE_RADIUS,-HEIGHT + FLANGE_HEIGHT ])
v_Cross = geometry.intersect_line_line(v_5_Deg_Top_Point,Vector([v_5_Deg_Top_Point.x,v_5_Deg_Top_Point.y,-HEIGHT]),v_Flange_Edge,v_Extended_Flange_Edge)
verts.append(v_Cross[0])
-
+
#10deg
v_Flange_Edge = Vector([sin(radians(10)) * FLANGE_RADIUS,cos(radians(10)) * FLANGE_RADIUS,-HEIGHT + FLANGE_HEIGHT ])
v_Cross = geometry.intersect_line_line(v_10_Deg_Top_Point,Vector([v_10_Deg_Top_Point.x,v_10_Deg_Top_Point.y,-HEIGHT]),v_Flange_Edge,v_Extended_Flange_Edge)
@@ -1286,13 +1286,13 @@ def Create_12_Point(FLAT, HOLE_DIA, SHANK_DIA, HEIGHT,FLANGE_DIA):
v_Flange_Edge = Vector([sin(radians(20)) * FLANGE_RADIUS,cos(radians(20)) * FLANGE_RADIUS,-HEIGHT + FLANGE_HEIGHT ])
v_Cross = geometry.intersect_line_line(v_20_Deg_Top_Point,Vector([v_20_Deg_Top_Point.x,v_20_Deg_Top_Point.y,-HEIGHT]),v_Flange_Edge,v_Extended_Flange_Edge)
verts.append(v_Cross[0])
-
+
#25deg
v_Flange_Edge = Vector([sin(radians(25)) * FLANGE_RADIUS,cos(radians(25)) * FLANGE_RADIUS,-HEIGHT + FLANGE_HEIGHT ])
v_Cross = geometry.intersect_line_line(v_25_Deg_Top_Point,Vector([v_25_Deg_Top_Point.x,v_25_Deg_Top_Point.y,-HEIGHT]),v_Flange_Edge,v_Extended_Flange_Edge)
verts.append(v_Cross[0])
-
-
+
+
#30deg
v_Flange_Edge = Vector([sin(radians(30)) * FLANGE_RADIUS,cos(radians(30)) * FLANGE_RADIUS,-HEIGHT + FLANGE_HEIGHT ])
v_Cross = geometry.intersect_line_line(v_30_Deg_Top_Point,Vector([v_30_Deg_Top_Point.x,v_30_Deg_Top_Point.y,-HEIGHT]),v_Flange_Edge,v_Extended_Flange_Edge)
@@ -1309,7 +1309,7 @@ def Create_12_Point(FLAT, HOLE_DIA, SHANK_DIA, HEIGHT,FLANGE_DIA):
verts.append([sin(radians(20)) * FLANGE_RADIUS,cos(radians(20)) * FLANGE_RADIUS,-HEIGHT + FLANGE_HEIGHT])
verts.append([sin(radians(25)) * FLANGE_RADIUS,cos(radians(25)) * FLANGE_RADIUS,-HEIGHT + FLANGE_HEIGHT])
verts.append([sin(radians(30)) * FLANGE_RADIUS,cos(radians(30)) * FLANGE_RADIUS,-HEIGHT + FLANGE_HEIGHT])
-
+
Row += 1
verts.append([sin(radians(0)) * FLANGE_RADIUS,cos(radians(0)) * FLANGE_RADIUS,-HEIGHT])
@@ -1319,10 +1319,10 @@ def Create_12_Point(FLAT, HOLE_DIA, SHANK_DIA, HEIGHT,FLANGE_DIA):
verts.append([sin(radians(20)) * FLANGE_RADIUS,cos(radians(20)) * FLANGE_RADIUS,-HEIGHT])
verts.append([sin(radians(25)) * FLANGE_RADIUS,cos(radians(25)) * FLANGE_RADIUS,-HEIGHT])
verts.append([sin(radians(30)) * FLANGE_RADIUS,cos(radians(30)) * FLANGE_RADIUS,-HEIGHT])
-
+
Row += 1
-
+
verts.append([sin(radians(0)) * SHANK_RADIUS,cos(radians(0)) * SHANK_RADIUS,-HEIGHT])
verts.append([sin(radians(0)) * SHANK_RADIUS,cos(radians(0)) * SHANK_RADIUS,-HEIGHT])
verts.append([sin(radians(10)) * SHANK_RADIUS,cos(radians(10)) * SHANK_RADIUS,-HEIGHT])
@@ -1330,7 +1330,7 @@ def Create_12_Point(FLAT, HOLE_DIA, SHANK_DIA, HEIGHT,FLANGE_DIA):
verts.append([sin(radians(20)) * SHANK_RADIUS,cos(radians(20)) * SHANK_RADIUS,-HEIGHT])
verts.append([sin(radians(20)) * SHANK_RADIUS,cos(radians(20)) * SHANK_RADIUS,-HEIGHT])
verts.append([sin(radians(30)) * SHANK_RADIUS,cos(radians(30)) * SHANK_RADIUS,-HEIGHT])
-
+
Row += 1
@@ -1344,8 +1344,8 @@ def Create_12_Point(FLAT, HOLE_DIA, SHANK_DIA, HEIGHT,FLANGE_DIA):
def Create_12_Point_Head(FLAT, HOLE_DIA, SHANK_DIA, HEIGHT,FLANGE_DIA):
#TODO add under head radius
return Create_12_Point(FLAT, HOLE_DIA, SHANK_DIA, HEIGHT,FLANGE_DIA)
-
-
+
+
# ####################################################################
# Create External Thread
@@ -2288,13 +2288,13 @@ def Nut_Mesh(props, context):
Face_Start = len(verts)
-
-
+
+
if props.bf_Nut_Type == 'bf_Nut_12Pnt':
Nut_Height = props.bf_12_Point_Nut_Height
else:
Nut_Height = props.bf_Hex_Nut_Height
-
+
Thread_Verts, Thread_Faces, New_Nut_Height = Create_Internal_Thread(
props.bf_Minor_Dia, props.bf_Major_Dia,
props.bf_Pitch, Nut_Height,
@@ -2305,7 +2305,7 @@ def Nut_Mesh(props, context):
faces.extend(Copy_Faces(Thread_Faces, Face_Start))
Face_Start = len(verts)
-
+
if props.bf_Nut_Type == 'bf_Nut_12Pnt':
Head_Verts, Head_Faces, Lock_Nut_Rad = add_12_Point_Nut(
props.bf_12_Point_Nut_Flat_Distance,
@@ -2397,13 +2397,13 @@ def Bolt_Mesh(props, context):
props.bf_Hex_Head_Flat_Distance, Bit_Dia,
props.bf_Shank_Dia, props.bf_Hex_Head_Height
)
-
+
elif props.bf_Head_Type == 'bf_Head_12Pnt':
Head_Verts, Head_Faces, Head_Height = Create_12_Point_Head(
props.bf_12_Point_Head_Flat_Distance, Bit_Dia,
props.bf_Shank_Dia, props.bf_12_Point_Head_Height,
#Limit the size of the Flange to avoid calculation error
- max(props.bf_12_Point_Head_Flange_Dia,props.bf_12_Point_Head_Flat_Distance)
+ max(props.bf_12_Point_Head_Flange_Dia,props.bf_12_Point_Head_Flat_Distance)
)
elif props.bf_Head_Type == 'bf_Head_Cap':
Head_Verts, Head_Faces, Head_Height = Create_Cap_Head(
diff --git a/add_mesh_extra_objects/add_mesh_round_brilliant.py b/add_mesh_extra_objects/add_mesh_round_brilliant.py
index 49232151..bdd9b68c 100644
--- a/add_mesh_extra_objects/add_mesh_round_brilliant.py
+++ b/add_mesh_extra_objects/add_mesh_round_brilliant.py
@@ -298,7 +298,7 @@ def addBrilliant(context, self, s, table_w, crown_h, girdle_t, pavi_d, bezel_f,
bpy.context.tool_settings.mesh_select_mode = sel_mode
bpy.ops.object.mode_set(mode='OBJECT', toggle=False)
- bpy.ops.object.modifier_apply(apply_as='DATA', modifier="EdgeSplit")
+ bpy.ops.object.modifier_apply(modifier="EdgeSplit")
return dobj
diff --git a/archipack/archipack_object.py b/archipack/archipack_object.py
index 8c908214..f513b506 100644
--- a/archipack/archipack_object.py
+++ b/archipack/archipack_object.py
@@ -259,7 +259,7 @@ class ArchipackDrawTool(ArchipackCollectionManager):
view_vector_mouse = region_2d_to_vector_3d(region, rv3d, co2d)
ray_origin_mouse = region_2d_to_origin_3d(region, rv3d, co2d)
res, pos, normal, face_index, object, matrix_world = context.scene.ray_cast(
- view_layer=context.view_layer,
+ depsgraph=context.view_layer.depsgraph,
origin=ray_origin_mouse,
direction=view_vector_mouse)
return res, pos, normal, face_index, object, matrix_world
diff --git a/archipack/archipack_reference_point.py b/archipack/archipack_reference_point.py
index 99acf9d8..5548511c 100644
--- a/archipack/archipack_reference_point.py
+++ b/archipack/archipack_reference_point.py
@@ -298,8 +298,7 @@ class ARCHIPACK_OT_apply_holes(Operator):
for mod in o.modifiers[:]:
ctx['modifier'] = mod
try:
- bpy.ops.object.modifier_apply(ctx, apply_as='DATA',
- modifier=mod.name)
+ bpy.ops.object.modifier_apply(ctx, modifier=mod.name)
except:
pass
diff --git a/archipack/archipack_wall2.py b/archipack/archipack_wall2.py
index d9a486eb..53375992 100644
--- a/archipack/archipack_wall2.py
+++ b/archipack/archipack_wall2.py
@@ -1639,7 +1639,7 @@ class archipack_wall2(ArchipackObject, Manipulable, PropertyGroup):
# prevent self intersect
o.hide_viewport = True
res, pos, normal, face_index, r, matrix_world = context.scene.ray_cast(
- view_layer=context.view_layer,
+ depsgraph=context.view_layer.depsgraph,
origin=p,
direction=up)
diff --git a/blenderkit/__init__.py b/blenderkit/__init__.py
index 449e65a4..fba80a7e 100644
--- a/blenderkit/__init__.py
+++ b/blenderkit/__init__.py
@@ -19,8 +19,8 @@
bl_info = {
"name": "BlenderKit Online Asset Library",
"author": "Vilem Duha, Petr Dlouhy",
- "version": (1, 0, 30),
- "blender": (2, 82, 0),
+ "version": (1, 0, 31),
+ "blender": (2, 83, 0),
"location": "View3D > Properties > BlenderKit",
"description": "Online BlenderKit library (materials, models, brushes and more). Connects to the internet.",
"warning": "",
@@ -93,6 +93,7 @@ def scene_load(context):
preferences = bpy.context.preferences.addons['blenderkit'].preferences
preferences.login_attempt = False
+
@bpy.app.handlers.persistent
def check_timers_timer():
''' checks if all timers are registered regularly. Prevents possible bugs from stopping the addon.'''
@@ -251,15 +252,18 @@ def switch_search_results(self, context):
def asset_type_callback(self, context):
- # s = bpy.context.scene
- # ui_props = s.blenderkitUI
+ '''
+ Returns
+ items for Enum property, depending on the down_up property - BlenderKit is either in search or in upload mode.
+
+ '''
if self.down_up == 'SEARCH':
items = (
('MODEL', 'Models', 'Find models in the BlenderKit online database', 'OBJECT_DATAMODE', 0),
# ('SCENE', 'SCENE', 'Browse scenes', 'SCENE_DATA', 1),
- ('MATERIAL', 'Materials', 'Find models in the BlenderKit online database', 'MATERIAL', 2),
+ ('MATERIAL', 'Materials', 'Find materials in the BlenderKit online database', 'MATERIAL', 2),
# ('TEXTURE', 'Texture', 'Browse textures', 'TEXTURE', 3),
- ('BRUSH', 'Brushes', 'Find models in the BlenderKit online database', 'BRUSH_DATA', 3)
+ ('BRUSH', 'Brushes', 'Find brushes in the BlenderKit online database', 'BRUSH_DATA', 3)
)
else:
items = (
@@ -481,6 +485,7 @@ class BlenderKitCommonSearchProps(object):
('DELETED', 'Deleted', 'Deleted'),
),
default='ALL',
+ update=search.search_update,
)
@@ -525,7 +530,7 @@ def update_free(self, context):
"Part of subscription is sent to artists based on usage by paying users."
def draw_message(self, context):
- ui_panels.label_multiline(self.layout, text=message, icon='NONE', width=-1)
+ utils.label_multiline(self.layout, text=message, icon='NONE', width=-1)
bpy.context.window_manager.popup_menu(draw_message, title=title, icon='INFO')
@@ -653,17 +658,26 @@ class BlenderKitRatingProps(PropertyGroup):
min=-1, max=10,
update=ratings.update_ratings_quality)
+ # the following enum is only to ease interaction - enums support 'drag over' and enable to draw the stars easily.
+ rating_quality_ui: EnumProperty(name='rating_quality_ui',
+ items=ratings.stars_enum_callback,
+ description='Rating stars 0 - 10',
+ default=None,
+ update=ratings.update_quality_ui,
+ )
+
rating_work_hours: FloatProperty(name="Work Hours",
description="How many hours did this work take?",
- default=0.01,
+ default=0.00,
min=0.0, max=1000, update=ratings.update_ratings_work_hours
)
- rating_complexity: IntProperty(name="Complexity",
- description="Complexity is a number estimating how much work was spent on the asset.aaa",
- default=0, min=0, max=10)
- rating_virtual_price: FloatProperty(name="Virtual Price",
- description="How much would you pay for this object if buing it?",
- default=0, min=0, max=10000)
+
+ # rating_complexity: IntProperty(name="Complexity",
+ # description="Complexity is a number estimating how much work was spent on the asset.aaa",
+ # default=0, min=0, max=10)
+ # rating_virtual_price: FloatProperty(name="Virtual Price",
+ # description="How much would you pay for this object if buing it?",
+ # default=0, min=0, max=10000)
rating_problems: StringProperty(
name="Problems",
description="Problems found/ why did you take points down - this will be available for the author"
@@ -1358,6 +1372,17 @@ class BlenderKitModelSearchProps(PropertyGroup, BlenderKitCommonSearchProps):
max=180,
subtype='ANGLE')
+ perpendicular_snap: BoolProperty(name='Perpendicular snap',
+ description="Limit snapping that is close to perpendicular angles to be perpendicular.",
+ default=True)
+
+ perpendicular_snap_threshold: FloatProperty(name="Threshold",
+ description="Limit perpendicular snap to be below these values.",
+ default=.25,
+ min=0,
+ max=.5,
+ )
+
class BlenderKitSceneSearchProps(PropertyGroup, BlenderKitCommonSearchProps):
search_keywords: StringProperty(
@@ -1392,14 +1417,15 @@ class BlenderKitSceneSearchProps(PropertyGroup, BlenderKitCommonSearchProps):
update=search.search_update
)
+
def fix_subdir(self, context):
'''Fixes project subdicrectory settings if people input invalid path.'''
# pp = pathlib.PurePath(self.project_subdir)
pp = self.project_subdir[:]
- pp = pp.replace('\\','')
- pp = pp.replace('/','')
- pp = pp.replace(':','')
+ pp = pp.replace('\\', '')
+ pp = pp.replace('/', '')
+ pp = pp.replace(':', '')
pp = '//' + pp
if self.project_subdir != pp:
self.project_subdir = pp
@@ -1410,10 +1436,11 @@ def fix_subdir(self, context):
"and uses it for storing assets."
def draw_message(self, context):
- ui_panels.label_multiline(self.layout, text=message, icon='NONE', width=400)
+ utils.label_multiline(self.layout, text=message, icon='NONE', width=400)
bpy.context.window_manager.popup_menu(draw_message, title=title, icon='INFO')
+
class BlenderKitAddonPreferences(AddonPreferences):
# this must match the addon name, use '__package__'
# when defining this in a submodule of a python package.
@@ -1493,7 +1520,7 @@ class BlenderKitAddonPreferences(AddonPreferences):
description="where data will be stored for individual projects",
# subtype='DIR_PATH',
default="//assets",
- update = fix_subdir
+ update=fix_subdir
)
directory_behaviour: EnumProperty(
@@ -1549,19 +1576,21 @@ class BlenderKitAddonPreferences(AddonPreferences):
min=0,
max=20000)
- first_run: BoolProperty(
- name="First run",
- description="Detects if addon was already registered/run.",
- default=True,
- update=utils.save_prefs
- )
+ # this is now made obsolete by the new popup upon registration -ensures the user knows about the first search.
+ # first_run: BoolProperty(
+ # name="First run",
+ # description="Detects if addon was already registered/run.",
+ # default=True,
+ # update=utils.save_prefs
+ # )
use_timers: BoolProperty(
name="Use timers",
- description="Use timers for bkit",
+ description="Use timers for BlenderKit. Usefull for debugging since timers seem to be unstable.",
default=True,
update=utils.save_prefs
)
+
# allow_proximity : BoolProperty(
# name="allow proximity data reports",
# description="This sends anonymized proximity data \n \
@@ -1687,6 +1716,12 @@ def register():
bpy.app.timers.register(check_timers_timer, persistent=True)
bpy.app.handlers.load_post.append(scene_load)
+ # detect if the user just enabled the addon in preferences, thus enable to run
+ for w in bpy.context.window_manager.windows:
+ for a in w.screen.areas:
+ if a.type == 'PREFERENCES':
+ tasks_queue.add_task((bpy.ops.wm.blenderkit_welcome, ('INVOKE_DEFAULT',)), fake_context=True,
+ fake_context_area='PREFERENCES')
def unregister():
diff --git a/blenderkit/append_link.py b/blenderkit/append_link.py
index b6bfb791..56b2857d 100644
--- a/blenderkit/append_link.py
+++ b/blenderkit/append_link.py
@@ -88,6 +88,8 @@ def append_scene(file_name, scenename=None, link=False, fake_user=False):
def link_collection(file_name, obnames=[], location=(0, 0, 0), link=False, parent = None, **kwargs):
'''link an instanced group - model type asset'''
sel = utils.selection_get()
+ print('link collection')
+ print(kwargs)
with bpy.data.libraries.load(file_name, link=link, relative=True) as (data_from, data_to):
scols = []
@@ -115,6 +117,12 @@ def link_collection(file_name, obnames=[], location=(0, 0, 0), link=False, paren
main_object.instance_collection = col
break;
+ #sometimes, the lib might already be without the actual link.
+ if not main_object.instance_collection and kwargs['name']:
+ col = bpy.data.collections.get(kwargs['name'])
+ if col:
+ main_object.instance_collection = col
+
main_object.name = main_object.instance_collection.name
# bpy.ops.wm.link(directory=file_name + "/Collection/", filename=kwargs['name'], link=link, instance_collections=True,
@@ -181,9 +189,43 @@ def append_particle_system(file_name, obnames=[], location=(0, 0, 0), link=False
def append_objects(file_name, obnames=[], location=(0, 0, 0), link=False, **kwargs):
'''append objects into scene individually'''
-
+ #simplified version of append
+ if kwargs.get('name'):
+ # by now used for appending into scene
+ scene = bpy.context.scene
+ sel = utils.selection_get()
+ bpy.ops.object.select_all(action='DESELECT')
+
+ path = file_name + "\\Collection\\"
+ object_name = kwargs.get('name')
+ fc = utils.get_fake_context(bpy.context, area_type='VIEW_3D')
+ bpy.ops.wm.append(fc, filename=object_name, directory=path)
+
+
+ return_obs = []
+ for ob in bpy.context.scene.objects:
+ if ob.select_get():
+ return_obs.append(ob)
+ if not ob.parent:
+ main_object = ob
+ ob.location = location
+
+ if kwargs.get('rotation'):
+ main_object.rotation_euler = kwargs['rotation']
+
+ if kwargs.get('parent') is not None:
+ main_object.parent = bpy.data.objects[kwargs['parent']]
+ main_object.matrix_world.translation = location
+
+ bpy.ops.object.select_all(action='DESELECT')
+ utils.selection_set(sel)
+
+ return main_object, return_obs
+ #this is used for uploads:
with bpy.data.libraries.load(file_name, link=link, relative=True) as (data_from, data_to):
sobs = []
+ # for col in data_from.collections:
+ # if col == kwargs.get('name'):
for ob in data_from.objects:
if ob in obnames or obnames == []:
sobs.append(ob)
@@ -213,12 +255,15 @@ def append_objects(file_name, obnames=[], location=(0, 0, 0), link=False, **kwar
hidden_objects.append(obj)
obj.hide_viewport = False
return_obs.append(obj)
+
# Only after all objects are in scene! Otherwise gets broken relationships
if link == True:
bpy.ops.object.make_local(type='SELECT_OBJECT')
for ob in hidden_objects:
ob.hide_viewport = True
+ print(return_obs)
+ print(main_object)
if kwargs.get('rotation') is not None:
main_object.rotation_euler = kwargs['rotation']
diff --git a/blenderkit/bkit_oauth.py b/blenderkit/bkit_oauth.py
index 4d2f09dc..59ed6c8b 100644
--- a/blenderkit/bkit_oauth.py
+++ b/blenderkit/bkit_oauth.py
@@ -26,8 +26,9 @@ if "bpy" in locals():
categories = reload(categories)
oauth = reload(oauth)
ui = reload(ui)
+ ui = reload(ui_panels)
else:
- from blenderkit import tasks_queue, utils, paths, search, categories, oauth, ui
+ from blenderkit import tasks_queue, utils, paths, search, categories, oauth, ui, ui_panels
import bpy
@@ -70,7 +71,7 @@ def refresh_token_thread():
thread = threading.Thread(target=refresh_token, args=([preferences.api_key_refresh, url]), daemon=True)
thread.start()
else:
- ui.add_report('Already Refreshing token, will be ready soon.')
+ ui.add_report('Already Refreshing token, will be ready soon. If this fails, please login again in Login panel.')
def refresh_token(api_key_refresh, url):
@@ -102,7 +103,7 @@ class RegisterLoginOnline(bpy.types.Operator):
"""Login online on BlenderKit webpage"""
bl_idname = "wm.blenderkit_login"
- bl_label = "BlenderKit login or signup"
+ bl_label = "BlenderKit login/signup"
bl_options = {'REGISTER', 'UNDO'}
signup: BoolProperty(
@@ -112,16 +113,32 @@ class RegisterLoginOnline(bpy.types.Operator):
options={'SKIP_SAVE'}
)
+ message: bpy.props.StringProperty(
+ name="Message",
+ description="",
+ default="You were logged out from BlenderKit.\n Clicking OK takes you to web login. ")
+
@classmethod
def poll(cls, context):
return True
+ def draw(self, context):
+ layout = self.layout
+ utils.label_multiline(layout, text=self.message, width = 300)
+
def execute(self, context):
preferences = bpy.context.preferences.addons['blenderkit'].preferences
preferences.login_attempt = True
login_thread(self.signup)
return {'FINISHED'}
+ def invoke(self, context, event):
+ wm = bpy.context.window_manager
+ preferences = bpy.context.preferences.addons['blenderkit'].preferences
+ preferences.api_key_refresh = ''
+ preferences.api_key = ''
+ return wm.invoke_props_dialog(self)
+
class Logout(bpy.types.Operator):
"""Logout from BlenderKit immediately"""
@@ -139,7 +156,8 @@ class Logout(bpy.types.Operator):
preferences.login_attempt = False
preferences.api_key_refresh = ''
preferences.api_key = ''
- del (bpy.context.window_manager['bkit profile'])
+ if bpy.context.window_manager.get('bkit profile'):
+ del (bpy.context.window_manager['bkit profile'])
return {'FINISHED'}
diff --git a/blenderkit/data/categories.json b/blenderkit/data/categories.json
index 2eb34a34..d6286050 100644
--- a/blenderkit/data/categories.json
+++ b/blenderkit/data/categories.json
@@ -1,22 +1,5 @@
[
{
- "name": "addon",
- "slug": "addon",
- "active": true,
- "thumbnail": null,
- "thumbnailWidth": null,
- "thumbnailHeight": null,
- "order": 0,
- "alternateTitle": "addon",
- "alternateUrl": "",
- "description": "",
- "metaKeywords": "",
- "metaExtra": "",
- "children": [],
- "assetCount": 3,
- "assetCountCumulative": 3
- },
- {
"name": "brush",
"slug": "brush",
"active": true,
@@ -99,23 +82,6 @@
"assetCountCumulative": 8
},
{
- "name": "crack",
- "slug": "crack",
- "active": true,
- "thumbnail": null,
- "thumbnailWidth": null,
- "thumbnailHeight": null,
- "order": 0,
- "alternateTitle": "crack",
- "alternateUrl": "",
- "description": "",
- "metaKeywords": "",
- "metaExtra": "",
- "children": [],
- "assetCount": 0,
- "assetCountCumulative": 0
- },
- {
"name": "cut",
"slug": "cut",
"active": true,
@@ -269,23 +235,6 @@
"assetCountCumulative": 1
},
{
- "name": "nature",
- "slug": "nature-brush",
- "active": true,
- "thumbnail": null,
- "thumbnailWidth": null,
- "thumbnailHeight": null,
- "order": 0,
- "alternateTitle": "nature",
- "alternateUrl": "",
- "description": "",
- "metaKeywords": "",
- "metaExtra": "",
- "children": [],
- "assetCount": 1,
- "assetCountCumulative": 1
- },
- {
"name": "pattern",
"slug": "pattern",
"active": true,
@@ -316,42 +265,8 @@
"metaKeywords": "",
"metaExtra": "",
"children": [],
- "assetCount": 7,
- "assetCountCumulative": 7
- },
- {
- "name": "rust",
- "slug": "rust-brush",
- "active": true,
- "thumbnail": null,
- "thumbnailWidth": null,
- "thumbnailHeight": null,
- "order": 0,
- "alternateTitle": "rust",
- "alternateUrl": "",
- "description": "",
- "metaKeywords": "",
- "metaExtra": "",
- "children": [],
- "assetCount": 0,
- "assetCountCumulative": 0
- },
- {
- "name": "sculpture",
- "slug": "sculpture-brush",
- "active": true,
- "thumbnail": null,
- "thumbnailWidth": null,
- "thumbnailHeight": null,
- "order": 0,
- "alternateTitle": "sculpture",
- "alternateUrl": "",
- "description": "",
- "metaKeywords": "",
- "metaExtra": "",
- "children": [],
- "assetCount": 0,
- "assetCountCumulative": 0
+ "assetCount": 8,
+ "assetCountCumulative": 8
},
{
"name": "stitches",
@@ -371,23 +286,6 @@
"assetCountCumulative": 12
},
{
- "name": "stone",
- "slug": "stone-brush",
- "active": true,
- "thumbnail": null,
- "thumbnailWidth": null,
- "thumbnailHeight": null,
- "order": 0,
- "alternateTitle": "stone",
- "alternateUrl": "",
- "description": "",
- "metaKeywords": "",
- "metaExtra": "",
- "children": [],
- "assetCount": 0,
- "assetCountCumulative": 0
- },
- {
"name": "tree",
"slug": "tree-brush",
"active": true,
@@ -403,23 +301,6 @@
"children": [],
"assetCount": 4,
"assetCountCumulative": 4
- },
- {
- "name": "wood",
- "slug": "wood-brush",
- "active": true,
- "thumbnail": null,
- "thumbnailWidth": null,
- "thumbnailHeight": null,
- "order": 0,
- "alternateTitle": "wood",
- "alternateUrl": "",
- "description": "",
- "metaKeywords": "",
- "metaExtra": "",
- "children": [],
- "assetCount": 0,
- "assetCountCumulative": 0
}
],
"assetCount": 94,
@@ -453,8 +334,8 @@
"metaKeywords": "",
"metaExtra": "",
"children": [],
- "assetCount": 2,
- "assetCountCumulative": 2
+ "assetCount": 36,
+ "assetCountCumulative": 36
},
{
"name": "asphalt",
@@ -470,8 +351,8 @@
"metaKeywords": "",
"metaExtra": "",
"children": [],
- "assetCount": 5,
- "assetCountCumulative": 5
+ "assetCount": 36,
+ "assetCountCumulative": 36
},
{
"name": "bricks",
@@ -487,8 +368,8 @@
"metaKeywords": "",
"metaExtra": "",
"children": [],
- "assetCount": 10,
- "assetCountCumulative": 10
+ "assetCount": 64,
+ "assetCountCumulative": 64
},
{
"name": "ceramic",
@@ -504,8 +385,8 @@
"metaKeywords": "",
"metaExtra": "",
"children": [],
- "assetCount": 2,
- "assetCountCumulative": 2
+ "assetCount": 16,
+ "assetCountCumulative": 16
},
{
"name": "concrete",
@@ -521,8 +402,8 @@
"metaKeywords": "",
"metaExtra": "",
"children": [],
- "assetCount": 21,
- "assetCountCumulative": 21
+ "assetCount": 64,
+ "assetCountCumulative": 64
},
{
"name": "dirt",
@@ -538,8 +419,8 @@
"metaKeywords": "",
"metaExtra": "",
"children": [],
- "assetCount": 6,
- "assetCountCumulative": 6
+ "assetCount": 25,
+ "assetCountCumulative": 25
},
{
"name": "fabric",
@@ -555,8 +436,8 @@
"metaKeywords": "",
"metaExtra": "",
"children": [],
- "assetCount": 41,
- "assetCountCumulative": 41
+ "assetCount": 169,
+ "assetCountCumulative": 169
},
{
"name": "floor",
@@ -572,8 +453,8 @@
"metaKeywords": "",
"metaExtra": "",
"children": [],
- "assetCount": 27,
- "assetCountCumulative": 27
+ "assetCount": 41,
+ "assetCountCumulative": 41
},
{
"name": "food",
@@ -589,8 +470,8 @@
"metaKeywords": "",
"metaExtra": "",
"children": [],
- "assetCount": 2,
- "assetCountCumulative": 2
+ "assetCount": 30,
+ "assetCountCumulative": 30
},
{
"name": "fx",
@@ -606,8 +487,8 @@
"metaKeywords": "",
"metaExtra": "",
"children": [],
- "assetCount": 1,
- "assetCountCumulative": 1
+ "assetCount": 19,
+ "assetCountCumulative": 19
},
{
"name": "glass",
@@ -623,25 +504,8 @@
"metaKeywords": "",
"metaExtra": "",
"children": [],
- "assetCount": 9,
- "assetCountCumulative": 9
- },
- {
- "name": "grass",
- "slug": "grass",
- "active": true,
- "thumbnail": null,
- "thumbnailWidth": null,
- "thumbnailHeight": null,
- "order": 0,
- "alternateTitle": "grass",
- "alternateUrl": "",
- "description": "",
- "metaKeywords": "",
- "metaExtra": "",
- "children": [],
- "assetCount": 0,
- "assetCountCumulative": 0
+ "assetCount": 28,
+ "assetCountCumulative": 28
},
{
"name": "ground",
@@ -657,25 +521,8 @@
"metaKeywords": "",
"metaExtra": "",
"children": [],
- "assetCount": 12,
- "assetCountCumulative": 12
- },
- {
- "name": "human",
- "slug": "human",
- "active": true,
- "thumbnail": null,
- "thumbnailWidth": null,
- "thumbnailHeight": null,
- "order": 0,
- "alternateTitle": "human",
- "alternateUrl": "",
- "description": "",
- "metaKeywords": "",
- "metaExtra": "",
- "children": [],
- "assetCount": 0,
- "assetCountCumulative": 0
+ "assetCount": 70,
+ "assetCountCumulative": 70
},
{
"name": "ice",
@@ -691,8 +538,8 @@
"metaKeywords": "",
"metaExtra": "",
"children": [],
- "assetCount": 2,
- "assetCountCumulative": 2
+ "assetCount": 20,
+ "assetCountCumulative": 20
},
{
"name": "leather",
@@ -708,8 +555,8 @@
"metaKeywords": "",
"metaExtra": "",
"children": [],
- "assetCount": 6,
- "assetCountCumulative": 6
+ "assetCount": 37,
+ "assetCountCumulative": 37
},
{
"name": "liquid",
@@ -725,25 +572,8 @@
"metaKeywords": "",
"metaExtra": "",
"children": [],
- "assetCount": 5,
- "assetCountCumulative": 5
- },
- {
- "name": "marble",
- "slug": "marble",
- "active": true,
- "thumbnail": null,
- "thumbnailWidth": null,
- "thumbnailHeight": null,
- "order": 0,
- "alternateTitle": "marble",
- "alternateUrl": "",
- "description": "",
- "metaKeywords": "",
- "metaExtra": "",
- "children": [],
- "assetCount": 0,
- "assetCountCumulative": 0
+ "assetCount": 7,
+ "assetCountCumulative": 7
},
{
"name": "metal",
@@ -759,8 +589,8 @@
"metaKeywords": "",
"metaExtra": "",
"children": [],
- "assetCount": 37,
- "assetCountCumulative": 37
+ "assetCount": 119,
+ "assetCountCumulative": 119
},
{
"name": "organic",
@@ -776,8 +606,8 @@
"metaKeywords": "",
"metaExtra": "",
"children": [],
- "assetCount": 3,
- "assetCountCumulative": 3
+ "assetCount": 29,
+ "assetCountCumulative": 29
},
{
"name": "ornaments",
@@ -793,8 +623,8 @@
"metaKeywords": "",
"metaExtra": "",
"children": [],
- "assetCount": 1,
- "assetCountCumulative": 1
+ "assetCount": 3,
+ "assetCountCumulative": 3
},
{
"name": "paper",
@@ -810,8 +640,8 @@
"metaKeywords": "",
"metaExtra": "",
"children": [],
- "assetCount": 26,
- "assetCountCumulative": 26
+ "assetCount": 30,
+ "assetCountCumulative": 30
},
{
"name": "paving",
@@ -827,8 +657,8 @@
"metaKeywords": "",
"metaExtra": "",
"children": [],
- "assetCount": 9,
- "assetCountCumulative": 9
+ "assetCount": 32,
+ "assetCountCumulative": 32
},
{
"name": "plaster",
@@ -844,8 +674,8 @@
"metaKeywords": "",
"metaExtra": "",
"children": [],
- "assetCount": 8,
- "assetCountCumulative": 8
+ "assetCount": 34,
+ "assetCountCumulative": 34
},
{
"name": "plastic",
@@ -861,8 +691,8 @@
"metaKeywords": "",
"metaExtra": "",
"children": [],
- "assetCount": 3,
- "assetCountCumulative": 3
+ "assetCount": 25,
+ "assetCountCumulative": 25
},
{
"name": "rock",
@@ -878,8 +708,8 @@
"metaKeywords": "",
"metaExtra": "",
"children": [],
- "assetCount": 7,
- "assetCountCumulative": 7
+ "assetCount": 21,
+ "assetCountCumulative": 21
},
{
"name": "roofing",
@@ -895,8 +725,8 @@
"metaKeywords": "",
"metaExtra": "",
"children": [],
- "assetCount": 2,
- "assetCountCumulative": 2
+ "assetCount": 12,
+ "assetCountCumulative": 12
},
{
"name": "rubber",
@@ -912,8 +742,8 @@
"metaKeywords": "",
"metaExtra": "",
"children": [],
- "assetCount": 1,
- "assetCountCumulative": 1
+ "assetCount": 8,
+ "assetCountCumulative": 8
},
{
"name": "rust",
@@ -929,8 +759,8 @@
"metaKeywords": "",
"metaExtra": "",
"children": [],
- "assetCount": 5,
- "assetCountCumulative": 5
+ "assetCount": 17,
+ "assetCountCumulative": 17
},
{
"name": "sand",
@@ -946,25 +776,8 @@
"metaKeywords": "",
"metaExtra": "",
"children": [],
- "assetCount": 2,
- "assetCountCumulative": 2
- },
- {
- "name": "soil",
- "slug": "soil",
- "active": true,
- "thumbnail": null,
- "thumbnailWidth": null,
- "thumbnailHeight": null,
- "order": 0,
- "alternateTitle": "soil",
- "alternateUrl": "",
- "description": "",
- "metaKeywords": "",
- "metaExtra": "",
- "children": [],
- "assetCount": 0,
- "assetCountCumulative": 0
+ "assetCount": 16,
+ "assetCountCumulative": 16
},
{
"name": "stone",
@@ -980,8 +793,8 @@
"metaKeywords": "",
"metaExtra": "",
"children": [],
- "assetCount": 11,
- "assetCountCumulative": 11
+ "assetCount": 117,
+ "assetCountCumulative": 117
},
{
"name": "tech",
@@ -997,8 +810,8 @@
"metaKeywords": "",
"metaExtra": "",
"children": [],
- "assetCount": 3,
- "assetCountCumulative": 3
+ "assetCount": 34,
+ "assetCountCumulative": 34
},
{
"name": "tiles",
@@ -1014,8 +827,8 @@
"metaKeywords": "",
"metaExtra": "",
"children": [],
- "assetCount": 19,
- "assetCountCumulative": 19
+ "assetCount": 84,
+ "assetCountCumulative": 84
},
{
"name": "wood",
@@ -1031,12 +844,12 @@
"metaKeywords": "",
"metaExtra": "",
"children": [],
- "assetCount": 42,
- "assetCountCumulative": 42
+ "assetCount": 121,
+ "assetCountCumulative": 121
}
],
- "assetCount": 331,
- "assetCountCumulative": 331
+ "assetCount": 1365,
+ "assetCountCumulative": 1365
},
{
"name": "model",
@@ -1080,97 +893,12 @@
"metaKeywords": "",
"metaExtra": "",
"children": [],
- "assetCount": 0,
- "assetCountCumulative": 0
- },
- {
- "name": "helicopter",
- "slug": "helicopter",
- "active": true,
- "thumbnail": null,
- "thumbnailWidth": null,
- "thumbnailHeight": null,
- "order": 0,
- "alternateTitle": "helicopter",
- "alternateUrl": "",
- "description": "",
- "metaKeywords": "",
- "metaExtra": "",
- "children": [],
- "assetCount": 0,
- "assetCountCumulative": 0
- },
- {
- "name": "historic",
- "slug": "historic-aircraft",
- "active": true,
- "thumbnail": null,
- "thumbnailWidth": null,
- "thumbnailHeight": null,
- "order": 0,
- "alternateTitle": "historic",
- "alternateUrl": "",
- "description": "",
- "metaKeywords": "",
- "metaExtra": "",
- "children": [],
- "assetCount": 0,
- "assetCountCumulative": 0
- },
- {
- "name": "jet",
- "slug": "jet",
- "active": true,
- "thumbnail": null,
- "thumbnailWidth": null,
- "thumbnailHeight": null,
- "order": 0,
- "alternateTitle": "jet",
- "alternateUrl": "",
- "description": "",
- "metaKeywords": "",
- "metaExtra": "",
- "children": [],
- "assetCount": 0,
- "assetCountCumulative": 0
- },
- {
- "name": "part",
- "slug": "part-aircraft",
- "active": true,
- "thumbnail": null,
- "thumbnailWidth": null,
- "thumbnailHeight": null,
- "order": 0,
- "alternateTitle": "part",
- "alternateUrl": "",
- "description": "",
- "metaKeywords": "",
- "metaExtra": "",
- "children": [],
- "assetCount": 0,
- "assetCountCumulative": 0
- },
- {
- "name": "private",
- "slug": "private",
- "active": true,
- "thumbnail": null,
- "thumbnailWidth": null,
- "thumbnailHeight": null,
- "order": 0,
- "alternateTitle": "private",
- "alternateUrl": "",
- "description": "",
- "metaKeywords": "",
- "metaExtra": "",
- "children": [],
- "assetCount": 0,
- "assetCountCumulative": 0
+ "assetCount": 20,
+ "assetCountCumulative": 20
}
],
- "assetCount": 0,
- "assetCountCumulative": 0
+ "assetCount": 20,
+ "assetCountCumulative": 20
},
{
"name": "architecture",
@@ -1200,12 +928,12 @@
"metaKeywords": "",
"metaExtra": "",
"children": [],
- "assetCount": 4,
- "assetCountCumulative": 4
+ "assetCount": 133,
+ "assetCountCumulative": 133
}
],
- "assetCount": 5,
- "assetCountCumulative": 5
+ "assetCount": 137,
+ "assetCountCumulative": 137
},
{
"name": "art",
@@ -1222,40 +950,6 @@
"metaExtra": "",
"children": [
{
- "name": "design",
- "slug": "design",
- "active": true,
- "thumbnail": null,
- "thumbnailWidth": null,
- "thumbnailHeight": null,
- "order": 0,
- "alternateTitle": "design",
- "alternateUrl": "",
- "description": "",
- "metaKeywords": "",
- "metaExtra": "",
- "children": [],
- "assetCount": 0,
- "assetCountCumulative": 0
- },
- {
- "name": "drawing",
- "slug": "drawing",
- "active": true,
- "thumbnail": null,
- "thumbnailWidth": null,
- "thumbnailHeight": null,
- "order": 0,
- "alternateTitle": "drawing",
- "alternateUrl": "",
- "description": "",
- "metaKeywords": "",
- "metaExtra": "",
- "children": [],
- "assetCount": 0,
- "assetCountCumulative": 0
- },
- {
"name": "literature",
"slug": "literature",
"active": true,
@@ -1286,8 +980,8 @@
"metaKeywords": "",
"metaExtra": "",
"children": [],
- "assetCount": 8,
- "assetCountCumulative": 8
+ "assetCount": 15,
+ "assetCountCumulative": 15
},
{
"name": "sculpture",
@@ -1303,8 +997,8 @@
"metaKeywords": "",
"metaExtra": "",
"children": [],
- "assetCount": 8,
- "assetCountCumulative": 8
+ "assetCount": 15,
+ "assetCountCumulative": 15
},
{
"name": "supplies",
@@ -1320,12 +1014,12 @@
"metaKeywords": "",
"metaExtra": "",
"children": [],
- "assetCount": 1,
- "assetCountCumulative": 1
+ "assetCount": 8,
+ "assetCountCumulative": 8
}
],
- "assetCount": 26,
- "assetCountCumulative": 26
+ "assetCount": 49,
+ "assetCountCumulative": 49
},
{
"name": "character",
@@ -1355,25 +1049,8 @@
"metaKeywords": "",
"metaExtra": "",
"children": [],
- "assetCount": 4,
- "assetCountCumulative": 4
- },
- {
- "name": "child",
- "slug": "child",
- "active": true,
- "thumbnail": null,
- "thumbnailWidth": null,
- "thumbnailHeight": null,
- "order": 0,
- "alternateTitle": "child",
- "alternateUrl": "",
- "description": "",
- "metaKeywords": "",
- "metaExtra": "",
- "children": [],
- "assetCount": 0,
- "assetCountCumulative": 0
+ "assetCount": 14,
+ "assetCountCumulative": 14
},
{
"name": "clothing",
@@ -1389,8 +1066,8 @@
"metaKeywords": "",
"metaExtra": "",
"children": [],
- "assetCount": 4,
- "assetCountCumulative": 4
+ "assetCount": 9,
+ "assetCountCumulative": 9
},
{
"name": "fantasy",
@@ -1406,8 +1083,8 @@
"metaKeywords": "",
"metaExtra": "",
"children": [],
- "assetCount": 0,
- "assetCountCumulative": 0
+ "assetCount": 2,
+ "assetCountCumulative": 2
},
{
"name": "man",
@@ -1423,42 +1100,8 @@
"metaKeywords": "",
"metaExtra": "",
"children": [],
- "assetCount": 7,
- "assetCountCumulative": 7
- },
- {
- "name": "people",
- "slug": "people",
- "active": true,
- "thumbnail": null,
- "thumbnailWidth": null,
- "thumbnailHeight": null,
- "order": 0,
- "alternateTitle": "people",
- "alternateUrl": "",
- "description": "",
- "metaKeywords": "",
- "metaExtra": "",
- "children": [],
- "assetCount": 0,
- "assetCountCumulative": 0
- },
- {
- "name": "sci-fi",
- "slug": "sci-fi-character",
- "active": true,
- "thumbnail": null,
- "thumbnailWidth": null,
- "thumbnailHeight": null,
- "order": 0,
- "alternateTitle": "sci-fi",
- "alternateUrl": "",
- "description": "",
- "metaKeywords": "",
- "metaExtra": "",
- "children": [],
- "assetCount": 0,
- "assetCountCumulative": 0
+ "assetCount": 13,
+ "assetCountCumulative": 13
},
{
"name": "woman",
@@ -1478,8 +1121,8 @@
"assetCountCumulative": 7
}
],
- "assetCount": 22,
- "assetCountCumulative": 22
+ "assetCount": 46,
+ "assetCountCumulative": 46
},
{
"name": "exterior",
@@ -1509,8 +1152,8 @@
"metaKeywords": "",
"metaExtra": "",
"children": [],
- "assetCount": 4,
- "assetCountCumulative": 4
+ "assetCount": 24,
+ "assetCountCumulative": 24
},
{
"name": "cityspace",
@@ -1526,76 +1169,8 @@
"metaKeywords": "",
"metaExtra": "",
"children": [],
- "assetCount": 6,
- "assetCountCumulative": 6
- },
- {
- "name": "historic",
- "slug": "historic",
- "active": true,
- "thumbnail": null,
- "thumbnailWidth": null,
- "thumbnailHeight": null,
- "order": 0,
- "alternateTitle": "historic",
- "alternateUrl": "",
- "description": "",
- "metaKeywords": "",
- "metaExtra": "",
- "children": [],
- "assetCount": 0,
- "assetCountCumulative": 0
- },
- {
- "name": "house",
- "slug": "house",
- "active": true,
- "thumbnail": null,
- "thumbnailWidth": null,
- "thumbnailHeight": null,
- "order": 0,
- "alternateTitle": "house",
- "alternateUrl": "",
- "description": "",
- "metaKeywords": "",
- "metaExtra": "",
- "children": [],
- "assetCount": 0,
- "assetCountCumulative": 0
- },
- {
- "name": "industrial",
- "slug": "industrial-exterior",
- "active": true,
- "thumbnail": null,
- "thumbnailWidth": null,
- "thumbnailHeight": null,
- "order": 0,
- "alternateTitle": "industrial",
- "alternateUrl": "",
- "description": "",
- "metaKeywords": "",
- "metaExtra": "",
- "children": [],
- "assetCount": 0,
- "assetCountCumulative": 0
- },
- {
- "name": "landmark",
- "slug": "landmark",
- "active": true,
- "thumbnail": null,
- "thumbnailWidth": null,
- "thumbnailHeight": null,
- "order": 0,
- "alternateTitle": "landmark",
- "alternateUrl": "",
- "description": "",
- "metaKeywords": "",
- "metaExtra": "",
- "children": [],
- "assetCount": 0,
- "assetCountCumulative": 0
+ "assetCount": 24,
+ "assetCountCumulative": 24
},
{
"name": "landscape",
@@ -1611,8 +1186,8 @@
"metaKeywords": "",
"metaExtra": "",
"children": [],
- "assetCount": 12,
- "assetCountCumulative": 12
+ "assetCount": 35,
+ "assetCountCumulative": 35
},
{
"name": "public",
@@ -1628,42 +1203,8 @@
"metaKeywords": "",
"metaExtra": "",
"children": [],
- "assetCount": 4,
- "assetCountCumulative": 4
- },
- {
- "name": "sci-fi",
- "slug": "sci-fi",
- "active": true,
- "thumbnail": null,
- "thumbnailWidth": null,
- "thumbnailHeight": null,
- "order": 0,
- "alternateTitle": "sci-fi",
- "alternateUrl": "",
- "description": "",
- "metaKeywords": "",
- "metaExtra": "",
- "children": [],
- "assetCount": 0,
- "assetCountCumulative": 0
- },
- {
- "name": "stadium",
- "slug": "stadium",
- "active": true,
- "thumbnail": null,
- "thumbnailWidth": null,
- "thumbnailHeight": null,
- "order": 0,
- "alternateTitle": "stadium",
- "alternateUrl": "",
- "description": "",
- "metaKeywords": "",
- "metaExtra": "",
- "children": [],
- "assetCount": 0,
- "assetCountCumulative": 0
+ "assetCount": 12,
+ "assetCountCumulative": 12
},
{
"name": "street",
@@ -1679,12 +1220,12 @@
"metaKeywords": "",
"metaExtra": "",
"children": [],
- "assetCount": 1,
- "assetCountCumulative": 1
+ "assetCount": 35,
+ "assetCountCumulative": 35
}
],
- "assetCount": 27,
- "assetCountCumulative": 27
+ "assetCount": 132,
+ "assetCountCumulative": 132
},
{
"name": "food & drink",
@@ -1714,8 +1255,8 @@
"metaKeywords": "",
"metaExtra": "",
"children": [],
- "assetCount": 10,
- "assetCountCumulative": 10
+ "assetCount": 33,
+ "assetCountCumulative": 33
},
{
"name": "drink",
@@ -1731,8 +1272,8 @@
"metaKeywords": "",
"metaExtra": "",
"children": [],
- "assetCount": 14,
- "assetCountCumulative": 14
+ "assetCount": 27,
+ "assetCountCumulative": 27
},
{
"name": "drugs",
@@ -1748,8 +1289,8 @@
"metaKeywords": "",
"metaExtra": "",
"children": [],
- "assetCount": 3,
- "assetCountCumulative": 3
+ "assetCount": 7,
+ "assetCountCumulative": 7
},
{
"name": "food",
@@ -1765,12 +1306,12 @@
"metaKeywords": "",
"metaExtra": "",
"children": [],
- "assetCount": 8,
- "assetCountCumulative": 8
+ "assetCount": 47,
+ "assetCountCumulative": 47
}
],
- "assetCount": 35,
- "assetCountCumulative": 35
+ "assetCount": 117,
+ "assetCountCumulative": 117
},
{
"name": "furniture",
@@ -1800,8 +1341,8 @@
"metaKeywords": "",
"metaExtra": "",
"children": [],
- "assetCount": 3,
- "assetCountCumulative": 3
+ "assetCount": 21,
+ "assetCountCumulative": 21
},
{
"name": "carpet",
@@ -1817,8 +1358,8 @@
"metaKeywords": "",
"metaExtra": "",
"children": [],
- "assetCount": 7,
- "assetCountCumulative": 7
+ "assetCount": 10,
+ "assetCountCumulative": 10
},
{
"name": "desk",
@@ -1834,8 +1375,8 @@
"metaKeywords": "",
"metaExtra": "",
"children": [],
- "assetCount": 5,
- "assetCountCumulative": 5
+ "assetCount": 21,
+ "assetCountCumulative": 21
},
{
"name": "fireplace",
@@ -1851,8 +1392,8 @@
"metaKeywords": "",
"metaExtra": "",
"children": [],
- "assetCount": 1,
- "assetCountCumulative": 1
+ "assetCount": 8,
+ "assetCountCumulative": 8
},
{
"name": "lighting",
@@ -1868,8 +1409,8 @@
"metaKeywords": "",
"metaExtra": "",
"children": [],
- "assetCount": 19,
- "assetCountCumulative": 19
+ "assetCount": 106,
+ "assetCountCumulative": 106
},
{
"name": "seating",
@@ -1885,8 +1426,8 @@
"metaKeywords": "",
"metaExtra": "",
"children": [],
- "assetCount": 36,
- "assetCountCumulative": 36
+ "assetCount": 179,
+ "assetCountCumulative": 179
},
{
"name": "shelving",
@@ -1902,8 +1443,8 @@
"metaKeywords": "",
"metaExtra": "",
"children": [],
- "assetCount": 17,
- "assetCountCumulative": 17
+ "assetCount": 38,
+ "assetCountCumulative": 38
},
{
"name": "sofa",
@@ -1919,8 +1460,8 @@
"metaKeywords": "",
"metaExtra": "",
"children": [],
- "assetCount": 9,
- "assetCountCumulative": 9
+ "assetCount": 57,
+ "assetCountCumulative": 57
},
{
"name": "storage",
@@ -1936,8 +1477,8 @@
"metaKeywords": "",
"metaExtra": "",
"children": [],
- "assetCount": 24,
- "assetCountCumulative": 24
+ "assetCount": 65,
+ "assetCountCumulative": 65
},
{
"name": "table",
@@ -1953,12 +1494,12 @@
"metaKeywords": "",
"metaExtra": "",
"children": [],
- "assetCount": 36,
- "assetCountCumulative": 36
+ "assetCount": 106,
+ "assetCountCumulative": 106
}
],
- "assetCount": 157,
- "assetCountCumulative": 157
+ "assetCount": 611,
+ "assetCountCumulative": 611
},
{
"name": "industrial",
@@ -1975,57 +1516,6 @@
"metaExtra": "",
"children": [
{
- "name": "agriculture",
- "slug": "agriculture",
- "active": true,
- "thumbnail": null,
- "thumbnailWidth": null,
- "thumbnailHeight": null,
- "order": 0,
- "alternateTitle": "agriculture",
- "alternateUrl": "",
- "description": "",
- "metaKeywords": "",
- "metaExtra": "",
- "children": [],
- "assetCount": 0,
- "assetCountCumulative": 0
- },
- {
- "name": "communication",
- "slug": "communication",
- "active": true,
- "thumbnail": null,
- "thumbnailWidth": null,
- "thumbnailHeight": null,
- "order": 0,
- "alternateTitle": "communication",
- "alternateUrl": "",
- "description": "",
- "metaKeywords": "",
- "metaExtra": "",
- "children": [],
- "assetCount": 0,
- "assetCountCumulative": 0
- },
- {
- "name": "construction",
- "slug": "construction",
- "active": true,
- "thumbnail": null,
- "thumbnailWidth": null,
- "thumbnailHeight": null,
- "order": 0,
- "alternateTitle": "construction",
- "alternateUrl": "",
- "description": "",
- "metaKeywords": "",
- "metaExtra": "",
- "children": [],
- "assetCount": 0,
- "assetCountCumulative": 0
- },
- {
"name": "container",
"slug": "container-industrial",
"active": true,
@@ -2039,25 +1529,8 @@
"metaKeywords": "",
"metaExtra": "",
"children": [],
- "assetCount": 11,
- "assetCountCumulative": 11
- },
- {
- "name": "machine",
- "slug": "machine",
- "active": true,
- "thumbnail": null,
- "thumbnailWidth": null,
- "thumbnailHeight": null,
- "order": 0,
- "alternateTitle": "machine",
- "alternateUrl": "",
- "description": "",
- "metaKeywords": "",
- "metaExtra": "",
- "children": [],
- "assetCount": 0,
- "assetCountCumulative": 0
+ "assetCount": 48,
+ "assetCountCumulative": 48
},
{
"name": "tool",
@@ -2073,29 +1546,12 @@
"metaKeywords": "",
"metaExtra": "",
"children": [],
- "assetCount": 5,
- "assetCountCumulative": 5
- },
- {
- "name": "utility",
- "slug": "utility-industrial",
- "active": true,
- "thumbnail": null,
- "thumbnailWidth": null,
- "thumbnailHeight": null,
- "order": 0,
- "alternateTitle": "part",
- "alternateUrl": "",
- "description": "",
- "metaKeywords": "",
- "metaExtra": "",
- "children": [],
- "assetCount": 0,
- "assetCountCumulative": 0
+ "assetCount": 29,
+ "assetCountCumulative": 29
}
],
- "assetCount": 18,
- "assetCountCumulative": 18
+ "assetCount": 85,
+ "assetCountCumulative": 85
},
{
"name": "interior",
@@ -2125,8 +1581,8 @@
"metaKeywords": "",
"metaExtra": "",
"children": [],
- "assetCount": 24,
- "assetCountCumulative": 24
+ "assetCount": 96,
+ "assetCountCumulative": 96
},
{
"name": "bedroom",
@@ -2142,8 +1598,8 @@
"metaKeywords": "",
"metaExtra": "",
"children": [],
- "assetCount": 14,
- "assetCountCumulative": 14
+ "assetCount": 64,
+ "assetCountCumulative": 64
},
{
"name": "decoration",
@@ -2159,8 +1615,8 @@
"metaKeywords": "",
"metaExtra": "",
"children": [],
- "assetCount": 71,
- "assetCountCumulative": 71
+ "assetCount": 291,
+ "assetCountCumulative": 291
},
{
"name": "hall",
@@ -2176,8 +1632,8 @@
"metaKeywords": "",
"metaExtra": "",
"children": [],
- "assetCount": 2,
- "assetCountCumulative": 2
+ "assetCount": 8,
+ "assetCountCumulative": 8
},
{
"name": "kids room",
@@ -2193,8 +1649,8 @@
"metaKeywords": "",
"metaExtra": "",
"children": [],
- "assetCount": 9,
- "assetCountCumulative": 9
+ "assetCount": 42,
+ "assetCountCumulative": 42
},
{
"name": "kitchen",
@@ -2210,8 +1666,8 @@
"metaKeywords": "",
"metaExtra": "",
"children": [],
- "assetCount": 69,
- "assetCountCumulative": 69
+ "assetCount": 181,
+ "assetCountCumulative": 181
},
{
"name": "living room",
@@ -2227,8 +1683,8 @@
"metaKeywords": "",
"metaExtra": "",
"children": [],
- "assetCount": 17,
- "assetCountCumulative": 17
+ "assetCount": 127,
+ "assetCountCumulative": 127
},
{
"name": "office",
@@ -2244,8 +1700,8 @@
"metaKeywords": "",
"metaExtra": "",
"children": [],
- "assetCount": 13,
- "assetCountCumulative": 13
+ "assetCount": 48,
+ "assetCountCumulative": 48
},
{
"name": "utility",
@@ -2261,12 +1717,12 @@
"metaKeywords": "",
"metaExtra": "",
"children": [],
- "assetCount": 34,
- "assetCountCumulative": 34
+ "assetCount": 74,
+ "assetCountCumulative": 74
}
],
- "assetCount": 253,
- "assetCountCumulative": 253
+ "assetCount": 932,
+ "assetCountCumulative": 932
},
{
"name": "military",
@@ -2283,23 +1739,6 @@
"metaExtra": "",
"children": [
{
- "name": "air",
- "slug": "air",
- "active": true,
- "thumbnail": null,
- "thumbnailWidth": null,
- "thumbnailHeight": null,
- "order": 0,
- "alternateTitle": "air",
- "alternateUrl": "",
- "description": "",
- "metaKeywords": "",
- "metaExtra": "",
- "children": [],
- "assetCount": 0,
- "assetCountCumulative": 0
- },
- {
"name": "equipment",
"slug": "equipment",
"active": true,
@@ -2313,25 +1752,8 @@
"metaKeywords": "",
"metaExtra": "",
"children": [],
- "assetCount": 0,
- "assetCountCumulative": 0
- },
- {
- "name": "ground",
- "slug": "ground",
- "active": true,
- "thumbnail": null,
- "thumbnailWidth": null,
- "thumbnailHeight": null,
- "order": 0,
- "alternateTitle": "ground",
- "alternateUrl": "",
- "description": "",
- "metaKeywords": "",
- "metaExtra": "",
- "children": [],
- "assetCount": 0,
- "assetCountCumulative": 0
+ "assetCount": 3,
+ "assetCountCumulative": 3
},
{
"name": "historic",
@@ -2347,25 +1769,8 @@
"metaKeywords": "",
"metaExtra": "",
"children": [],
- "assetCount": 0,
- "assetCountCumulative": 0
- },
- {
- "name": "naval",
- "slug": "naval",
- "active": true,
- "thumbnail": null,
- "thumbnailWidth": null,
- "thumbnailHeight": null,
- "order": 0,
- "alternateTitle": "naval",
- "alternateUrl": "",
- "description": "",
- "metaKeywords": "",
- "metaExtra": "",
- "children": [],
- "assetCount": 0,
- "assetCountCumulative": 0
+ "assetCount": 6,
+ "assetCountCumulative": 6
},
{
"name": "weapon",
@@ -2381,12 +1786,12 @@
"metaKeywords": "",
"metaExtra": "",
"children": [],
- "assetCount": 3,
- "assetCountCumulative": 3
+ "assetCount": 29,
+ "assetCountCumulative": 29
}
],
- "assetCount": 3,
- "assetCountCumulative": 3
+ "assetCount": 40,
+ "assetCountCumulative": 40
},
{
"name": "music",
@@ -2416,63 +1821,12 @@
"metaKeywords": "",
"metaExtra": "",
"children": [],
- "assetCount": 1,
- "assetCountCumulative": 1
- },
- {
- "name": "instruments",
- "slug": "instruments",
- "active": true,
- "thumbnail": null,
- "thumbnailWidth": null,
- "thumbnailHeight": null,
- "order": 0,
- "alternateTitle": "instruments",
- "alternateUrl": "",
- "description": "",
- "metaKeywords": "",
- "metaExtra": "",
- "children": [],
- "assetCount": 0,
- "assetCountCumulative": 0
- },
- {
- "name": "stage",
- "slug": "stage",
- "active": true,
- "thumbnail": null,
- "thumbnailWidth": null,
- "thumbnailHeight": null,
- "order": 0,
- "alternateTitle": "stage",
- "alternateUrl": "",
- "description": "",
- "metaKeywords": "",
- "metaExtra": "",
- "children": [],
- "assetCount": 0,
- "assetCountCumulative": 0
- },
- {
- "name": "studio",
- "slug": "studio",
- "active": true,
- "thumbnail": null,
- "thumbnailWidth": null,
- "thumbnailHeight": null,
- "order": 0,
- "alternateTitle": "studio",
- "alternateUrl": "",
- "description": "",
- "metaKeywords": "",
- "metaExtra": "",
- "children": [],
- "assetCount": 1,
- "assetCountCumulative": 1
+ "assetCount": 13,
+ "assetCountCumulative": 13
}
],
- "assetCount": 2,
- "assetCountCumulative": 2
+ "assetCount": 13,
+ "assetCountCumulative": 13
},
{
"name": "nature",
@@ -2502,8 +1856,8 @@
"metaKeywords": "",
"metaExtra": "",
"children": [],
- "assetCount": 3,
- "assetCountCumulative": 3
+ "assetCount": 7,
+ "assetCountCumulative": 7
},
{
"name": "atmosphere",
@@ -2519,8 +1873,8 @@
"metaKeywords": "",
"metaExtra": "",
"children": [],
- "assetCount": 0,
- "assetCountCumulative": 0
+ "assetCount": 2,
+ "assetCountCumulative": 2
},
{
"name": "landscape",
@@ -2536,8 +1890,8 @@
"metaKeywords": "",
"metaExtra": "",
"children": [],
- "assetCount": 9,
- "assetCountCumulative": 9
+ "assetCount": 35,
+ "assetCountCumulative": 35
},
{
"name": "plant",
@@ -2553,8 +1907,8 @@
"metaKeywords": "",
"metaExtra": "",
"children": [],
- "assetCount": 15,
- "assetCountCumulative": 15
+ "assetCount": 48,
+ "assetCountCumulative": 48
},
{
"name": "tree",
@@ -2570,29 +1924,12 @@
"metaKeywords": "",
"metaExtra": "",
"children": [],
- "assetCount": 14,
- "assetCountCumulative": 14
- },
- {
- "name": "weather",
- "slug": "weather",
- "active": true,
- "thumbnail": null,
- "thumbnailWidth": null,
- "thumbnailHeight": null,
- "order": 0,
- "alternateTitle": "weather",
- "alternateUrl": "",
- "description": "",
- "metaKeywords": "",
- "metaExtra": "",
- "children": [],
- "assetCount": 0,
- "assetCountCumulative": 0
+ "assetCount": 47,
+ "assetCountCumulative": 47
}
],
- "assetCount": 41,
- "assetCountCumulative": 41
+ "assetCount": 141,
+ "assetCountCumulative": 141
},
{
"name": "space",
@@ -2609,57 +1946,6 @@
"metaExtra": "",
"children": [
{
- "name": "astronomy",
- "slug": "astronomy",
- "active": true,
- "thumbnail": null,
- "thumbnailWidth": null,
- "thumbnailHeight": null,
- "order": 0,
- "alternateTitle": "astronomy",
- "alternateUrl": "",
- "description": "",
- "metaKeywords": "",
- "metaExtra": "",
- "children": [],
- "assetCount": 0,
- "assetCountCumulative": 0
- },
- {
- "name": "planets",
- "slug": "planets",
- "active": true,
- "thumbnail": null,
- "thumbnailWidth": null,
- "thumbnailHeight": null,
- "order": 0,
- "alternateTitle": "planets",
- "alternateUrl": "",
- "description": "",
- "metaKeywords": "",
- "metaExtra": "",
- "children": [],
- "assetCount": 0,
- "assetCountCumulative": 0
- },
- {
- "name": "sci-fi",
- "slug": "sci-fi-space",
- "active": true,
- "thumbnail": null,
- "thumbnailWidth": null,
- "thumbnailHeight": null,
- "order": 0,
- "alternateTitle": "sci-fi",
- "alternateUrl": "",
- "description": "",
- "metaKeywords": "",
- "metaExtra": "",
- "children": [],
- "assetCount": 0,
- "assetCountCumulative": 0
- },
- {
"name": "spacecraft",
"slug": "spacecraft",
"active": true,
@@ -2673,12 +1959,12 @@
"metaKeywords": "",
"metaExtra": "",
"children": [],
- "assetCount": 1,
- "assetCountCumulative": 1
+ "assetCount": 9,
+ "assetCountCumulative": 9
}
],
- "assetCount": 1,
- "assetCountCumulative": 1
+ "assetCount": 10,
+ "assetCountCumulative": 10
},
{
"name": "sports",
@@ -2695,23 +1981,6 @@
"metaExtra": "",
"children": [
{
- "name": "animal",
- "slug": "animal",
- "active": true,
- "thumbnail": null,
- "thumbnailWidth": null,
- "thumbnailHeight": null,
- "order": 0,
- "alternateTitle": "animal",
- "alternateUrl": "",
- "description": "",
- "metaKeywords": "",
- "metaExtra": "",
- "children": [],
- "assetCount": 0,
- "assetCountCumulative": 0
- },
- {
"name": "exercise",
"slug": "exercise",
"active": true,
@@ -2725,8 +1994,8 @@
"metaKeywords": "",
"metaExtra": "",
"children": [],
- "assetCount": 1,
- "assetCountCumulative": 1
+ "assetCount": 12,
+ "assetCountCumulative": 12
},
{
"name": "extreme",
@@ -2742,8 +2011,8 @@
"metaKeywords": "",
"metaExtra": "",
"children": [],
- "assetCount": 1,
- "assetCountCumulative": 1
+ "assetCount": 4,
+ "assetCountCumulative": 4
},
{
"name": "individual",
@@ -2759,8 +2028,8 @@
"metaKeywords": "",
"metaExtra": "",
"children": [],
- "assetCount": 1,
- "assetCountCumulative": 1
+ "assetCount": 5,
+ "assetCountCumulative": 5
},
{
"name": "outdoor",
@@ -2776,8 +2045,8 @@
"metaKeywords": "",
"metaExtra": "",
"children": [],
- "assetCount": 1,
- "assetCountCumulative": 1
+ "assetCount": 4,
+ "assetCountCumulative": 4
},
{
"name": "team",
@@ -2793,12 +2062,12 @@
"metaKeywords": "",
"metaExtra": "",
"children": [],
- "assetCount": 3,
- "assetCountCumulative": 3
+ "assetCount": 6,
+ "assetCountCumulative": 6
}
],
- "assetCount": 7,
- "assetCountCumulative": 7
+ "assetCount": 31,
+ "assetCountCumulative": 31
},
{
"name": "technology",
@@ -2815,23 +2084,6 @@
"metaExtra": "",
"children": [
{
- "name": "ai",
- "slug": "ai",
- "active": true,
- "thumbnail": null,
- "thumbnailWidth": null,
- "thumbnailHeight": null,
- "order": 0,
- "alternateTitle": "ai",
- "alternateUrl": "",
- "description": "",
- "metaKeywords": "",
- "metaExtra": "",
- "children": [],
- "assetCount": 0,
- "assetCountCumulative": 0
- },
- {
"name": "audio",
"slug": "audio",
"active": true,
@@ -2845,8 +2097,8 @@
"metaKeywords": "",
"metaExtra": "",
"children": [],
- "assetCount": 1,
- "assetCountCumulative": 1
+ "assetCount": 16,
+ "assetCountCumulative": 16
},
{
"name": "computer",
@@ -2862,25 +2114,8 @@
"metaKeywords": "",
"metaExtra": "",
"children": [],
- "assetCount": 2,
- "assetCountCumulative": 2
- },
- {
- "name": "medical",
- "slug": "medical",
- "active": true,
- "thumbnail": null,
- "thumbnailWidth": null,
- "thumbnailHeight": null,
- "order": 0,
- "alternateTitle": "medical",
- "alternateUrl": "",
- "description": "",
- "metaKeywords": "",
- "metaExtra": "",
- "children": [],
- "assetCount": 0,
- "assetCountCumulative": 0
+ "assetCount": 102,
+ "assetCountCumulative": 102
},
{
"name": "phone",
@@ -2896,8 +2131,8 @@
"metaKeywords": "",
"metaExtra": "",
"children": [],
- "assetCount": 2,
- "assetCountCumulative": 2
+ "assetCount": 8,
+ "assetCountCumulative": 8
},
{
"name": "photography",
@@ -2913,46 +2148,12 @@
"metaKeywords": "",
"metaExtra": "",
"children": [],
- "assetCount": 1,
- "assetCountCumulative": 1
- },
- {
- "name": "science",
- "slug": "science",
- "active": true,
- "thumbnail": null,
- "thumbnailWidth": null,
- "thumbnailHeight": null,
- "order": 0,
- "alternateTitle": "science",
- "alternateUrl": "",
- "description": "",
- "metaKeywords": "",
- "metaExtra": "",
- "children": [],
- "assetCount": 0,
- "assetCountCumulative": 0
- },
- {
- "name": "video",
- "slug": "video",
- "active": true,
- "thumbnail": null,
- "thumbnailWidth": null,
- "thumbnailHeight": null,
- "order": 0,
- "alternateTitle": "video",
- "alternateUrl": "",
- "description": "",
- "metaKeywords": "",
- "metaExtra": "",
- "children": [],
- "assetCount": 0,
- "assetCountCumulative": 0
+ "assetCount": 6,
+ "assetCountCumulative": 6
}
],
- "assetCount": 6,
- "assetCountCumulative": 6
+ "assetCount": 138,
+ "assetCountCumulative": 138
},
{
"name": "vehicle",
@@ -2969,40 +2170,6 @@
"metaExtra": "",
"children": [
{
- "name": "bicycle",
- "slug": "bicycle",
- "active": true,
- "thumbnail": null,
- "thumbnailWidth": null,
- "thumbnailHeight": null,
- "order": 0,
- "alternateTitle": "bicycle",
- "alternateUrl": "",
- "description": "",
- "metaKeywords": "",
- "metaExtra": "",
- "children": [],
- "assetCount": 0,
- "assetCountCumulative": 0
- },
- {
- "name": "bus",
- "slug": "bus",
- "active": true,
- "thumbnail": null,
- "thumbnailWidth": null,
- "thumbnailHeight": null,
- "order": 0,
- "alternateTitle": "bus",
- "alternateUrl": "",
- "description": "",
- "metaKeywords": "",
- "metaExtra": "",
- "children": [],
- "assetCount": 0,
- "assetCountCumulative": 0
- },
- {
"name": "car",
"slug": "car",
"active": true,
@@ -3016,25 +2183,8 @@
"metaKeywords": "",
"metaExtra": "",
"children": [],
- "assetCount": 9,
- "assetCountCumulative": 9
- },
- {
- "name": "historic",
- "slug": "historic-vehicle",
- "active": true,
- "thumbnail": null,
- "thumbnailWidth": null,
- "thumbnailHeight": null,
- "order": 0,
- "alternateTitle": "historic",
- "alternateUrl": "",
- "description": "",
- "metaKeywords": "",
- "metaExtra": "",
- "children": [],
- "assetCount": 0,
- "assetCountCumulative": 0
+ "assetCount": 43,
+ "assetCountCumulative": 43
},
{
"name": "industrial",
@@ -3050,25 +2200,8 @@
"metaKeywords": "",
"metaExtra": "",
"children": [],
- "assetCount": 1,
- "assetCountCumulative": 1
- },
- {
- "name": "motorcycle",
- "slug": "motorcycle",
- "active": true,
- "thumbnail": null,
- "thumbnailWidth": null,
- "thumbnailHeight": null,
- "order": 0,
- "alternateTitle": "motorcycle",
- "alternateUrl": "",
- "description": "",
- "metaKeywords": "",
- "metaExtra": "",
- "children": [],
- "assetCount": 0,
- "assetCountCumulative": 0
+ "assetCount": 5,
+ "assetCountCumulative": 5
},
{
"name": "part",
@@ -3084,222 +2217,32 @@
"metaKeywords": "",
"metaExtra": "",
"children": [],
- "assetCount": 8,
- "assetCountCumulative": 8
- },
- {
- "name": "train",
- "slug": "train",
- "active": true,
- "thumbnail": null,
- "thumbnailWidth": null,
- "thumbnailHeight": null,
- "order": 0,
- "alternateTitle": "train",
- "alternateUrl": "",
- "description": "",
- "metaKeywords": "",
- "metaExtra": "",
- "children": [],
- "assetCount": 0,
- "assetCountCumulative": 0
- },
- {
- "name": "truck",
- "slug": "truck",
- "active": true,
- "thumbnail": null,
- "thumbnailWidth": null,
- "thumbnailHeight": null,
- "order": 0,
- "alternateTitle": "truck",
- "alternateUrl": "",
- "description": "",
- "metaKeywords": "",
- "metaExtra": "",
- "children": [],
- "assetCount": 0,
- "assetCountCumulative": 0
- }
- ],
- "assetCount": 18,
- "assetCountCumulative": 18
- },
- {
- "name": "watercraft",
- "slug": "watercraft",
- "active": true,
- "thumbnail": null,
- "thumbnailWidth": null,
- "thumbnailHeight": null,
- "order": 0,
- "alternateTitle": "watercraft",
- "alternateUrl": "",
- "description": "",
- "metaKeywords": "",
- "metaExtra": "",
- "children": [
- {
- "name": "historic",
- "slug": "historic-watercraft",
- "active": true,
- "thumbnail": null,
- "thumbnailWidth": null,
- "thumbnailHeight": null,
- "order": 0,
- "alternateTitle": "historic",
- "alternateUrl": "",
- "description": "",
- "metaKeywords": "",
- "metaExtra": "",
- "children": [],
- "assetCount": 0,
- "assetCountCumulative": 0
- },
- {
- "name": "industrial",
- "slug": "industrial-watercraft",
- "active": true,
- "thumbnail": null,
- "thumbnailWidth": null,
- "thumbnailHeight": null,
- "order": 0,
- "alternateTitle": "industrial",
- "alternateUrl": "",
- "description": "",
- "metaKeywords": "",
- "metaExtra": "",
- "children": [],
- "assetCount": 0,
- "assetCountCumulative": 0
- },
- {
- "name": "part",
- "slug": "part-watercraft",
- "active": true,
- "thumbnail": null,
- "thumbnailWidth": null,
- "thumbnailHeight": null,
- "order": 0,
- "alternateTitle": "part",
- "alternateUrl": "",
- "description": "",
- "metaKeywords": "",
- "metaExtra": "",
- "children": [],
- "assetCount": 0,
- "assetCountCumulative": 0
- },
- {
- "name": "personal",
- "slug": "personal",
- "active": true,
- "thumbnail": null,
- "thumbnailWidth": null,
- "thumbnailHeight": null,
- "order": 0,
- "alternateTitle": "personal",
- "alternateUrl": "",
- "description": "",
- "metaKeywords": "",
- "metaExtra": "",
- "children": [],
- "assetCount": 0,
- "assetCountCumulative": 0
+ "assetCount": 20,
+ "assetCountCumulative": 20
},
{
- "name": "recreational",
- "slug": "recreational",
+ "name": "Animals",
+ "slug": "animals",
"active": true,
"thumbnail": null,
"thumbnailWidth": null,
"thumbnailHeight": null,
"order": 0,
- "alternateTitle": "recreational",
+ "alternateTitle": "Animals",
"alternateUrl": "",
"description": "",
"metaKeywords": "",
"metaExtra": "",
"children": [],
"assetCount": 0,
- "assetCountCumulative": 0
- }
- ],
- "assetCount": 0,
- "assetCountCumulative": 0
- }
- ],
- "assetCount": 621,
- "assetCountCumulative": 621
- },
- {
- "name": "texture",
- "slug": "texture",
- "active": true,
- "thumbnail": null,
- "thumbnailWidth": null,
- "thumbnailHeight": null,
- "order": 0,
- "alternateTitle": "texture",
- "alternateUrl": "",
- "description": "",
- "metaKeywords": "",
- "metaExtra": "",
- "children": [
- {
- "name": "Animals",
- "slug": "animals",
- "active": true,
- "thumbnail": null,
- "thumbnailWidth": null,
- "thumbnailHeight": null,
- "order": 0,
- "alternateTitle": "Animals",
- "alternateUrl": "",
- "description": "",
- "metaKeywords": "",
- "metaExtra": "",
- "children": [
- {
- "name": "Mammals",
- "slug": "mammals",
- "active": true,
- "thumbnail": null,
- "thumbnailWidth": null,
- "thumbnailHeight": null,
- "order": 0,
- "alternateTitle": "Mammals",
- "alternateUrl": "",
- "description": "",
- "metaKeywords": "",
- "metaExtra": "",
- "children": [],
- "assetCount": 0,
- "assetCountCumulative": 0
- },
- {
- "name": "Plants",
- "slug": "plants",
- "active": true,
- "thumbnail": null,
- "thumbnailWidth": null,
- "thumbnailHeight": null,
- "order": 0,
- "alternateTitle": "Plants",
- "alternateUrl": "",
- "description": "",
- "metaKeywords": "",
- "metaExtra": "",
- "children": [],
- "assetCount": 0,
- "assetCountCumulative": 0
+ "assetCountCumulative": 11
}
],
- "assetCount": 0,
- "assetCountCumulative": 1
+ "assetCount": 70,
+ "assetCountCumulative": 70
}
],
- "assetCount": 0,
- "assetCountCumulative": 0
+ "assetCount": 2572,
+ "assetCountCumulative": 2572
}
] \ No newline at end of file
diff --git a/blenderkit/download.py b/blenderkit/download.py
index 7ec425ce..545b06f2 100644
--- a/blenderkit/download.py
+++ b/blenderkit/download.py
@@ -307,17 +307,27 @@ def append_asset(asset_data, **kwargs): # downloaders=[], location=None,
# copy for override
al = sprops.append_link
- import_as = sprops.import_as
# set consistency for objects already in scene, otherwise this literally breaks blender :)
ain = asset_in_scene(asset_data)
+
# override based on history
if ain is not False:
if ain == 'LINKED':
al = 'LINK'
- import_as = 'GROUP'
else:
al = 'APPEND'
- import_as = 'INDIVIDUAL'
+ if asset_data['assetType'] == 'model':
+ source_parent = get_asset_in_scene(asset_data)
+ parent, new_obs = duplicate_asset(source=source_parent, **kwargs)
+ parent.location = kwargs['model_location']
+ parent.rotation_euler = kwargs['model_rotation']
+ # this is the case where the asset is already in the scene and should be duplicated instead.
+ # the duplication may not work perfectly (hidden or unselectable objects), so check the result and return only on success.
+ # if it succeeded, no further operations are needed - all asset data is already set up on the original asset.
+ if new_obs:
+ bpy.ops.wm.undo_push_context(message='add %s to scene' % asset_data['name'])
+ return
# first get conditions for append link
link = al == 'LINK'
@@ -334,8 +344,8 @@ def append_asset(asset_data, **kwargs): # downloaders=[], location=None,
name=asset_data['name'])
return
- if sprops.import_as == 'GROUP':
- parent, newobs = append_link.link_collection(file_names[-1],
+ if link:
+ parent, new_obs = append_link.link_collection(file_names[-1],
location=downloader['location'],
rotation=downloader['rotation'],
link=link,
@@ -343,7 +353,8 @@ def append_asset(asset_data, **kwargs): # downloaders=[], location=None,
parent=kwargs.get('parent'))
else:
- parent, newobs = append_link.append_objects(file_names[-1],
+
+ parent, new_obs = append_link.append_objects(file_names[-1],
location=downloader['location'],
rotation=downloader['rotation'],
link=link,
@@ -356,19 +367,22 @@ def append_asset(asset_data, **kwargs): # downloaders=[], location=None,
parent.empty_display_size = size_min
elif kwargs.get('model_location') is not None:
- if sprops.import_as == 'GROUP':
- parent, newobs = append_link.link_collection(file_names[-1],
+ if link:
+ parent, new_obs = append_link.link_collection(file_names[-1],
location=kwargs['model_location'],
rotation=kwargs['model_rotation'],
link=link,
name=asset_data['name'],
parent=kwargs.get('parent'))
else:
- parent, newobs = append_link.append_objects(file_names[-1],
+ parent, new_obs = append_link.append_objects(file_names[-1],
location=kwargs['model_location'],
rotation=kwargs['model_rotation'],
link=link,
+ name=asset_data['name'],
parent=kwargs.get('parent'))
+
+ # scale the parent Empty so linked assets don't clutter the scene.
if parent.type == 'EMPTY' and link:
bmin = asset_data['bbox_min']
bmax = asset_data['bbox_max']
@@ -723,14 +737,74 @@ def try_finished_append(asset_data, **kwargs): # location=None, material_target
for f in file_names:
try:
os.remove(f)
- except:
- e = sys.exc_info()[0]
+ except Exception as e:
+ # e = sys.exc_info()[0]
print(e)
pass;
done = False
return done
+def get_asset_in_scene(asset_data):
+ '''Tries to find an already appended copy of a particular asset in the scene, so it can be duplicated instead of being appended again.'''
+ scene = bpy.context.scene
+ for ob in bpy.context.scene.objects:
+ ad1 = ob.get('asset_data')
+ if not ad1:
+ continue
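+ # match on assetBaseId, which identifies the asset across versions, rather than the version-specific id.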
+ if ad1.get('assetBaseId') == asset_data['assetBaseId']:
+ return ob
+ return None
+
+
+def check_all_visible(obs):
+ '''checks all objects are visible, so they can be manipulated/copied.'''
+ for ob in obs:
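+ # visible_get() returns the evaluated visibility, including collection and view-layer restrictions.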
+ if not ob.visible_get():
+ return False
+ return True
+
+
+def check_selectible(obs):
+ '''Checks that all objects can be selected, and selects them in the same pass.
+ Selectability depends not only on hide_select but also on collection restrictions etc., so the only reliable check is to try selecting.'''
+ for ob in obs:
+ ob.select_set(True)
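+ # select_set() has no effect on objects that can't be selected, so verify the result with select_get().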
+ if not ob.select_get():
+ return False
+ return True
+
+
+def duplicate_asset(source, **kwargs):
+ '''Duplicate asset when it's already appended in the scene, so that blender's append doesn't create duplicated data.'''
+
+ # we need to save selection
+ sel = utils.selection_get()
+ bpy.ops.object.select_all(action='DESELECT')
+
+ # check visibility
+ obs = utils.get_hierarchy(source)
+ if not check_all_visible(obs):
+ return None
+ # check selectability and select in one run
+ if not check_selectible(obs):
+ return None
+
+ # duplicate the asset objects
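+ # linked=True makes linked duplicates (like Alt+D), so object data is shared instead of copied.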
+ bpy.ops.object.duplicate(linked=True)
+
+ nobs = bpy.context.selected_objects[:]
+ # find the root of the duplicated hierarchy: the object whose parent is outside the new set
+ for ob in nobs:
+ if ob.parent not in nobs:
+ parent = ob
+ break
+ # restore original selection
+ utils.selection_set(sel)
+ return parent, nobs
+
+
def asset_in_scene(asset_data):
'''checks if the asset is already in scene. If yes, modifies asset data so the asset can be reached again.'''
scene = bpy.context.scene
@@ -744,10 +818,12 @@ def asset_in_scene(asset_data):
asset_data['file_name'] = ad['file_name']
asset_data['url'] = ad['url']
- c = bpy.data.collections.get(ad['name'])
- if c is not None:
- if c.users > 0:
- return 'LINKED'
+ # browse all collections, since linked collections can have the same name.
+ for c in bpy.data.collections:
+ if c.name == ad['name']:
+ # there can also be several linked collections with the same name, so the id has to be checked as well.
+ if c.library and c.library.get('asset_data') and c.library['asset_data']['assetBaseId'] == id:
+ return 'LINKED'
return 'APPENDED'
return False
@@ -934,7 +1010,7 @@ class BlenderkitDownloadOperator(bpy.types.Operator):
if self.replace: # cleanup first, assign later.
obs = utils.get_selected_replace_adepts()
- print(obs)
+ # print(obs)
for ob in obs:
print('replace attempt ', ob.name)
if self.asset_base_id != '':
diff --git a/blenderkit/paths.py b/blenderkit/paths.py
index 2f144268..399e7555 100644
--- a/blenderkit/paths.py
+++ b/blenderkit/paths.py
@@ -16,7 +16,8 @@
#
# ##### END GPL LICENSE BLOCK #####
-import bpy, os, sys
+import bpy, os, sys, tempfile, shutil
+from blenderkit import tasks_queue, ui
_presets = os.path.join(bpy.utils.user_resource('SCRIPTS'), "presets")
BLENDERKIT_LOCAL = "http://localhost:8001"
@@ -35,6 +36,15 @@ BLENDERKIT_OAUTH_LANDING_URL = "/oauth-landing/"
BLENDERKIT_SIGNUP_URL = "https://www.blenderkit.com/accounts/register"
BLENDERKIT_SETTINGS_FILENAME = os.path.join(_presets, "bkit.json")
+def cleanup_old_folders():
+ '''Cleans up any historical folders left behind by BlenderKit. Currently this removes only the old temp folder.'''
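+ # temp files now live under the system temp directory (see get_temp_dir), so the old location can be removed safely.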
+ orig_temp = os.path.join(os.path.expanduser('~'), 'blenderkit_data', 'temp')
+ if os.path.isdir(orig_temp):
+ try:
+ shutil.rmtree(orig_temp)
+ except Exception as e:
+ print(e)
+ print("couldn't delete old temp directory")
def get_bkit_url():
# bpy.app.debug_value = 2
@@ -65,6 +75,9 @@ def get_api_url():
def get_oauth_landing_url():
return get_bkit_url() + BLENDERKIT_OAUTH_LANDING_URL
+def get_author_gallery_url(author_id):
+ return f'{get_bkit_url()}/asset-gallery?query=author_id:{author_id}'
+
def default_global_dict():
from os.path import expanduser
@@ -81,7 +94,7 @@ def get_temp_dir(subdir=None):
user_preferences = bpy.context.preferences.addons['blenderkit'].preferences
# tempdir = user_preferences.temp_dir
- tempdir = os.path.join(user_preferences.global_dir, 'temp')
+ tempdir = os.path.join(tempfile.gettempdir(), 'bkit_temp')
if tempdir.startswith('//'):
tempdir = bpy.path.abspath(tempdir)
try:
@@ -91,11 +104,14 @@ def get_temp_dir(subdir=None):
tempdir = os.path.join(tempdir, subdir)
if not os.path.exists(tempdir):
os.makedirs(tempdir)
+ cleanup_old_folders()
except:
- print('Cache directory not found. Resetting Cache folder path.')
+ tasks_queue.add_task((ui.add_report, ('Cache directory not found. Resetting Cache folder path.',)))
+
p = default_global_dict()
if p == user_preferences.global_dir:
- print('Global dir was already default, plese set a global directory in addon preferences to a dir where you have write permissions.')
+ message = 'Global dir was already default. Please set the global directory in the addon preferences to a folder where you have write permissions.'
+ tasks_queue.add_task((ui.add_report, (message,)))
return None
user_preferences.global_dir = p
tempdir = get_temp_dir(subdir = subdir)
diff --git a/blenderkit/ratings.py b/blenderkit/ratings.py
index 38dbbaf8..7d246a9f 100644
--- a/blenderkit/ratings.py
+++ b/blenderkit/ratings.py
@@ -78,6 +78,7 @@ def send_rating_to_thread_quality(url, ratings, headers):
thread = threading.Thread(target=upload_rating_thread, args=(url, ratings, headers))
thread.start()
+
def send_rating_to_thread_work_hours(url, ratings, headers):
'''Sends the rating to an upload thread; mainly intended for use from tasks_queue.
One function per property to avoid lost data due to stashing.'''
@@ -93,6 +94,7 @@ def upload_review_thread(url, reviews, headers):
def get_rating(asset_id):
+ # this function isn't used anywhere; it should probably be removed.
user_preferences = bpy.context.preferences.addons['blenderkit'].preferences
api_key = user_preferences.api_key
headers = utils.get_headers(api_key)
@@ -112,8 +114,13 @@ def update_ratings_quality(self, context):
headers = utils.get_headers(api_key)
asset = self.id_data
- bkit_ratings = asset.bkit_ratings
- url = paths.get_api_url() + 'assets/' + asset['asset_data']['id'] + '/rating/'
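+ # id_data is the datablock that owns the rating properties; when rating is driven from an operator (FastRateMenu) there is no owning datablock, hence the branch below.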
+ if asset:
+ bkit_ratings = asset.bkit_ratings
+ url = paths.get_api_url() + 'assets/' + asset['asset_data']['id'] + '/rating/'
+ else:
+ # rating invoked from an operator: the operator itself holds the rating properties and the asset id.
+ bkit_ratings = self
+ url = paths.get_api_url() + f'assets/{self.asset_id}/rating/'
if bkit_ratings.rating_quality > 0.1:
ratings = [('quality', bkit_ratings.rating_quality)]
@@ -125,10 +132,15 @@ def update_ratings_work_hours(self, context):
api_key = user_preferences.api_key
headers = utils.get_headers(api_key)
asset = self.id_data
- bkit_ratings = asset.bkit_ratings
- url = paths.get_api_url() + 'assets/' + asset['asset_data']['id'] + '/rating/'
-
- if bkit_ratings.rating_quality > 0.1:
+ if asset:
+ bkit_ratings = asset.bkit_ratings
+ url = paths.get_api_url() + 'assets/' + asset['asset_data']['id'] + '/rating/'
+ else:
+ # rating invoked from an operator: the operator itself holds the rating properties and the asset id.
+ bkit_ratings = self
+ url = paths.get_api_url() + f'assets/{self.asset_id}/rating/'
+
+ if bkit_ratings.rating_work_hours > 0.05:
ratings = [('working_hours', round(bkit_ratings.rating_work_hours, 1))]
tasks_queue.add_task((send_rating_to_thread_work_hours, (url, ratings, headers)), wait=1, only_last=True)
@@ -171,25 +183,23 @@ def upload_rating(asset):
s['assets rated'][asset['asset_data']['assetBaseId']] = True
-class StarRatingOperator(bpy.types.Operator):
- """Tooltip"""
- bl_idname = "object.blenderkit_rating"
- bl_label = "Rate the Asset Quality"
- bl_options = {'REGISTER', 'INTERNAL'}
+def get_assets_for_rating():
+ '''
+ Gets assets from the scene that could or should be rated by the user.
+ TODO this is only a draft.
- property_name: StringProperty(
- name="Rating Property",
- description="Property that is rated",
- default="",
- )
-
- rating: IntProperty(name="Rating", description="rating value", default=1, min=1, max=10)
-
- def execute(self, context):
- asset = utils.get_active_asset()
- props = asset.bkit_ratings
- props.rating_quality = self.rating
- return {'FINISHED'}
+ '''
+ assets = []
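+ # collect everything carrying 'asset_data': scene objects plus material and brush datablocks.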
+ for ob in bpy.context.scene.objects:
+ if ob.get('asset_data'):
+ assets.append(ob)
+ for m in bpy.data.materials:
+ if m.get('asset_data'):
+ assets.append(m)
+ for b in bpy.data.brushes:
+ if b.get('asset_data'):
+ assets.append(b)
+ return assets
asset_types = (
@@ -234,29 +244,215 @@ class UploadRatingOperator(bpy.types.Operator):
return wm.invoke_props_dialog(self)
-def draw_rating(layout, props, prop_name, name):
- # layout.label(name)
-
- row = layout.row(align=True)
-
+def stars_enum_callback(self, context):
+ '''Regenerates the enum items used to display the rating stars, so that filled/empty stars match the current rating.'''
+ items = []
for a in range(0, 10):
- if eval('props.' + prop_name) < a + 1:
+ if self.rating_quality < a + 1:
icon = 'SOLO_OFF'
else:
icon = 'SOLO_ON'
+ # has to have something before the number in the value, otherwise fails on registration.
+ items.append((f'{a + 1}', f'{a + 1}', '', icon, a + 1))
+ return items
+
+
+def update_quality_ui(self, context):
+ '''Converts the _ui enum value into the actual quality number.'''
+ user_preferences = bpy.context.preferences.addons['blenderkit'].preferences
+ if user_preferences.api_key == '':
+ # ui_panels.draw_not_logged_in(self, message='Please login/signup to rate assets.')
+ # bpy.ops.wm.call_menu(name='OBJECT_MT_blenderkit_login_menu')
+ # return
+ bpy.ops.wm.blenderkit_login('INVOKE_DEFAULT',
+ message='Please login/signup to rate assets. Clicking OK takes you to web login.')
+ self.rating_quality_ui = '0'
+ self.rating_quality = int(self.rating_quality_ui)
+
+
+def update_ratings_work_hours_ui(self, context):
+ user_preferences = bpy.context.preferences.addons['blenderkit'].preferences
+ if user_preferences.api_key == '':
+ # ui_panels.draw_not_logged_in(self, message='Please login/signup to rate assets.')
+ # bpy.ops.wm.call_menu(name='OBJECT_MT_blenderkit_login_menu')
+ # return
+ bpy.ops.wm.blenderkit_login('INVOKE_DEFAULT',
+ message='Please login/signup to rate assets. Clicking OK takes you to web login.')
+ self.rating_work_hours_ui = '0'
+ self.rating_work_hours = float(self.rating_work_hours_ui)
+
+def update_ratings_work_hours_ui_1_5(self, context):
+ user_preferences = bpy.context.preferences.addons['blenderkit'].preferences
+ if user_preferences.api_key == '':
+ # ui_panels.draw_not_logged_in(self, message='Please login/signup to rate assets.')
+ # bpy.ops.wm.call_menu(name='OBJECT_MT_blenderkit_login_menu')
+ # return
+ bpy.ops.wm.blenderkit_login('INVOKE_DEFAULT',
+ message='Please login/signup to rate assets. Clicking OK takes you to web login.')
+ self.rating_work_hours_ui_1_5 = '0'
+ self.rating_work_hours = float(self.rating_work_hours_ui_1_5)
+
+
+
+class FastRateMenu(Operator):
+ """Fast rating of the assets directly in the asset bar - without need to download assets."""
+ bl_idname = "wm.blenderkit_menu_rating_upload"
+ bl_label = "Send Rating"
+ bl_options = {'REGISTER', 'UNDO', 'INTERNAL'}
+
+ message: StringProperty(
+ name="message",
+ description="message",
+ default="Rating asset")
+
+ asset_id: StringProperty(
+ name="Asset Base Id",
+ description="Unique name of the asset (hidden)",
+ default="")
+
+ asset_type: StringProperty(
+ name="Asset type",
+ description="asset type",
+ default="")
+
+ rating_quality: IntProperty(name="Quality",
+ description="quality of the material",
+ default=0,
+ min=-1, max=10,
+ update=update_ratings_quality)
+
+ # the following enum only eases interaction - enums support 'drag over' selection and make it easy to draw the stars.
+ rating_quality_ui: EnumProperty(name='rating_quality_ui',
+ items=stars_enum_callback,
+ description='Rating stars 0 - 10',
+ default=None,
+ update=update_quality_ui,
+ )
+
+ rating_work_hours: FloatProperty(name="Work Hours",
+ description="How many hours did this work take?",
+ default=0.00,
+ min=0.0, max=1000, update=update_ratings_work_hours
+ )
+
+ rating_work_hours_ui: EnumProperty(name="Work Hours",
+ description="How many hours did this work take?",
+ items=[('0', '0', ''),
+ ('.5', '0.5', ''),
+ ('1', '1', ''),
+ ('2', '2', ''),
+ ('3', '3', ''),
+ ('4', '4', ''),
+ ('5', '5', ''),
+ ('10', '10', ''),
+ ('15', '15', ''),
+ ('20', '20', ''),
+ ('50', '50', ''),
+ ('100', '100', ''),
+ ('150', '150', ''),
+ ('200', '200', ''),
+ ('250', '250', ''),
+ ],
+ default='0', update=update_ratings_work_hours_ui
+ )
+
+ rating_work_hours_ui_1_5: EnumProperty(name="Work Hours",
+ description="How many hours did this work take?",
+ items=[('0', '0', ''),
+ ('.2', '0.2', ''),
+ ('.5', '0.5', ''),
+ ('1', '1', ''),
+ ('2', '2', ''),
+ ('3', '3', ''),
+ ('4', '4', ''),
+ ('5', '5', '')
+ ],
+ default='0', update=update_ratings_work_hours_ui_1_5
+ )
+
+ @classmethod
+ def poll(cls, context):
+ scene = bpy.context.scene
+ ui_props = scene.blenderkitUI
+ return ui_props.active_index > -1
- op = row.operator('object.blenderkit_rating', icon=icon, emboss=False, text='')
- op.property_name = prop_name
- op.rating = a + 1
+ def draw(self, context):
+ layout = self.layout
+ col = layout.column()
+
+ # layout.template_icon_view(bkit_ratings, property, show_labels=False, scale=6.0, scale_popup=5.0)
+ col.label(text=self.message)
+ row = col.row()
+ row.prop(self, 'rating_quality_ui', expand=True, icon_only=True, emboss=False)
+ col.separator()
+ col.prop(self, 'rating_work_hours')
+ row = col.row()
+ if self.asset_type == 'model':
+ row.prop(self, 'rating_work_hours_ui', expand=True, icon_only=False, emboss=True)
+ else:
+ row.prop(self, 'rating_work_hours_ui_1_5', expand=True, icon_only=False, emboss=True)
+
+ def execute(self, context):
+ user_preferences = bpy.context.preferences.addons['blenderkit'].preferences
+ api_key = user_preferences.api_key
+ headers = utils.get_headers(api_key)
+ url = paths.get_api_url() + f'assets/{self.asset_id}/rating/'
+
+ rtgs = []
+
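+ # the star enum stores its value as a string, so convert it back to an integer quality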
+ self.rating_quality = int(self.rating_quality_ui)
+
+ if self.rating_quality > 0.1:
+ rtgs.append(('quality', self.rating_quality))
+ if self.rating_work_hours > 0.1:
+ rtgs.append(('working_hours', round(self.rating_work_hours, 1)))
+
+ thread = threading.Thread(target=upload_rating_thread, args=(url, rtgs, headers))
+ thread.start()
+ return {'FINISHED'}
+
+ def invoke(self, context, event):
+ scene = bpy.context.scene
+ ui_props = scene.blenderkitUI
+ if ui_props.active_index > -1:
+ sr = bpy.context.scene['search results']
+ asset_data = dict(sr[ui_props.active_index])
+ self.asset_id = asset_data['id']
+ self.asset_type = asset_data['assetType']
+ self.message = f"Rate asset {asset_data['name']}"
+ wm = context.window_manager
+ return wm.invoke_props_dialog(self)
+
+
+def rating_menu_draw(self, context):
+ layout = self.layout
+
+ ui_props = context.scene.blenderkitUI
+ sr = bpy.context.scene['search results orig']
+
+ asset_search_index = ui_props.active_index
+ if asset_search_index > -1:
+ asset_data = dict(sr['results'][asset_search_index])
+
+ col = layout.column()
+ layout.label(text='Admin Rating Tools:')
+ col.operator_context = 'INVOKE_DEFAULT'
+
+ op = col.operator('wm.blenderkit_menu_rating_upload', text='Fast rate')
+ op.asset_id = asset_data['id']
+ op.asset_type = asset_data['assetType']
def register_ratings():
- pass;
- bpy.utils.register_class(StarRatingOperator)
bpy.utils.register_class(UploadRatingOperator)
+ bpy.utils.register_class(FastRateMenu)
+ # bpy.types.OBJECT_MT_blenderkit_asset_menu.append(rating_menu_draw)
def unregister_ratings():
pass;
- bpy.utils.unregister_class(StarRatingOperator)
+ # bpy.utils.unregister_class(StarRatingOperator)
bpy.utils.unregister_class(UploadRatingOperator)
+ bpy.utils.unregister_class(FastRateMenu)
diff --git a/blenderkit/rerequests.py b/blenderkit/rerequests.py
index 28c7e2ca..c655c8c5 100644
--- a/blenderkit/rerequests.py
+++ b/blenderkit/rerequests.py
@@ -40,7 +40,7 @@ def rerequest(method, url, **kwargs):
# first normal attempt
response = requests.request(method, url, **kwargs)
- utils.p(url)
+ utils.p(url, kwargs)
utils.p(response.status_code)
if response.status_code == 401:
@@ -68,12 +68,19 @@ def rerequest(method, url, **kwargs):
# in non-threaded tasks
bpy.context.preferences.addons['blenderkit'].preferences.api_key = auth_token
bpy.context.preferences.addons['blenderkit'].preferences.api_key_refresh = refresh_token
+ else:
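+ # when called from a thread, defer writing the tokens to the main thread through tasks_queue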
+ tasks_queue.add_task((bkit_oauth.write_tokens, (auth_token, refresh_token, oauth_response)))
kwargs['headers'] = utils.get_headers(auth_token)
response = requests.request(method, url, **kwargs)
utils.p('reresult', response.status_code)
if response.status_code >= 400:
utils.p('reresult', response.text)
+ else:
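+ # token refresh failed: report it and open the login dialog so the user can re-authenticate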
+ tasks_queue.add_task((ui.add_report, (
+ 'Refreshing the token failed. Please log in manually.', 10)))
+ # tasks_queue.add_task((bkit_oauth.write_tokens, ('', '', '')))
+ tasks_queue.add_task((bpy.ops.wm.blenderkit_login, ('INVOKE_DEFAULT',)), fake_context=True)
return response
diff --git a/blenderkit/search.py b/blenderkit/search.py
index 474630e6..25d85d88 100644
--- a/blenderkit/search.py
+++ b/blenderkit/search.py
@@ -56,8 +56,8 @@ import requests, os, random
import time
import threading
import platform
-import json
import bpy
+import copy
search_start_time = 0
prev_time = 0
@@ -72,7 +72,7 @@ def check_errors(rdata):
if user_preferences.enable_oauth:
bkit_oauth.refresh_token_thread()
return False, rdata.get('detail')
- return False, 'Missing or wrong api_key in addon preferences'
+ return False, 'Use the login panel to connect your profile.'
return True, ''
@@ -101,22 +101,45 @@ def refresh_token_timer():
return max(3600, user_preferences.api_key_life - 3600)
+def update_ad(ad):
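+ # older scenes stored asset data with snake_case keys; copy them into the camelCase keys the server now uses.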
+ if not ad.get('assetBaseId'):
+ ad['assetBaseId'] = ad['asset_base_id'] # this should stay ONLY for compatibility with older scenes
+ ad['assetType'] = ad['asset_type'] # this should stay ONLY for compatibility with older scenes
+ ad['canDownload'] = ad['can_download'] # this should stay ONLY for compatibility with older scenes
+ ad['verificationStatus'] = ad['verification_status'] # this should stay ONLY for compatibility with older scenes
+ ad['author'] = {}
+ ad['author']['id'] = ad['author_id'] # this should stay ONLY for compatibility with older scenes
+ return ad
def update_assets_data(): # updates assets data on scene load.
'''updates some properties that were changed on scenes with older assets.
The properties were mainly changed from snake_case to CamelCase to fit the data that is coming from the server.
'''
- for ob in bpy.context.scene.objects:
- if ob.get('asset_data') != None:
- ad = ob['asset_data']
- if not ad.get('assetBaseId'):
- ad['assetBaseId'] = ad['asset_base_id'], # this should stay ONLY for compatibility with older scenes
- ad['assetType'] = ad['asset_type'], # this should stay ONLY for compatibility with older scenes
- ad['canDownload'] = ad['can_download'], # this should stay ONLY for compatibility with older scenes
- ad['verificationStatus'] = ad[
- 'verification_status'], # this should stay ONLY for compatibility with older scenes
- ad['author'] = {}
- ad['author']['id'] = ad['author_id'], # this should stay ONLY for compatibility with older scenes
+ data = bpy.data
+
+ datablocks = [
+ bpy.data.objects,
+ bpy.data.materials,
+ bpy.data.brushes,
+ ]
+ for dtype in datablocks:
+ for block in dtype:
+ if block.get('asset_data') != None:
+ update_ad(block['asset_data'])
+
+ dicts = [
+ 'assets used',
+ 'assets rated',
+ ]
+ for s in bpy.data.scenes:
+ for k in dicts:
+ d = s.get(k)
+ if not d:
+ continue
+
+ for asset_id in d.keys():
+ update_ad(d[asset_id])
+ # bpy.context.scene['assets used'][ad] = ad
@persistent
@@ -134,13 +157,13 @@ def scene_load(context):
def fetch_server_data():
- ''' download categories and addon version'''
+ '''Downloads categories and the user profile, and refreshes the token if needed.'''
if not bpy.app.background:
user_preferences = bpy.context.preferences.addons['blenderkit'].preferences
api_key = user_preferences.api_key
# Only refresh new type of tokens(by length), and only one hour before the token timeouts.
if user_preferences.enable_oauth and \
- len(user_preferences.api_key) < 38 and \
+ len(user_preferences.api_key) < 38 and len(user_preferences.api_key) > 0 and \
user_preferences.api_key_timeout < time.time() + 3600:
bkit_oauth.refresh_token_thread()
if api_key != '' and bpy.context.window_manager.get('bkit profile') == None:
@@ -266,7 +289,7 @@ def parse_result(r):
# attempt to switch to use original data gradually, since the parsing as itself should become obsolete.
asset_data.update(r)
- return asset_data
+ return asset_data
# @bpy.app.handlers.persistent
@@ -280,16 +303,16 @@ def timer_update():
# TODO here it should check if there are some results, and only open assetbar if this is the case, not search.
# if bpy.context.scene.get('search results') is None:
search()
- preferences.first_run = False
+ # preferences.first_run = False
if preferences.tips_on_start:
- ui.get_largest_3dview()
+ utils.get_largest_area()
ui.update_ui_size(ui.active_area, ui.active_region)
ui.add_report(text='BlenderKit Tip: ' + random.choice(rtips), timeout=12, color=colors.GREEN)
return 3.0
- if preferences.first_run:
- search()
- preferences.first_run = False
+ # if preferences.first_run:
+ # search()
+ # preferences.first_run = False
# check_clipboard()
@@ -312,20 +335,17 @@ def timer_update():
asset_type = thread[2]
if asset_type == 'model':
props = scene.blenderkit_models
- json_filepath = os.path.join(icons_dir, 'model_searchresult.json')
- search_name = 'bkit model search'
+ # json_filepath = os.path.join(icons_dir, 'model_searchresult.json')
if asset_type == 'scene':
props = scene.blenderkit_scene
- json_filepath = os.path.join(icons_dir, 'scene_searchresult.json')
- search_name = 'bkit scene search'
+ # json_filepath = os.path.join(icons_dir, 'scene_searchresult.json')
if asset_type == 'material':
props = scene.blenderkit_mat
- json_filepath = os.path.join(icons_dir, 'material_searchresult.json')
- search_name = 'bkit material search'
+ # json_filepath = os.path.join(icons_dir, 'material_searchresult.json')
if asset_type == 'brush':
props = scene.blenderkit_brush
- json_filepath = os.path.join(icons_dir, 'brush_searchresult.json')
- search_name = 'bkit brush search'
+ # json_filepath = os.path.join(icons_dir, 'brush_searchresult.json')
+ search_name = f'bkit {asset_type} search'
s[search_name] = []
@@ -333,8 +353,8 @@ def timer_update():
if reports != '':
props.report = str(reports)
return .2
- with open(json_filepath, 'r') as data_file:
- rdata = json.load(data_file)
+
+ rdata = thread[0].result
result_field = []
ok, error = check_errors(rdata)
@@ -348,8 +368,9 @@ def timer_update():
# results = rdata['results']
s[search_name] = result_field
s['search results'] = result_field
- s[search_name + ' orig'] = rdata
- s['search results orig'] = rdata
+ s[search_name + ' orig'] = copy.deepcopy(rdata)
+ s['search results orig'] = s[search_name + ' orig']
+
load_previews()
ui_props = bpy.context.scene.blenderkitUI
if len(result_field) < ui_props.scrolloffset:
@@ -360,9 +381,6 @@ def timer_update():
if len(s['search results']) == 0:
tasks_queue.add_task((ui.add_report, ('No matching results found.',)))
- # (rdata['next'])
- # if rdata['next'] != None:
- # search(False, get_next = True)
else:
print('error', error)
props.report = error
@@ -374,18 +392,11 @@ def timer_update():
def load_previews():
- mappingdict = {
- 'MODEL': 'model',
- 'SCENE': 'scene',
- 'MATERIAL': 'material',
- 'TEXTURE': 'texture',
- 'BRUSH': 'brush'
- }
+
scene = bpy.context.scene
# FIRST START SEARCH
props = scene.blenderkitUI
-
- directory = paths.get_temp_dir('%s_search' % mappingdict[props.asset_type])
+ directory = paths.get_temp_dir('%s_search' % props.asset_type.lower())
s = bpy.context.scene
results = s.get('search results')
#
@@ -411,7 +422,7 @@ def load_previews():
img.unpack(method='USE_ORIGINAL')
img.filepath = tpath
img.reload()
- img.colorspace_settings.name = 'Linear'
+ img.colorspace_settings.name = 'sRGB'
i += 1
# print('previews loaded')
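Note: the preview/thumbnail hunks in this patch consistently switch images from the 'Linear' to the 'sRGB' colorspace. A minimal sketch of that load-or-reload pattern, assuming the thumbnail file already exists on disk:

import bpy


def load_preview(path, name):
    # reuse an existing datablock if this thumbnail was loaded before
    img = bpy.data.images.get(name)
    if img is None:
        img = bpy.data.images.load(path)
        img.name = name
    else:
        img.filepath = path
        img.reload()
    # thumbnails are regular color images, so display them as sRGB
    img.colorspace_settings.name = 'sRGB'
    return img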
@@ -694,7 +705,7 @@ def write_gravatar(a_id, gravatar_path):
def fetch_gravatar(adata):
utils.p('fetch gravatar')
if adata.get('gravatarHash') is not None:
- gravatar_path = paths.get_temp_dir(subdir='g/') + adata['gravatarHash'] + '.jpg'
+ gravatar_path = paths.get_temp_dir(subdir='bkit_g/') + adata['gravatarHash'] + '.jpg'
if os.path.exists(gravatar_path):
tasks_queue.add_task((write_gravatar, (adata['id'], gravatar_path)))
@@ -790,11 +801,12 @@ def get_profile():
class Searcher(threading.Thread):
query = None
- def __init__(self, query, params):
+ def __init__(self, query, params,orig_result):
super(Searcher, self).__init__()
self.query = query
self.params = params
self._stop_event = threading.Event()
+ self.result = orig_result
def stop(self):
self._stop_event.set()
@@ -854,7 +866,7 @@ class Searcher(threading.Thread):
t = time.time()
mt('search thread started')
tempdir = paths.get_temp_dir('%s_search' % query['asset_type'])
- json_filepath = os.path.join(tempdir, '%s_searchresult.json' % query['asset_type'])
+ # json_filepath = os.path.join(tempdir, '%s_searchresult.json' % query['asset_type'])
headers = utils.get_headers(params['api_key'])
@@ -862,23 +874,11 @@ class Searcher(threading.Thread):
rdata['results'] = []
if params['get_next']:
- with open(json_filepath, 'r') as infile:
- try:
- origdata = json.load(infile)
- urlquery = origdata['next']
- # rparameters = {}
- if urlquery == None:
- return;
- except:
- # in case no search results found on drive we don't do next page loading.
- params['get_next'] = False
+ urlquery = self.result['next']
if not params['get_next']:
- url = paths.get_api_url() + 'search/'
+ urlquery = self.query_to_url()
- urlquery = url
- # rparameters = query
- urlquery = self.query_to_url()
try:
utils.p(urlquery)
r = rerequests.get(urlquery, headers=headers) # , params = rparameters)
@@ -941,10 +941,10 @@ class Searcher(threading.Thread):
# we save here because a missing thumbnail check is in the previous loop
# we can also prepend previous results. These have downloaded thumbnails already...
if params['get_next']:
- rdata['results'][0:0] = origdata['results']
-
- with open(json_filepath, 'w') as outfile:
- json.dump(rdata, outfile)
+ rdata['results'][0:0] = self.result['results']
+ self.result = rdata
+ # with open(json_filepath, 'w') as outfile:
+ # json.dump(rdata, outfile)
killthreads_sml = []
for k in thumb_sml_download_threads.keys():
@@ -1157,7 +1157,7 @@ def mt(text):
utils.p(text, alltime, since_last)
-def add_search_process(query, params):
+def add_search_process(query, params, orig_result):
global search_threads
while (len(search_threads) > 0):
@@ -1166,10 +1166,10 @@ def add_search_process(query, params):
# TODO CARE HERE FOR ALSO KILLING THE THREADS...AT LEAST NOW SEARCH DONE FIRST WON'T REWRITE AN OLDER ONE
tempdir = paths.get_temp_dir('%s_search' % query['asset_type'])
- thread = Searcher(query, params)
+ thread = Searcher(query, params, orig_result)
thread.start()
- search_threads.append([thread, tempdir, query['asset_type']])
+ search_threads.append([thread, tempdir, query['asset_type'], {}])  # 4th field is for results
mt('thread started')
@@ -1195,13 +1195,14 @@ def search(category='', get_next=False, author_id=''):
return;
props = scene.blenderkit_scene
query = build_query_scene()
+
if ui_props.asset_type == 'MATERIAL':
if not hasattr(scene, 'blenderkit_mat'):
return;
props = scene.blenderkit_mat
query = build_query_material()
- utils.p(query)
+
if ui_props.asset_type == 'TEXTURE':
if not hasattr(scene, 'blenderkit_tex'):
@@ -1209,12 +1210,14 @@ def search(category='', get_next=False, author_id=''):
# props = scene.blenderkit_tex
# query = build_query_texture()
+
if ui_props.asset_type == 'BRUSH':
if not hasattr(scene, 'blenderkit_brush'):
return;
props = scene.blenderkit_brush
query = build_query_brush()
+
if props.is_searching and get_next == True:
return;
@@ -1242,8 +1245,11 @@ def search(category='', get_next=False, author_id=''):
# if free_only:
# query['keywords'] += '+is_free:true'
-
- add_search_process(query, params)
+ orig_results = scene.get(f'bkit {ui_props.asset_type.lower()} search orig', {})
+ if orig_results != {}:
+ # ensure we pass a plain dict copy of the previous results to the thread:
+ orig_results = orig_results.to_dict()
+ add_search_process(query, params, orig_results)
tasks_queue.add_task((ui.add_report, ('BlenderKit searching....', 2)))
props.report = 'BlenderKit searching....'
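Note: the search.py changes above stop writing results to per-asset-type JSON files; the previous page is kept on the Searcher thread itself, and the scene-level ID property is converted with to_dict() (and deep-copied) before crossing the thread boundary. A minimal sketch of that pattern; fetch_page is a hypothetical placeholder for the actual request code, not part of the add-on:

import copy
import threading

import bpy


def get_orig_results(asset_type='model'):
    scene = bpy.context.scene
    orig = scene.get(f'bkit {asset_type} search orig', {})
    if hasattr(orig, 'to_dict'):
        # IDPropertyGroup -> plain dict, safe to hand to a worker thread
        orig = orig.to_dict()
    return copy.deepcopy(orig)


class PagedSearch(threading.Thread):
    def __init__(self, query, orig_result):
        super().__init__()
        self.query = query
        self.result = orig_result  # previous page(s), possibly {}

    def run(self):
        # fetch_page is a hypothetical placeholder for the HTTP request code
        new_page = fetch_page(self.query, self.result.get('next'))
        if self.result.get('results'):
            # prepend older results so already-downloaded thumbnails come first
            new_page['results'][0:0] = self.result['results']
        self.result = new_page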
diff --git a/blenderkit/tasks_queue.py b/blenderkit/tasks_queue.py
index bbac6d63..5a327290 100644
--- a/blenderkit/tasks_queue.py
+++ b/blenderkit/tasks_queue.py
@@ -45,15 +45,17 @@ def get_queue():
return t.task_queue
class task_object:
- def __init__(self, command = '', arguments = (), wait = 0, only_last = False):
+ def __init__(self, command = '', arguments = (), wait = 0, only_last = False, fake_context = False, fake_context_area = 'VIEW_3D'):
self.command = command
self.arguments = arguments
self.wait = wait
self.only_last = only_last
+ self.fake_context = fake_context
+ self.fake_context_area = fake_context_area
-def add_task(task, wait = 0, only_last = False):
+def add_task(task, wait = 0, only_last = False, fake_context = False, fake_context_area = 'VIEW_3D'):
q = get_queue()
- taskob = task_object(task[0],task[1], wait = wait, only_last = only_last)
+ taskob = task_object(task[0],task[1], wait = wait, only_last = only_last, fake_context = fake_context, fake_context_area = fake_context_area)
q.put(taskob)
@@ -90,7 +92,11 @@ def queue_worker():
utils.p('as a task: ')
utils.p(task.command, task.arguments)
try:
- task.command(*task.arguments)
+ if task.fake_context:
+ fc = utils.get_fake_context(bpy.context, area_type = task.fake_context_area)
+ task.command(fc,*task.arguments)
+ else:
+ task.command(*task.arguments)
except Exception as e:
utils.p('task failed:')
print(e)
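Note: the tasks_queue.py change lets a queued task request a fake context, which is built only at execution time on the main thread. A standalone sketch of the same idea, where make_override stands in for utils.get_fake_context and is an assumption rather than the add-on's API:

import queue

import bpy

task_queue = queue.Queue()


class Task:
    def __init__(self, command, arguments=(), fake_context=False, area_type='VIEW_3D'):
        self.command = command
        self.arguments = arguments
        self.fake_context = fake_context
        self.area_type = area_type


def make_override(area_type='VIEW_3D'):
    # find any window/area of the requested type for calls that need one
    for window in bpy.context.window_manager.windows:
        for area in window.screen.areas:
            if area.type == area_type:
                region = next((r for r in area.regions if r.type == 'WINDOW'), None)
                return {'window': window, 'screen': window.screen,
                        'area': area, 'region': region}
    return {}


def run_pending_tasks():
    while not task_queue.empty():
        task = task_queue.get()
        try:
            if task.fake_context:
                # the override is built on the main thread, at execution time
                task.command(make_override(task.area_type), *task.arguments)
            else:
                task.command(*task.arguments)
        except Exception as e:
            print('task failed:', e)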
diff --git a/blenderkit/ui.py b/blenderkit/ui.py
index f56dad77..47bf1a51 100644
--- a/blenderkit/ui.py
+++ b/blenderkit/ui.py
@@ -738,7 +738,7 @@ def draw_callback_2d_search(self, context):
highlight = (1, 1, 1, .2)
# highlight = (1, 1, 1, 0.8)
# background of asset bar
- if not ui_props.dragging:
+ if not ui_props.dragging and ui_props.hcount>0:
search_results = s.get('search results')
search_results_orig = s.get('search results orig')
if search_results == None:
@@ -796,14 +796,14 @@ def draw_callback_2d_search(self, context):
index = a + ui_props.scrolloffset + b * ui_props.wcount
iname = utils.previmg_name(index)
img = bpy.data.images.get(iname)
-
- w = int(ui_props.thumb_size * img.size[0] / max(img.size[0], img.size[1]))
- h = int(ui_props.thumb_size * img.size[1] / max(img.size[0], img.size[1]))
- crop = (0, 0, 1, 1)
- if img.size[0] > img.size[1]:
- offset = (1 - img.size[1] / img.size[0]) / 2
- crop = (offset, 0, 1 - offset, 1)
if img is not None:
+ w = int(ui_props.thumb_size * img.size[0] / max(img.size[0], img.size[1]))
+ h = int(ui_props.thumb_size * img.size[1] / max(img.size[0], img.size[1]))
+ crop = (0, 0, 1, 1)
+ if img.size[0] > img.size[1]:
+ offset = (1 - img.size[1] / img.size[0]) / 2
+ crop = (offset, 0, 1 - offset, 1)
+
ui_bgl.draw_image(x, y, w, w, img, 1,
crop=crop)
if index == ui_props.active_index:
@@ -815,7 +815,7 @@ def draw_callback_2d_search(self, context):
# w + 2*highlight_margin, h + 2*highlight_margin , highlight)
else:
- ui_bgl.draw_rect(x, y, w, h, white)
+ ui_bgl.draw_rect(x, y, ui_props.thumb_size, ui_props.thumb_size, white)
result = search_results[index]
if result['downloaded'] > 0:
@@ -876,7 +876,8 @@ def draw_callback_2d_search(self, context):
else:
iname = utils.previmg_name(ui_props.active_index)
img = bpy.data.images.get(iname)
- img.colorspace_settings.name = 'Linear'
+ if img:
+ img.colorspace_settings.name = 'sRGB'
gimg = None
atip = ''
@@ -926,14 +927,26 @@ def mouse_raycast(context, mx, my):
vec = ray_target - ray_origin
has_hit, snapped_location, snapped_normal, face_index, object, matrix = bpy.context.scene.ray_cast(
- bpy.context.view_layer, ray_origin, vec)
+ bpy.context.view_layer.depsgraph, ray_origin, vec)
# rote = mathutils.Euler((0, 0, math.pi))
randoffset = math.pi
if has_hit:
- snapped_rotation = snapped_normal.to_track_quat('Z', 'Y').to_euler()
- up = Vector((0, 0, 1))
props = bpy.context.scene.blenderkit_models
+ up = Vector((0, 0, 1))
+
+ if props.perpendicular_snap:
+ if snapped_normal.z > 1 - props.perpendicular_snap_threshold:
+ snapped_normal = Vector((0, 0, 1))
+ elif snapped_normal.z < -1 + props.perpendicular_snap_threshold:
+ snapped_normal = Vector((0, 0, -1))
+ elif abs(snapped_normal.z) < props.perpendicular_snap_threshold:
+ snapped_normal.z = 0
+ snapped_normal.normalize()
+
+ snapped_rotation = snapped_normal.to_track_quat('Z', 'Y').to_euler()
+
+
if props.randomize_rotation and snapped_normal.angle(up) < math.radians(10.0):
randoffset = props.offset_rotation_amount + math.pi + (
random.random() - 0.5) * props.randomize_rotation_amount
@@ -1006,9 +1019,12 @@ def is_rating_possible():
# crawl parents to reach active asset. there could have been parenting so we need to find the first onw
ao_check = ao
while ad is None or (ad is None and ao_check.parent is not None):
+ s = bpy.context.scene
ad = ao_check.get('asset_data')
if ad is not None:
- rated = bpy.context.scene['assets rated'].get(ad['assetBaseId'])
+
+ s['assets rated'] = s.get('assets rated',{})
+ rated = s['assets rated'].get(ad['assetBaseId'])
# originally hidden for already rated assets
return True, rated, ao_check, ad
elif ao_check.parent is not None:
@@ -1020,9 +1036,10 @@ def is_rating_possible():
m = ao.active_material
if m is not None:
ad = m.get('asset_data')
- if ad is not None:
+ if ad is not None and ad.get('assetBaseId'):
rated = bpy.context.scene['assets rated'].get(ad['assetBaseId'])
- return True, rated, m, ad
+ if rated:
+ return True, rated, m, ad
# if t>2 and t<2.5:
# ui_props.rating_on = False
@@ -1181,30 +1198,6 @@ def update_ui_size(area, region):
ui.rating_y = ui.bar_y - ui.bar_height
-def get_largest_3dview():
- maxsurf = 0
- maxa = None
- maxw = None
- region = None
- for w in bpy.context.window_manager.windows:
- screen = w.screen
- for a in screen.areas:
- if a.type == 'VIEW_3D':
- asurf = a.width * a.height
- if asurf > maxsurf:
- maxa = a
- maxw = w
- maxsurf = asurf
-
- for r in a.regions:
- if r.type == 'WINDOW':
- region = r
- global active_area, active_window, active_region
- active_window = maxw
- active_area = maxa
- active_region = region
- return maxw, maxa, region
-
class AssetBarOperator(bpy.types.Operator):
'''runs search and displays the asset bar at the same time'''
@@ -1688,10 +1681,13 @@ class AssetBarOperator(bpy.types.Operator):
if a is not None:
sprops = utils.get_search_props()
sprops.search_keywords = ''
+ sprops.search_verification_status = 'ALL'
utils.p('author:', a)
search.search(author_id=a)
return {'RUNNING_MODAL'}
+
if event.type == 'X' and ui_props.active_index > -1:
+ # delete downloaded files for this asset
sr = bpy.context.scene['search results']
asset_data = sr[ui_props.active_index]
print(asset_data['name'])
@@ -1803,16 +1799,12 @@ class UndoWithContext(bpy.types.Operator):
message: StringProperty('Undo Message', default='BlenderKit operation')
def execute(self, context):
- C_dict = bpy.context.copy()
- C_dict.update(region='WINDOW')
- if context.area is None or context.area.type != 'VIEW_3D':
- w, a, r = get_largest_3dview()
- override = {'window': w, 'screen': w.screen, 'area': a, 'region': r}
- C_dict.update(override)
+ C_dict = utils.get_fake_context(context)
bpy.ops.ed.undo_push(C_dict, 'INVOKE_REGION_WIN', message=self.message)
return {'FINISHED'}
+
class RunAssetBarWithContext(bpy.types.Operator):
"""Regenerate cobweb"""
bl_idname = "object.run_assetbar_fix_context"
@@ -1824,12 +1816,7 @@ class RunAssetBarWithContext(bpy.types.Operator):
# return {'RUNNING_MODAL'}
def execute(self, context):
- C_dict = bpy.context.copy()
- C_dict.update(region='WINDOW')
- if context.area is None or context.area.type != 'VIEW_3D':
- w, a, r = get_largest_3dview()
- override = {'window': w, 'screen': w.screen, 'area': a, 'region': r}
- C_dict.update(override)
+ C_dict = utils.get_fake_context(context)
bpy.ops.view3d.blenderkit_asset_bar(C_dict, 'INVOKE_REGION_WIN', keep_running=True, do_search=False)
return {'FINISHED'}
@@ -1871,13 +1858,15 @@ def register_ui():
if not wm.keyconfigs.addon:
return
km = wm.keyconfigs.addon.keymaps.new(name="Window", space_type='EMPTY')
+ # asset bar shortcut
kmi = km.keymap_items.new(AssetBarOperator.bl_idname, 'SEMI_COLON', 'PRESS', ctrl=False, shift=False)
kmi.properties.keep_running = False
kmi.properties.do_search = False
addon_keymapitems.append(kmi)
- # auto open after searching:
- kmi = km.keymap_items.new(RunAssetBarWithContext.bl_idname, 'SEMI_COLON', 'PRESS', \
- ctrl=True, shift=True, alt=True)
+ # fast rating shortcut
+ wm = bpy.context.window_manager
+ km = wm.keyconfigs.addon.keymaps['Window']
+ kmi = km.keymap_items.new(ratings.FastRateMenu.bl_idname, 'F', 'PRESS', ctrl=False, shift=False)
addon_keymapitems.append(kmi)
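Note: register_ui() above now adds a fast-rating shortcut instead of the old auto-open keymap item, and keeps every created item in addon_keymapitems so it can be removed again on unregister. A minimal sketch of that bookkeeping; the idname 'wm.blenderkit_menu_rating_upload' is taken from the context-menu hunk later in this patch and is assumed, not verified against ratings.py:

import bpy

addon_keymapitems = []


def register_shortcuts():
    wm = bpy.context.window_manager
    if not wm.keyconfigs.addon:
        return  # background mode has no addon keyconfig
    km = wm.keyconfigs.addon.keymaps.new(name="Window", space_type='EMPTY')
    # fast rating shortcut on 'F'
    kmi = km.keymap_items.new('wm.blenderkit_menu_rating_upload', 'F', 'PRESS')
    addon_keymapitems.append(kmi)


def unregister_shortcuts():
    wm = bpy.context.window_manager
    if not wm.keyconfigs.addon:
        return
    km = wm.keyconfigs.addon.keymaps.get('Window')
    if km is not None:
        for kmi in addon_keymapitems:
            km.keymap_items.remove(kmi)
    addon_keymapitems.clear()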
diff --git a/blenderkit/ui_panels.py b/blenderkit/ui_panels.py
index 4efe732f..a591e76e 100644
--- a/blenderkit/ui_panels.py
+++ b/blenderkit/ui_panels.py
@@ -25,65 +25,48 @@ if "bpy" in locals():
download = importlib.reload(download)
categories = importlib.reload(categories)
icons = importlib.reload(icons)
+ search = importlib.reload(search)
else:
- from blenderkit import paths, ratings, utils, download, categories, icons
+ from blenderkit import paths, ratings, utils, download, categories, icons, search
from bpy.types import (
Panel
)
+from bpy.props import (
+ IntProperty,
+ FloatProperty,
+ FloatVectorProperty,
+ StringProperty,
+ EnumProperty,
+ BoolProperty,
+ PointerProperty,
+)
import bpy
-
-
-def label_multiline(layout, text='', icon='NONE', width=-1):
- ''' draw a ui label, but try to split it in multiple lines.'''
- if text.strip() == '':
- return
- lines = text.split('\n')
- if width > 0:
- threshold = int(width / 5.5)
- else:
- threshold = 35
- maxlines = 8
- li = 0
- for l in lines:
- while len(l) > threshold:
- i = l.rfind(' ', 0, threshold)
- if i < 1:
- i = threshold
- l1 = l[:i]
- layout.label(text=l1, icon=icon)
- icon = 'NONE'
- l = l[i:].lstrip()
- li += 1
- if li > maxlines:
- break;
- if li > maxlines:
- break;
- layout.label(text=l, icon=icon)
- icon = 'NONE'
+import os
+import random
# this was moved to separate interface:
-def draw_ratings(layout, context):
+def draw_ratings(layout, context, asset):
# layout.operator("wm.url_open", text="Read rating instructions", icon='QUESTION').url = 'https://support.google.com/?hl=en'
- asset = utils.get_active_asset()
# the following shouldn't happen at all in an optimal case,
# this function should run only when asset was already checked to be existing
if asset == None:
return;
- if not utils.user_logged_in():
- label_multiline(layout, text='Please login or sign up '
- 'to rate assets.')
- return
+ col = layout.column()
bkit_ratings = asset.bkit_ratings
- ratings.draw_rating(layout, bkit_ratings, 'rating_quality', 'Quality')
- layout.separator()
- layout.prop(bkit_ratings, 'rating_work_hours')
- w = context.region.width
+ # layout.template_icon_view(bkit_ratings, property, show_labels=False, scale=6.0, scale_popup=5.0)
+
+ row = col.row()
+ row.prop(bkit_ratings, 'rating_quality_ui', expand=True, icon_only=True, emboss=False)
+ if bkit_ratings.rating_quality > 0:
+ col.separator()
+ col.prop(bkit_ratings, 'rating_work_hours')
+ # w = context.region.width
# layout.label(text='problems')
# layout.prop(bkit_ratings, 'rating_problems', text='')
@@ -93,15 +76,15 @@ def draw_ratings(layout, context):
# row = layout.row()
# op = row.operator("object.blenderkit_rating_upload", text="Send rating", icon='URL')
# return op
+ # re-enable layout if included in longer panel
-def draw_not_logged_in(source):
- title = "User not logged in"
+def draw_not_logged_in(source, message='Please Login/Signup to use this feature'):
+ title = "You aren't logged in"
def draw_message(source, context):
layout = source.layout
- label_multiline(layout, text='Please login or sign up '
- 'to upload files.')
+ utils.label_multiline(layout, text=message)
draw_login_buttons(layout)
bpy.context.window_manager.popup_menu(draw_message, title=title, icon='INFO')
@@ -119,7 +102,7 @@ def draw_upload_common(layout, props, asset_type, context):
row = layout.row(align=True)
if props.upload_state != '':
- label_multiline(layout, text=props.upload_state, width=context.region.width)
+ utils.label_multiline(layout, text=props.upload_state, width=context.region.width)
if props.uploading:
op = layout.operator('object.kill_bg_process', text="", icon='CANCEL')
op.process_source = asset_type
@@ -208,7 +191,7 @@ def draw_panel_model_upload(self, context):
op.process_source = 'MODEL'
op.process_type = 'THUMBNAILER'
elif props.thumbnail_generating_state != '':
- label_multiline(layout, text=props.thumbnail_generating_state)
+ utils.label_multiline(layout, text=props.thumbnail_generating_state)
layout.prop(props, 'description')
layout.prop(props, 'tags')
@@ -272,7 +255,7 @@ def draw_panel_scene_upload(self, context):
# op.process_source = 'MODEL'
# op.process_type = 'THUMBNAILER'
# elif props.thumbnail_generating_state != '':
- # label_multiline(layout, text = props.thumbnail_generating_state)
+ # utils.label_multiline(layout, text = props.thumbnail_generating_state)
layout.prop(props, 'description')
layout.prop(props, 'tags')
@@ -317,7 +300,7 @@ def draw_panel_model_search(self, context):
icon = 'NONE'
if props.report == 'You need Full plan to get this item.':
icon = 'ERROR'
- label_multiline(layout, text=props.report, icon=icon)
+ utils.label_multiline(layout, text=props.report, icon=icon)
if props.report == 'You need Full plan to get this item.':
layout.operator("wm.url_open", text="Get Full plan", icon='URL').url = paths.BLENDERKIT_PLANS
@@ -335,7 +318,6 @@ def draw_panel_model_search(self, context):
# draw_panel_categories(self, context)
-
def draw_panel_scene_search(self, context):
s = context.scene
props = s.blenderkit_scene
@@ -345,7 +327,7 @@ def draw_panel_scene_search(self, context):
row.prop(props, "search_keywords", text="", icon='VIEWZOOM')
draw_assetbar_show_hide(row, props)
layout.prop(props, "own_only")
- label_multiline(layout, text=props.report)
+ utils.label_multiline(layout, text=props.report)
# layout.prop(props, "search_style")
# if props.search_style == 'OTHER':
@@ -375,7 +357,8 @@ class VIEW3D_PT_blenderkit_model_properties(Panel):
o = utils.get_active_model()
# o = bpy.context.active_object
if o.get('asset_data') is None:
- label_multiline(layout, text='To upload this asset to BlenderKit, go to the Find and Upload Assets panel.')
+ utils.label_multiline(layout,
+ text='To upload this asset to BlenderKit, go to the Find and Upload Assets panel.')
layout.prop(o, 'name')
if o.get('asset_data') is not None:
@@ -400,6 +383,52 @@ class VIEW3D_PT_blenderkit_model_properties(Panel):
# layout.operator('object.blenderkit_color_corrector')
+def draw_rating_asset(self, context, asset):
+ layout = self.layout
+ col = layout.box()
+ # split = layout.split(factor=0.5)
+ # col1 = split.column()
+ # col2 = split.column()
+ # print('%s_search' % asset['asset_data']['assetType'])
+ directory = paths.get_temp_dir('%s_search' % asset['asset_data']['assetType'])
+ tpath = os.path.join(directory, asset['asset_data']['thumbnail_small'])
+ for image in bpy.data.images:
+ if image.filepath == tpath:
+ # split = row.split(factor=1.0, align=False)
+ col.template_icon(icon_value=image.preview.icon_id, scale=6.0)
+ break;
+ # layout.label(text = '', icon_value=image.preview.icon_id, scale = 10)
+ col.label(text=asset.name)
+ draw_ratings(col, context, asset=asset)
+
+
+class VIEW3D_PT_blenderkit_ratings(Panel):
+ bl_category = "BlenderKit"
+ bl_idname = "VIEW3D_PT_blenderkit_ratings"
+ bl_space_type = 'VIEW_3D'
+ bl_region_type = 'UI'
+ bl_label = "Please rate"
+ bl_context = "objectmode"
+
+ @classmethod
+ def poll(cls, context):
+ #
+ p = bpy.context.view_layer.objects.active is not None
+ return p
+
+ def draw(self, context):
+ # TODO make a list of assets inside asset appending code, to happen only when assets are added to the scene.
+ # draw asset properties here
+ layout = self.layout
+ assets = ratings.get_assets_for_rating()
+ if len(assets) > 0:
+ layout.label(text='Help BlenderKit community')
+ layout.label(text='by rating these assets:')
+
+ for a in assets:
+ draw_rating_asset(self, context, asset=a)
+
+
def draw_login_progress(layout):
layout.label(text='Login through browser')
layout.label(text='in progress.')
@@ -491,7 +520,7 @@ def draw_panel_model_rating(self, context):
# o = bpy.context.active_object
o = utils.get_active_model()
# print('ratings active',o)
- draw_ratings(self.layout, context) # , props)
+ draw_ratings(self.layout, context, asset=o) # , props)
# op.asset_type = 'MODEL'
@@ -535,7 +564,7 @@ def draw_panel_material_upload(self, context):
op.process_source = 'MATERIAL'
op.process_type = 'THUMBNAILER'
elif props.thumbnail_generating_state != '':
- label_multiline(layout, text=props.thumbnail_generating_state)
+ utils.label_multiline(layout, text=props.thumbnail_generating_state)
if bpy.context.scene.render.engine in ('CYCLES', 'BLENDER_EEVEE'):
layout.operator("object.blenderkit_material_thumbnail", text='Render thumbnail with Cycles', icon='EXPORT')
@@ -556,23 +585,21 @@ def draw_panel_material_search(self, context):
row.prop(props, "search_keywords", text="", icon='VIEWZOOM')
draw_assetbar_show_hide(row, props)
layout.prop(props, "own_only")
- label_multiline(layout, text=props.report)
+ utils.label_multiline(layout, text=props.report)
- # layout.prop(props, 'search_style')
+ # layout.prop(props, 'search_style')
# if props.search_style == 'OTHER':
# layout.prop(props, 'search_style_other')
# layout.prop(props, 'search_engine')
# if props.search_engine == 'OTHER':
# layout.prop(props, 'search_engine_other')
-
-
# draw_panel_categories(self, context)
-
def draw_panel_material_ratings(self, context):
- draw_ratings(self.layout, context) # , props)
+ asset = bpy.context.active_object.active_material
+ draw_ratings(self.layout, context, asset) # , props)
# op.asset_type = 'MATERIAL'
@@ -600,23 +627,28 @@ def draw_panel_brush_search(self, context):
draw_assetbar_show_hide(row, props)
layout.prop(props, "own_only")
- label_multiline(layout, text=props.report)
+ utils.label_multiline(layout, text=props.report)
# draw_panel_categories(self, context)
def draw_panel_brush_ratings(self, context):
# props = utils.get_brush_props(context)
- draw_ratings(self.layout, context) # , props)
+ brush = utils.get_active_brush()
+ draw_ratings(self.layout, context, asset=brush) # , props)
#
# op.asset_type = 'BRUSH'
-def draw_login_buttons(layout):
+def draw_login_buttons(layout, invoke=False):
user_preferences = bpy.context.preferences.addons['blenderkit'].preferences
if user_preferences.login_attempt:
draw_login_progress(layout)
else:
+ if invoke:
+ layout.operator_context = 'INVOKE_DEFAULT'
+ else:
+ layout.operator_context = 'EXEC_DEFAULT'
if user_preferences.api_key == '':
layout.operator("wm.blenderkit_login", text="Login",
icon='URL').signup = False
@@ -639,12 +671,11 @@ class VIEW3D_PT_blenderkit_advanced_model_search(Panel):
bl_label = "Search filters"
bl_options = {'DEFAULT_CLOSED'}
-
@classmethod
def poll(cls, context):
s = context.scene
ui_props = s.blenderkitUI
- return ui_props.down_up == 'SEARCH' and ui_props.asset_type =='MODEL'
+ return ui_props.down_up == 'SEARCH' and ui_props.asset_type == 'MODEL'
def draw(self, context):
s = context.scene
@@ -694,6 +725,7 @@ class VIEW3D_PT_blenderkit_advanced_model_search(Panel):
# ADULT
# layout.prop(props, "search_adult") # , text ='condition of object new/old e.t.c.')
+
class VIEW3D_PT_blenderkit_advanced_material_search(Panel):
bl_category = "BlenderKit"
bl_idname = "VIEW3D_PT_blenderkit_advanced_material_search"
@@ -707,7 +739,7 @@ class VIEW3D_PT_blenderkit_advanced_material_search(Panel):
def poll(cls, context):
s = context.scene
ui_props = s.blenderkitUI
- return ui_props.down_up == 'SEARCH' and ui_props.asset_type =='MATERIAL'
+ return ui_props.down_up == 'SEARCH' and ui_props.asset_type == 'MATERIAL'
def draw(self, context):
s = context.scene
@@ -735,6 +767,7 @@ class VIEW3D_PT_blenderkit_advanced_material_search(Panel):
row.prop(props, "search_file_size_min", text='min')
row.prop(props, "search_file_size_max", text='max')
+
class VIEW3D_PT_blenderkit_categories(Panel):
bl_category = "BlenderKit"
bl_idname = "VIEW3D_PT_blenderkit_categories"
@@ -748,10 +781,14 @@ class VIEW3D_PT_blenderkit_categories(Panel):
def poll(cls, context):
s = context.scene
ui_props = s.blenderkitUI
- return ui_props.down_up == 'SEARCH'
+ mode = True
+ if ui_props.asset_type == 'BRUSH' and not (context.sculpt_object or context.image_paint_object):
+ mode = False
+ return ui_props.down_up == 'SEARCH' and mode
def draw(self, context):
- draw_panel_categories(self,context)
+ draw_panel_categories(self, context)
+
class VIEW3D_PT_blenderkit_import_settings(Panel):
bl_category = "BlenderKit"
@@ -774,7 +811,6 @@ class VIEW3D_PT_blenderkit_import_settings(Panel):
s = context.scene
ui_props = s.blenderkitUI
-
if ui_props.asset_type == 'MODEL':
# noinspection PyCallByClass
props = s.blenderkit_models
@@ -784,6 +820,10 @@ class VIEW3D_PT_blenderkit_import_settings(Panel):
layout.prop(props, 'randomize_rotation')
if props.randomize_rotation:
layout.prop(props, 'randomize_rotation_amount')
+ layout.prop(props, 'perpendicular_snap')
+ if props.perpendicular_snap:
+ layout.prop(props,'perpendicular_snap_threshold')
+
if ui_props.asset_type == 'MATERIAL':
props = s.blenderkit_mat
layout.prop(props, 'automap')
@@ -819,7 +859,7 @@ class VIEW3D_PT_blenderkit_unified(Panel):
# row = row.split().row()
# layout.alert = True
# layout.alignment = 'CENTER'
- row = layout.row(align = True)
+ row = layout.row(align=True)
row.scale_x = 1.6
row.scale_y = 1.6
# split = row.split(factor=.5)
@@ -844,7 +884,7 @@ class VIEW3D_PT_blenderkit_unified(Panel):
layout.separator()
# if bpy.data.filepath == '':
# layout.alert = True
- # label_multiline(layout, text="It's better to save your file first.", width=w)
+ # utils.label_multiline(layout, text="It's better to save your file first.", width=w)
# layout.alert = False
# layout.separator()
@@ -866,7 +906,7 @@ class VIEW3D_PT_blenderkit_unified(Panel):
# noinspection PyCallByClass
draw_panel_brush_search(self, context)
else:
- label_multiline(layout, text='switch to paint or sculpt mode.', width=context.region.width)
+ utils.label_multiline(layout, text='switch to paint or sculpt mode.', width=context.region.width)
return
@@ -884,11 +924,11 @@ class VIEW3D_PT_blenderkit_unified(Panel):
if e not in ('CYCLES', 'BLENDER_EEVEE'):
rtext = 'Only Cycles and EEVEE render engines are currently supported. ' \
'Please use Cycles for all assets you upload to BlenderKit.'
- label_multiline(layout, rtext, icon='ERROR', width=w)
+ utils.label_multiline(layout, rtext, icon='ERROR', width=w)
return;
if ui_props.asset_type == 'MODEL':
- # label_multiline(layout, "Uploaded models won't be available in b2.79", icon='ERROR')
+ # utils.label_multiline(layout, "Uploaded models won't be available in b2.79", icon='ERROR')
if bpy.context.view_layer.objects.active is not None:
draw_panel_model_upload(self, context)
else:
@@ -897,12 +937,12 @@ class VIEW3D_PT_blenderkit_unified(Panel):
draw_panel_scene_upload(self, context)
elif ui_props.asset_type == 'MATERIAL':
- # label_multiline(layout, "Uploaded materials won't be available in b2.79", icon='ERROR')
+ # utils.label_multiline(layout, "Uploaded materials won't be available in b2.79", icon='ERROR')
if bpy.context.view_layer.objects.active is not None and bpy.context.active_object.active_material is not None:
draw_panel_material_upload(self, context)
else:
- label_multiline(layout, text='select object with material to upload materials', width=w)
+ utils.label_multiline(layout, text='select object with material to upload materials', width=w)
elif ui_props.asset_type == 'BRUSH':
if context.sculpt_object or context.image_paint_object:
@@ -937,6 +977,57 @@ class VIEW3D_PT_blenderkit_unified(Panel):
layout.label(text='not yet implemented')
+class BlenderKitWelcomeOperator(bpy.types.Operator):
+ """Login online on BlenderKit webpage"""
+
+ bl_idname = "wm.blenderkit_welcome"
+ bl_label = "Welcome to BlenderKit!"
+ bl_options = {'REGISTER', 'UNDO', 'INTERNAL'}
+
+ step: IntProperty(
+ name="step",
+ description="Tutorial Step",
+ default=0,
+ options={'SKIP_SAVE'}
+ )
+
+ @classmethod
+ def poll(cls, context):
+ return True
+
+ def draw(self, context):
+ layout = self.layout
+ if self.step == 0:
+ user_preferences = bpy.context.preferences.addons['blenderkit'].preferences
+
+ message = "BlenderKit connects from Blender to an online, " \
+ "community built shared library of models, " \
+ "materials, and brushes. " \
+ "Use addon preferences to set up where files will be saved in the Global directory setting."
+
+ utils.label_multiline(layout, text=message, width=300)
+ utils.label_multiline(layout, text="\n Let's start by searching for some cool materials?", width=300)
+ else:
+ message = "Operator Tutorial called with invalid step"
+
+ def execute(self, context):
+ if self.step == 0:
+ # move mouse:
+ # bpy.context.window_manager.windows[0].cursor_warp(1000, 1000)
+ # show n-key sidebar (spaces[index] has to be found for view3d too:
+ # bpy.context.window_manager.windows[0].screen.areas[5].spaces[0].show_region_ui = False
+ print('running search no')
+ ui_props = bpy.context.scene.blenderkitUI
+ ui_props.asset_type = 'MATERIAL'
+ bpy.context.scene.blenderkit_mat.search_keywords = 'ice'
+ # search.search()
+ return {'FINISHED'}
+
+ def invoke(self, context, event):
+ wm = bpy.context.window_manager
+ return wm.invoke_props_dialog(self)
+
+
def draw_asset_context_menu(self, context, asset_data):
layout = self.layout
ui_props = context.scene.blenderkitUI
@@ -947,16 +1038,22 @@ def draw_asset_context_menu(self, context, asset_data):
a = bpy.context.window_manager['bkit authors'].get(author_id)
if a is not None:
# utils.p('author:', a)
+ op = layout.operator('wm.url_open', text="Open Author's Website")
if a.get('aboutMeUrl') is not None:
- op = layout.operator('wm.url_open', text="Open Author's Website")
op.url = a['aboutMeUrl']
-
+ else:
+ op.url = paths.get_author_gallery_url(a['id'])
op = layout.operator('view3d.blenderkit_search', text="Show Assets By Author")
op.keywords = ''
op.author_id = author_id
op = layout.operator('view3d.blenderkit_search', text='Search Similar')
- op.keywords = asset_data['name'] + ' ' + asset_data['description'] + ' ' + ' '.join(asset_data['tags'])
+ # build search string from name, description and tags:
+ op.keywords = asset_data['name']
+ if asset_data.get('description'):
+ op.keywords += ' ' + asset_data.get('description')
+ op.keywords += ' ' + ' '.join(asset_data.get('tags', []))
+
if asset_data.get('canDownload') != 0:
if len(bpy.context.selected_objects) > 0 and ui_props.asset_type == 'MODEL':
aob = bpy.context.active_object
@@ -964,10 +1061,10 @@ def draw_asset_context_menu(self, context, asset_data):
aob = bpy.context.selected_objects[0]
op = layout.operator('scene.blenderkit_download', text='Replace Active Models')
- #this checks if the menu got called from right-click in assetbar(then index is 0 - x) or
+ # this checks if the menu got called from right-click in assetbar(then index is 0 - x) or
# from a panel(then replacement happens from the active model)
if ui_props.active_index == -3:
- #called from addon panel
+ # called from addon panel
o = utils.get_active_model()
op.asset_base_id = o['asset_data']['assetBaseId']
else:
@@ -1003,6 +1100,8 @@ def draw_asset_context_menu(self, context, asset_data):
op.asset_id = asset_data['id']
op.state = 'rejected'
+
+
if author_id == str(profile['user']['id']):
layout.label(text='Management tools:')
row = layout.row()
@@ -1010,9 +1109,13 @@ def draw_asset_context_menu(self, context, asset_data):
op = row.operator('object.blenderkit_change_status', text='Delete')
op.asset_id = asset_data['id']
op.state = 'deleted'
- # else:
- # #not an author - can rate
- # draw_ratings(layout, context)
+
+ if utils.profile_is_validator():
+ layout.label(text='Admin rating Tools:')
+
+ op = layout.operator('wm.blenderkit_menu_rating_upload', text='Fast rate')
+ op.asset_id = asset_data['id']
+ op.asset_type = asset_data['assetType']
class OBJECT_MT_blenderkit_asset_menu(bpy.types.Menu):
@@ -1028,6 +1131,17 @@ class OBJECT_MT_blenderkit_asset_menu(bpy.types.Menu):
draw_asset_context_menu(self, context, asset_data)
+class OBJECT_MT_blenderkit_login_menu(bpy.types.Menu):
+ bl_label = "BlenderKit login/signup:"
+ bl_idname = "OBJECT_MT_blenderkit_login_menu"
+
+ def draw(self, context):
+ layout = self.layout
+
+ # utils.label_multiline(layout, text=message)
+ draw_login_buttons(layout)
+
+
class SetCategoryOperator(bpy.types.Operator):
"""Visit subcategory"""
bl_idname = "view3d.blenderkit_set_category"
@@ -1086,7 +1200,7 @@ class UrlPopupDialog(bpy.types.Operator):
def draw(self, context):
layout = self.layout
- label_multiline(layout, text=self.message)
+ utils.label_multiline(layout, text=self.message)
layout.active_default = True
op = layout.operator("wm.url_open", text=self.link_text, icon='QUESTION')
@@ -1102,6 +1216,40 @@ class UrlPopupDialog(bpy.types.Operator):
return wm.invoke_props_dialog(self)
+class LoginPopupDialog(bpy.types.Operator):
+ """Generate Cycles thumbnail for model assets"""
+ bl_idname = "wm.blenderkit_url_dialog"
+ bl_label = "BlenderKit login"
+ bl_options = {'REGISTER', 'INTERNAL'}
+
+ message: bpy.props.StringProperty(
+ name="Message",
+ description="",
+ default="Your were logged out from BlenderKit. Please login again. ")
+
+ # @classmethod
+ # def poll(cls, context):
+ # return bpy.context.view_layer.objects.active is not None
+
+ def draw(self, context):
+ layout = self.layout
+ utils.label_multiline(layout, text=self.message)
+
+ layout.active_default = True
+ op = layout.operator("wm.url_open", text=self.link_text, icon='QUESTION')
+ op.url = self.url
+
+ def execute(self, context):
+ # start_thumbnailer(self, context)
+ return {'FINISHED'}
+
+ def invoke(self, context, event):
+ wm = context.window_manager
+
+ return wm.invoke_props_dialog(self)
+
+
def draw_panel_categories(self, context):
s = context.scene
ui_props = s.blenderkitUI
@@ -1225,11 +1373,15 @@ classess = (
VIEW3D_PT_blenderkit_categories,
VIEW3D_PT_blenderkit_import_settings,
VIEW3D_PT_blenderkit_model_properties,
+ # VIEW3D_PT_blenderkit_ratings,
VIEW3D_PT_blenderkit_downloads,
OBJECT_MT_blenderkit_asset_menu,
- UrlPopupDialog
+ OBJECT_MT_blenderkit_login_menu,
+ UrlPopupDialog,
+ BlenderKitWelcomeOperator,
)
+
def register_ui_panels():
for c in classess:
bpy.utils.register_class(c)
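Note: BlenderKitWelcomeOperator above follows the standard dialog-operator pattern: a SKIP_SAVE step property, a draw() that only renders text, and invoke_props_dialog() so execute() runs after the user confirms. A self-contained sketch of the same pattern with illustrative names:

import bpy
from bpy.props import IntProperty


class WM_OT_simple_welcome(bpy.types.Operator):
    """Show a small multi-step welcome dialog"""
    bl_idname = "wm.simple_welcome"
    bl_label = "Welcome!"
    bl_options = {'REGISTER', 'INTERNAL'}

    step: IntProperty(name="step", default=0, options={'SKIP_SAVE'})

    def draw(self, context):
        layout = self.layout
        if self.step == 0:
            layout.label(text="This text is drawn before execute() runs.")
        else:
            layout.label(text="Unknown tutorial step.")

    def execute(self, context):
        # runs after the user confirms the dialog
        return {'FINISHED'}

    def invoke(self, context, event):
        return context.window_manager.invoke_props_dialog(self)


# bpy.utils.register_class(WM_OT_simple_welcome)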
diff --git a/blenderkit/upload.py b/blenderkit/upload.py
index 3afe9815..14fbe6db 100644
--- a/blenderkit/upload.py
+++ b/blenderkit/upload.py
@@ -767,7 +767,7 @@ class UploadOperator(Operator):
layout.label(text="For updates of thumbnail or model use reupload.")
if props.is_private == 'PUBLIC':
- ui_panels.label_multiline(layout, text='public assets are validated several hours'
+ utils.label_multiline(layout, text='public assets are validated several hours'
' or days after upload. Remember always to '
'test download your asset to a clean file'
' to see if it uploaded correctly.'
@@ -777,7 +777,7 @@ class UploadOperator(Operator):
props = utils.get_upload_props()
if not utils.user_logged_in():
- ui_panels.draw_not_logged_in(self)
+ ui_panels.draw_not_logged_in(self, message = 'To upload assets you need to login/signup.')
return {'CANCELLED'}
if props.is_private == 'PUBLIC':
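Note: the upload path now calls ui_panels.draw_not_logged_in() with a custom message; that helper wraps window_manager.popup_menu() with a draw callback, as seen in the ui_panels.py hunks. A minimal standalone version of that popup pattern, with illustrative text and URL:

import bpy


def show_login_popup(message="To upload assets you need to login/signup."):
    def draw_message(popup, context):
        popup.layout.label(text=message)
        op = popup.layout.operator("wm.url_open", text="Open BlenderKit")
        op.url = "https://www.blenderkit.com/"  # illustrative URL

    bpy.context.window_manager.popup_menu(draw_message, title="You aren't logged in", icon='INFO')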
diff --git a/blenderkit/utils.py b/blenderkit/utils.py
index 289ec817..5236aabb 100644
--- a/blenderkit/utils.py
+++ b/blenderkit/utils.py
@@ -32,9 +32,6 @@ import json
import os
import sys
-
-
-
ABOVE_NORMAL_PRIORITY_CLASS = 0x00008000
BELOW_NORMAL_PRIORITY_CLASS = 0x00004000
HIGH_PRIORITY_CLASS = 0x00000080
@@ -42,12 +39,14 @@ IDLE_PRIORITY_CLASS = 0x00000040
NORMAL_PRIORITY_CLASS = 0x00000020
REALTIME_PRIORITY_CLASS = 0x00000100
+
def get_process_flags():
flags = BELOW_NORMAL_PRIORITY_CLASS
if sys.platform != 'win32': # TODO test this on windows
flags = 0
return flags
+
def activate(ob):
bpy.ops.object.select_all(action='DESELECT')
ob.select_set(True)
@@ -97,11 +96,12 @@ def get_selected_models():
parents.append(ob)
done[ob] = True
- #if no blenderkit - like objects were found, use the original selection.
+ # if no blenderkit - like objects were found, use the original selection.
if len(parents) == 0:
parents = obs
return parents
+
def get_selected_replace_adepts():
'''
Detect all hierarchies that contain either asset data from selection, or selected objects themselves.
@@ -127,11 +127,12 @@ def get_selected_replace_adepts():
done[ob] = True
# print(parents)
- #if no blenderkit - like objects were found, use the original selection.
+ # if no blenderkit - like objects were found, use the original selection.
if len(parents) == 0:
parents = obs
return parents
+
def get_search_props():
scene = bpy.context.scene
if scene is None:
@@ -209,9 +210,9 @@ def get_upload_props():
def previmg_name(index, fullsize=False):
if not fullsize:
- return '.bkit_preview_' + str(index).zfill(2)
+ return '.bkit_preview_' + str(index).zfill(3)
else:
- return '.bkit_preview_full_' + str(index).zfill(2)
+ return '.bkit_preview_full_' + str(index).zfill(3)
def get_active_brush():
@@ -238,7 +239,7 @@ def load_prefs():
def save_prefs(self, context):
# first check context, so we don't do this on registration or blender startup
- if not bpy.app.background: #(hasattr kills blender)
+ if not bpy.app.background: # (hasattr kills blender)
user_preferences = bpy.context.preferences.addons['blenderkit'].preferences
# we test the api key for length, so not a random accidentally typed sequence gets saved.
lk = len(user_preferences.api_key)
@@ -264,6 +265,17 @@ def save_prefs(self, context):
print(e)
+def get_hidden_texture(tpath, bdata_name, force_reload=False):
+ i = get_hidden_image(tpath, bdata_name, force_reload=force_reload)
+ bdata_name = f".{bdata_name}"
+ t = bpy.data.textures.get(bdata_name)
+ if t is None:
+ t = bpy.data.textures.new(bdata_name, 'IMAGE')
+ if t.image != i:
+ t.image = i
+ return t
+
+
def get_hidden_image(tpath, bdata_name, force_reload=False):
hidden_name = '.%s' % bdata_name
img = bpy.data.images.get(hidden_name)
@@ -289,12 +301,12 @@ def get_hidden_image(tpath, bdata_name, force_reload=False):
img.filepath = tpath
img.reload()
- img.colorspace_settings.name = 'Linear'
+ img.colorspace_settings.name = 'sRGB'
elif force_reload:
if img.packed_file is not None:
img.unpack(method='USE_ORIGINAL')
img.reload()
- img.colorspace_settings.name = 'Linear'
+ img.colorspace_settings.name = 'sRGB'
return img
@@ -304,7 +316,7 @@ def get_thumbnail(name):
img = bpy.data.images.get(name)
if img == None:
img = bpy.data.images.load(p)
- img.colorspace_settings.name = 'Linear'
+ img.colorspace_settings.name = 'sRGB'
img.name = name
img.name = name
@@ -339,16 +351,19 @@ def get_hierarchy(ob):
obs.append(o)
return obs
-def select_hierarchy(ob, state = True):
+
+def select_hierarchy(ob, state=True):
obs = get_hierarchy(ob)
for ob in obs:
ob.select_set(state)
return obs
+
def delete_hierarchy(ob):
obs = get_hierarchy(ob)
bpy.ops.object.delete({"selected_objects": obs})
+
def get_bounds_snappable(obs, use_modifiers=False):
# progress('getting bounds of object(s)')
parent = obs[0]
@@ -464,13 +479,15 @@ def get_headers(api_key):
headers["Authorization"] = "Bearer %s" % api_key
return headers
+
def scale_2d(v, s, p):
'''scale a 2d vector with a pivot'''
return (p[0] + s[0] * (v[0] - p[0]), p[1] + s[1] * (v[1] - p[1]))
-def scale_uvs(ob, scale = 1.0, pivot = Vector((.5,.5))):
+
+def scale_uvs(ob, scale=1.0, pivot=Vector((.5, .5))):
mesh = ob.data
- if len(mesh.uv_layers)>0:
+ if len(mesh.uv_layers) > 0:
uv = mesh.uv_layers[mesh.uv_layers.active_index]
# Scale a UV map iterating over its coordinates to a given scale and with a pivot point
@@ -479,7 +496,7 @@ def scale_uvs(ob, scale = 1.0, pivot = Vector((.5,.5))):
# map uv cubic and switch of auto tex space and set it to 1,1,1
-def automap(target_object=None, target_slot=None, tex_size=1, bg_exception=False, just_scale = False):
+def automap(target_object=None, target_slot=None, tex_size=1, bg_exception=False, just_scale=False):
from blenderkit import bg_blender as bg
s = bpy.context.scene
mat_props = s.blenderkit_mat
@@ -531,9 +548,10 @@ def automap(target_object=None, target_slot=None, tex_size=1, bg_exception=False
# by now, it takes the basic uv map = 1 meter. also, it now doeasn't respect more materials on one object,
# it just scales whole UV.
if just_scale:
- scale_uvs(tob, scale=Vector((1/tex_size, 1/tex_size)))
+ scale_uvs(tob, scale=Vector((1 / tex_size, 1 / tex_size)))
bpy.context.view_layer.objects.active = actob
+
def name_update():
props = get_upload_props()
if props.name_old != props.name:
@@ -553,12 +571,14 @@ def name_update():
asset = get_active_asset()
asset.name = fname
+
def params_to_dict(params):
params_dict = {}
for p in params:
params_dict[p['parameterType']] = p['value']
return params_dict
+
def dict_to_params(inputs, parameters=None):
if parameters == None:
parameters = []
@@ -596,6 +616,7 @@ def profile_is_validator():
return True
return False
+
def guard_from_crash():
'''Blender tends to crash when trying to run some functions with the addon going through unregistration process.'''
if bpy.context.preferences.addons.get('blenderkit') is None:
@@ -603,3 +624,68 @@ def guard_from_crash():
if bpy.context.preferences.addons['blenderkit'].preferences is None:
return False;
return True
+
+
+def get_largest_area(area_type='VIEW_3D'):
+ maxsurf = 0
+ maxa = None
+ maxw = None
+ region = None
+ for w in bpy.context.window_manager.windows:
+ for a in w.screen.areas:
+ if a.type == area_type:
+ asurf = a.width * a.height
+ if asurf > maxsurf:
+ maxa = a
+ maxw = w
+ maxsurf = asurf
+
+ for r in a.regions:
+ if r.type == 'WINDOW':
+ region = r
+ global active_area, active_window, active_region
+ active_window = maxw
+ active_area = maxa
+ active_region = region
+ return maxw, maxa, region
+
+
+def get_fake_context(context, area_type='VIEW_3D'):
+ C_dict = {} # context.copy() #context.copy was a source of problems - incompatibility with addons that also define context
+ C_dict.update(region='WINDOW')
+ if context.area is None or context.area.type != area_type:
+ w, a, r = get_largest_area(area_type=area_type)
+
+ override = {'window': w, 'screen': w.screen, 'area': a, 'region': r}
+ C_dict.update(override)
+ # print(w,a,r)
+ return C_dict
+
+
+def label_multiline(layout, text='', icon='NONE', width=-1):
+ ''' draw a ui label, but try to split it in multiple lines.'''
+ if text.strip() == '':
+ return
+ lines = text.split('\n')
+ if width > 0:
+ threshold = int(width / 5.5)
+ else:
+ threshold = 35
+ maxlines = 8
+ li = 0
+ for l in lines:
+ while len(l) > threshold:
+ i = l.rfind(' ', 0, threshold)
+ if i < 1:
+ i = threshold
+ l1 = l[:i]
+ layout.label(text=l1, icon=icon)
+ icon = 'NONE'
+ l = l[i:].lstrip()
+ li += 1
+ if li > maxlines:
+ break;
+ if li > maxlines:
+ break;
+ layout.label(text=l, icon=icon)
+ icon = 'NONE'
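Note: the new utils.get_fake_context() replaces the per-operator overrides removed from ui.py; it returns a context dict pointing at the largest area of the requested type. Usage mirrors UndoWithContext.execute() above, using the 2.8x/2.9x context-dict override convention this add-on targets:

import bpy
from blenderkit import utils

# build an override dict pointing at the largest 3D viewport...
fake_ctx = utils.get_fake_context(bpy.context, area_type='VIEW_3D')
# ...and pass it as the first positional argument of an operator call
bpy.ops.ed.undo_push(fake_ctx, 'INVOKE_REGION_WIN', message='BlenderKit operation')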
diff --git a/bone_selection_sets.py b/bone_selection_sets.py
index 8e07f418..86338da2 100644
--- a/bone_selection_sets.py
+++ b/bone_selection_sets.py
@@ -50,13 +50,13 @@ from bpy.props import (
# Note: bones are stored by name, this means that if the bone is renamed,
# there can be problems. However, bone renaming is unlikely during animation.
class SelectionEntry(PropertyGroup):
- name: StringProperty(name="Bone Name")
+ name: StringProperty(name="Bone Name", override={'LIBRARY_OVERRIDABLE'})
class SelectionSet(PropertyGroup):
- name: StringProperty(name="Set Name")
- bone_ids: CollectionProperty(type=SelectionEntry)
- is_selected: BoolProperty(name="Is Selected")
+ name: StringProperty(name="Set Name", override={'LIBRARY_OVERRIDABLE'})
+ bone_ids: CollectionProperty(type=SelectionEntry, override={'LIBRARY_OVERRIDABLE'})
+ is_selected: BoolProperty(name="Is Selected", override={'LIBRARY_OVERRIDABLE'})
# UI Panel w/ UIList ##########################################################
@@ -545,12 +545,14 @@ def register():
bpy.types.Object.selection_sets = CollectionProperty(
type=SelectionSet,
name="Selection Sets",
- description="List of groups of bones for easy selection"
+ description="List of groups of bones for easy selection",
+ override={'LIBRARY_OVERRIDABLE'}
)
bpy.types.Object.active_selection_set = IntProperty(
name="Active Selection Set",
description="Index of the currently active selection set",
- default=0
+ default=0,
+ override={'LIBRARY_OVERRIDABLE'}
)
# Add shortcuts to the keymap.
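Note: the bone_selection_sets.py change marks every property, including the collections registered on bpy.types.Object, as LIBRARY_OVERRIDABLE so selection sets stay editable on linked-and-overridden rigs. A minimal sketch of the same pattern using hypothetical property names:

import bpy
from bpy.props import CollectionProperty, IntProperty, StringProperty


class DemoEntry(bpy.types.PropertyGroup):
    # hypothetical property group, not part of the add-on
    name: StringProperty(name="Name", override={'LIBRARY_OVERRIDABLE'})


def register():
    bpy.utils.register_class(DemoEntry)
    bpy.types.Object.demo_entries = CollectionProperty(
        type=DemoEntry,
        name="Demo Entries",
        override={'LIBRARY_OVERRIDABLE'},
    )
    bpy.types.Object.active_demo_entry = IntProperty(
        name="Active Demo Entry",
        default=0,
        override={'LIBRARY_OVERRIDABLE'},
    )


def unregister():
    del bpy.types.Object.active_demo_entry
    del bpy.types.Object.demo_entries
    bpy.utils.unregister_class(DemoEntry)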
diff --git a/io_coat3D/__init__.py b/io_coat3D/__init__.py
index 4f79557c..77318c74 100644
--- a/io_coat3D/__init__.py
+++ b/io_coat3D/__init__.py
@@ -36,7 +36,7 @@ else:
from bpy.app.handlers import persistent
from io_coat3D import tex
-#from io_coat3D import texVR
+from io_coat3D import texVR
import os
import ntpath
@@ -67,57 +67,6 @@ time_interval = 2.0
global_exchange_folder = ''
-
-
-@persistent
-def every_3_seconds():
- global global_exchange_folder
- global initial_settings
- path_ex = ''
-
- if(initial_settings):
- global_exchange_folder = set_exchange_folder()
- initial_settings = False
-
- coat3D = bpy.context.scene.coat3D
-
- Export_folder = global_exchange_folder
- Blender_folder = os.path.join(Export_folder, 'Blender')
-
- BlenderFolder = Blender_folder
- ExportFolder = Export_folder
-
- Blender_folder += ('%sexport.txt' % (os.sep))
- Export_folder += ('%sexport.txt' % (os.sep))
-
-
- if os.path.isfile(Export_folder):
-
- print('BLENDER -> 3DC -> BLENDER WORKFLLOW')
- DeleteExtra3DC()
- workflow1(ExportFolder)
- removeFile(Export_folder)
- removeFile(Blender_folder)
-
-
-
- elif os.path.isfile(Blender_folder):
-
- print('3DC -> BLENDER WORKFLLOW')
- DeleteExtra3DC()
- workflow2(BlenderFolder)
- removeFile(Blender_folder)
-
-
-
-
- return 3.0
-
-@persistent
-def load_handler(dummy):
- bpy.app.timers.register(every_3_seconds)
-
-
def removeFile(exportfile):
if (os.path.isfile(exportfile)):
os.remove(exportfile)
@@ -155,19 +104,44 @@ def set_exchange_folder():
if(os.path.isdir(exchange)):
bpy.coat3D['status'] = 1
+
if(platform == 'win32'):
+
exchange_path = os.path.expanduser("~") + os.sep + 'Documents' + os.sep + '3DC2Blender' + os.sep + 'Exchange_folder.txt'
applink_folder = os.path.expanduser("~") + os.sep + 'Documents' + os.sep + '3DC2Blender'
if(not(os.path.isdir(applink_folder))):
os.makedirs(applink_folder)
+
else:
+
exchange_path = os.path.expanduser("~") + os.sep + 'Documents' + os.sep + '3DC2Blender' + os.sep + 'Exchange_folder.txt'
applink_folder = os.path.expanduser("~") + os.sep + 'Documents' + os.sep + '3DC2Blender'
if(not(os.path.isdir(applink_folder))):
os.makedirs(applink_folder)
- file = open(exchange_path, "w")
- file.write("%s"%(coat3D.exchangedir))
- file.close()
+
+ if(os.path.isfile(exchange_path) == False):
+
+ file = open(exchange_path, "w")
+ file.write("%s"%(exchange_path))
+ file.close()
+
+ else:
+
+ exchangeline = open(exchange_path)
+ for line in exchangeline:
+ source = line
+ break
+ exchangeline.close()
+
+ if(source != coat3D.exchangedir and coat3D.exchangedir != '' and coat3D.exchangedir.rfind('Exchange') >= 0):
+
+ file = open(exchange_path, "w")
+ file.write("%s"%(coat3D.exchangedir))
+ file.close()
+ exchange = coat3D.exchangedir
+
+ else:
+ exchange = source
else:
if(platform == 'win32'):
@@ -300,7 +274,6 @@ def updatemesh(objekti, proxy, texturelist):
vertex_map_copy.data[loop_index].color = proxy.data.vertex_colors[0].data[loop_index].color
# UV -Sets
-
udim_textures = False
if(texturelist != []):
if(texturelist[0][0].startswith('100')):
@@ -311,7 +284,7 @@ def updatemesh(objekti, proxy, texturelist):
uv_count = len(proxy.data.uv_layers)
index = 0
- while(index < uv_count):
+ while(index < uv_count and len(proxy.data.polygons) == len(objekti.data.polygons)):
for poly in proxy.data.polygons:
for indi in poly.loop_indices:
if(proxy.data.uv_layers[index].data[indi].uv[0] != 0 and proxy.data.uv_layers[index].data[indi].uv[1] != 0):
@@ -322,8 +295,7 @@ def updatemesh(objekti, proxy, texturelist):
objekti.data.uv_layers[0].data[indi].uv[0] = proxy.data.uv_layers[index].data[indi].uv[0]
objekti.data.uv_layers[0].data[indi].uv[1] = proxy.data.uv_layers[index].data[indi].uv[1]
- if(udim_textures):
- objekti.data.uv_layers[0].data[indi].uv[0] += udim_index
+
index = index + 1
# Mesh Copy
@@ -333,6 +305,54 @@ def updatemesh(objekti, proxy, texturelist):
for ind, v in enumerate(objekti.data.vertices):
v.co = proxy.data.vertices[ind].co
+class SCENE_OT_getback(bpy.types.Operator):
+ bl_idname = "getback.pilgway_3d_coat"
+ bl_label = "Export your custom property"
+ bl_description = "Export your custom property"
+ bl_options = {'UNDO'}
+
+ def invoke(self, context, event):
+
+ global global_exchange_folder
+ global initial_settings
+ path_ex = ''
+
+ if(initial_settings):
+ global_exchange_folder = set_exchange_folder()
+ initial_settings = False
+
+ Export_folder = global_exchange_folder
+ Blender_folder = os.path.join(Export_folder, 'Blender')
+
+ BlenderFolder = Blender_folder
+ ExportFolder = Export_folder
+
+ Blender_folder += ('%sexport.txt' % (os.sep))
+ Export_folder += ('%sexport.txt' % (os.sep))
+
+ if (bpy.app.background == False):
+ if os.path.isfile(Export_folder):
+
+ print('BLENDER -> 3DC -> BLENDER WORKFLOW')
+ DeleteExtra3DC()
+ workflow1(ExportFolder)
+ removeFile(Export_folder)
+ removeFile(Blender_folder)
+
+
+
+ elif os.path.isfile(Blender_folder):
+
+ print('3DC -> BLENDER WORKFLOW')
+ DeleteExtra3DC()
+ workflow2(BlenderFolder)
+ removeFile(Blender_folder)
+
+
+
+ return {'FINISHED'}
+
+
class SCENE_OT_folder(bpy.types.Operator):
bl_idname = "update_exchange_folder.pilgway_3d_coat"
bl_label = "Export your custom property"
@@ -556,7 +576,7 @@ class SCENE_OT_export(bpy.types.Operator):
def invoke(self, context, event):
bpy.ops.export_applink.pilgway_3d_coat()
-
+
return {'FINISHED'}
def execute(self, context):
@@ -760,11 +780,11 @@ class SCENE_OT_export(bpy.types.Operator):
if(coat3D.type == 'autopo'):
coat3D.bring_retopo = True
coat3D.bring_retopo_path = checkname
- bpy.ops.export_scene.fbx(filepath=checkname, use_selection=True, use_mesh_modifiers=coat3D.exportmod, axis_forward='-Z', axis_up='Y')
+ bpy.ops.export_scene.fbx(filepath=checkname, global_scale = 0.01, use_selection=True, use_mesh_modifiers=coat3D.exportmod, axis_forward='-Z', axis_up='Y')
elif (coat3D.type == 'vox'):
coat3D.bring_retopo = False
- bpy.ops.export_scene.fbx(filepath=coa.applink_address, global_scale=1, use_selection=True,
+ bpy.ops.export_scene.fbx(filepath=coa.applink_address, global_scale = 0.01, use_selection=True,
use_mesh_modifiers=coat3D.exportmod, axis_forward='-Z', axis_up='Y')
else:
@@ -805,9 +825,10 @@ class SCENE_OT_export(bpy.types.Operator):
for ind, mat_list in enumerate(mod_mat_list):
- if(mat_list == objekti.name):
+ if(mat_list == '__' + objekti.name):
for ind, mat in enumerate(mod_mat_list[mat_list]):
objekti.material_slots[mod_mat_list[mat_list][ind][0]].material = mod_mat_list[mat_list][ind][1]
+
bpy.context.scene.render.engine = active_render
return {'FINISHED'}
@@ -834,7 +855,7 @@ def DeleteExtra3DC():
bpy.data.images.remove(del_img)
bpy.data.materials.remove(material)
-
+
image_del_list = []
for image in bpy.data.images:
if (image.name.startswith('3DC')):
@@ -906,6 +927,10 @@ def new_ref_function(new_applink_address, nimi):
refmesh.coat3D.applink_name = ''
refmesh.coat3D.applink_address = ''
refmesh.coat3D.type = ''
+ copymesh.scale = (1,1,1)
+ copymesh.coat3D.applink_scale = (1,1,1)
+ copymesh.location = (0,0,0)
+ copymesh.rotation_euler = (0,0,0)
def blender_3DC_blender(texturelist):
@@ -965,7 +990,6 @@ def blender_3DC_blender(texturelist):
for oname in object_list:
objekti = bpy.data.objects[oname]
-
if(objekti.coat3D.applink_mesh == True):
path3b_n = coat3D.exchangedir
@@ -1034,7 +1058,7 @@ def blender_3DC_blender(texturelist):
if objekti.coat3D.applink_firsttime == True and objekti.coat3D.type == 'vox':
objekti.select_set(True)
- objekti.scale = (1, 1, 1)
+ objekti.scale = (0.01, 0.01, 0.01)
objekti.rotation_euler[0] = 1.5708
objekti.rotation_euler[2] = 1.5708
bpy.ops.object.transforms_to_deltas(mode='ROT')
@@ -1062,8 +1086,7 @@ def blender_3DC_blender(texturelist):
#delete_materials_from_end(keep_materials_count, obj_proxy)
- for index, material in enumerate(objekti.material_slots):
- obj_proxy.material_slots[index-1].material = material.material
+
updatemesh(objekti,obj_proxy, texturelist)
bpy.context.view_layer.objects.active = objekti
@@ -1190,7 +1213,7 @@ def blender_3DC(texturelist, new_applink_address):
old_materials = bpy.data.materials.keys()
old_objects = bpy.data.objects.keys()
-
+
bpy.ops.import_scene.fbx(filepath=new_applink_address, global_scale = 1, axis_forward='-Z', axis_up='Y')
new_materials = bpy.data.materials.keys()
@@ -1247,6 +1270,7 @@ def blender_3DC(texturelist, new_applink_address):
if(facture_object):
texVR.matlab(new_obj, mat_list, texturelist, is_new)
+ new_obj.scale = (0.01, 0.01, 0.01)
else:
tex.matlab(new_obj, mat_list, texturelist, is_new)
@@ -1264,7 +1288,7 @@ def blender_3DC(texturelist, new_applink_address):
os.remove(Blender_export)
if (os.path.isfile(Blender_folder2)):
os.remove(Blender_folder2)
-
+
for material in bpy.data.materials:
if material.use_nodes == True:
for node in material.node_tree.nodes:
@@ -1285,9 +1309,9 @@ def workflow1(ExportFolder):
for image in bpy.data.images:
if(image.filepath == texturepath[3] and image.users == 0):
bpy.data.images.remove(image)
-
+
path3b_now = coat3D.exchangedir
-
+
path3b_now += ('last_saved_3b_file.txt')
new_applink_address = 'False'
new_object = False
@@ -1311,7 +1335,7 @@ def workflow1(ExportFolder):
new_ref_object = True
nimi = scene_objects.name
-
+
exportfile = coat3D.exchangedir
@@ -1344,9 +1368,9 @@ def workflow2(BlenderFolder):
kokeilu = coat3D.exchangedir
Blender_export = os.path.join(kokeilu, 'Blender')
-
+
path3b_now = coat3D.exchangedir
-
+
path3b_now += ('last_saved_3b_file.txt')
Blender_export += ('%sexport.txt'%(os.sep))
new_applink_address = 'False'
@@ -1354,7 +1378,6 @@ def workflow2(BlenderFolder):
new_ref_object = False
if(os.path.isfile(Blender_export)):
- print('blender')
obj_pathh = open(Blender_export)
new_object = True
for line in obj_pathh:
@@ -1420,9 +1443,10 @@ class SCENE_PT_Main(bpy.types.Panel):
row.prop(coat3D,"type",text = "")
flow = layout.grid_flow(row_major=True, columns=0, even_columns=False, even_rows=False, align=True)
- col = flow.column()
+ row = layout.row()
- col.operator("export_applink.pilgway_3d_coat", text="Send")
+ row.operator("export_applink.pilgway_3d_coat", text="Send")
+ row.operator("getback.pilgway_3d_coat", text="GetBack")
class ObjectButtonsPanel():
@@ -1944,6 +1968,11 @@ class MaterialCoat3D(PropertyGroup):
description="Import diffuse texture",
default=True
)
+ bring_gloss: BoolProperty(
+ name="Import diffuse texture",
+ description="Import diffuse texture",
+ default=True
+ )
classes = (
SCENE_PT_Main,
@@ -1957,6 +1986,7 @@ classes = (
SCENE_OT_folder,
SCENE_OT_opencoat,
SCENE_OT_export,
+ SCENE_OT_getback,
SCENE_OT_delete_material_nodes,
SCENE_OT_delete_object_nodes,
SCENE_OT_delete_collection_nodes,
@@ -2014,6 +2044,11 @@ def register():
description="Import alpha texture",
default=True
)
+ bpy.types.Material.coat3D_gloss = BoolProperty(
+        name="Import gloss texture",
+        description="Import gloss texture",
+ default=True
+ )
from bpy.utils import register_class
@@ -2024,8 +2059,6 @@ def register():
bpy.types.Scene.coat3D = PointerProperty(type=SceneCoat3D)
bpy.types.Mesh.coat3D = PointerProperty(type=MeshCoat3D)
bpy.types.Material.coat3D = PointerProperty(type=MaterialCoat3D)
- bpy.app.handlers.load_post.append(load_handler)
-
kc = bpy.context.window_manager.keyconfigs.addon
diff --git a/io_coat3D/data.json b/io_coat3D/data.json
index 4eaf03e8..8ed0b54e 100644
--- a/io_coat3D/data.json
+++ b/io_coat3D/data.json
@@ -9,7 +9,6 @@
"input": 0,
"rampnode": "no",
"huenode": "yes",
- "node_location": [ -400, 400 ],
"node_color": [0.535, 0.608, 0.306]
},
@@ -22,7 +21,6 @@
"input": 1,
"rampnode": "yes",
"huenode": "no",
- "node_location": [ -830, 160 ],
"node_color": [ 0.027, 0.324, 0.908 ]
},
@@ -35,7 +33,6 @@
"input": 2,
"rampnode": "yes",
"huenode": "no",
- "node_location": [ -550, 0 ],
"node_color": [ 0.608, 0.254, 0.000 ]
},
@@ -48,7 +45,6 @@
"input": 3,
"rampnode": "no",
"huenode": "no",
- "node_location": [ -650, -500 ],
"normal_node_location": [ -350, -350 ],
"node_color": [ 0.417, 0.363, 0.608 ]
},
@@ -60,7 +56,6 @@
"displacement": "yes",
"rampnode": "yes",
"huenode": "no",
- "node_location": [ 100, 100 ],
"input": 5,
"node_color": [ 0.608, 0.591, 0.498 ]
},
@@ -73,7 +68,6 @@
"find_input": [ "Emissive" ],
"rampnode": "no",
"huenode": "yes",
- "node_location": [ 100, 100 ],
"input": 4,
"node_color": [ 0.0, 0.0, 0.0 ]
},
@@ -85,7 +79,6 @@
"displacement": "no",
"rampnode": "no",
"huenode": "no",
- "node_location": [ 100, 100 ],
"node_color": [ 0.0, 0.0, 0.0 ]
},
@@ -96,7 +89,6 @@
"displacement": "no",
"rampnode": "yes",
"huenode": "no",
- "node_location": [ 100, 100 ],
"node_color": [ 0.1, 0.5, 0.1 ]
},
@@ -109,7 +101,6 @@
"input": 8,
"rampnode": "no",
"huenode": "no",
- "node_location": [ 0, -1000 ],
"node_color": [0.535, 0.608, 0.306]
}
diff --git a/io_coat3D/tex.py b/io_coat3D/tex.py
index 95dcf59c..21fa92b3 100644
--- a/io_coat3D/tex.py
+++ b/io_coat3D/tex.py
@@ -108,62 +108,42 @@ def updatetextures(objekti): # Update 3DC textures
for node in index_mat.material.node_tree.nodes:
if (node.type == 'TEX_IMAGE'):
- if (node.name == '3DC_color'):
- node.image.reload()
- elif (node.name == '3DC_metalness'):
- node.image.reload()
- elif (node.name == '3DC_rough'):
- node.image.reload()
- elif (node.name == '3DC_nmap'):
- node.image.reload()
- elif (node.name == '3DC_displacement'):
- node.image.reload()
- elif (node.name == '3DC_emissive'):
- node.image.reload()
- elif (node.name == '3DC_AO'):
- node.image.reload()
- elif (node.name == '3DC_alpha'):
- node.image.reload()
+ if (node.name == '3DC_color' or node.name == '3DC_metalness' or node.name == '3DC_rough' or node.name == '3DC_nmap'
+ or node.name == '3DC_displacement' or node.name == '3DC_emissive' or node.name == '3DC_AO' or node.name == '3DC_alpha'):
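+                        # Guard against nodes whose image is missing or fails to reload.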
+ try:
+ node.image.reload()
+ except:
+ pass
for index_node_group in bpy.data.node_groups:
for node in index_node_group.nodes:
if (node.type == 'TEX_IMAGE'):
- if (node.name == '3DC_color'):
- node.image.reload()
- elif (node.name == '3DC_metalness'):
- node.image.reload()
- elif (node.name == '3DC_rough'):
- node.image.reload()
- elif (node.name == '3DC_nmap'):
- node.image.reload()
- elif (node.name == '3DC_displacement'):
- node.image.reload()
- elif (node.name == '3DC_emissive'):
- node.image.reload()
- elif (node.name == '3DC_AO'):
- node.image.reload()
- elif (node.name == '3DC_alpha'):
- node.image.reload()
+ if (node.name == '3DC_color' or node.name == '3DC_metalness' or node.name == '3DC_rough' or node.name == '3DC_nmap'
+ or node.name == '3DC_displacement' or node.name == '3DC_emissive' or node.name == '3DC_AO' or node.name == '3DC_alpha'):
+ try:
+ node.image.reload()
+ except:
+ pass
def testi(objekti, texture_info, index_mat_name, uv_MODE_mat, mat_index):
if uv_MODE_mat == 'UV':
-
+
uv_set_founded = False
for uvset in objekti.data.uv_layers:
-
+
if(uvset.name == texture_info):
uv_set_founded = True
-
+
break
-
+
if(uv_set_founded):
for uv_poly in objekti.data.uv_layers[texture_info].id_data.polygons:
if(mat_index == uv_poly.material_index):
return True
else:
return False
-
+
elif uv_MODE_mat == 'MAT':
return (texture_info == index_mat_name)
@@ -173,11 +153,6 @@ def readtexturefolder(objekti, mat_list, texturelist, is_new, udim_textures): #r
create_nodes = False
for ind, index_mat in enumerate(objekti.material_slots):
- if(udim_textures):
- tile_list = UVTiling(objekti,ind, texturelist)
- else:
- tile_list = []
-
texcoat = {}
texcoat['color'] = []
texcoat['ao'] = []
@@ -193,17 +168,17 @@ def readtexturefolder(objekti, mat_list, texturelist, is_new, udim_textures): #r
if(udim_textures == False):
for slot_index, texture_info in enumerate(texturelist):
- uv_MODE_mat = 'MAT'
+ uv_MODE_mat = 'MAT'
for index, layer in enumerate(objekti.data.uv_layers):
if(layer.name == texturelist[slot_index][0]):
uv_MODE_mat = 'UV'
break
-
+
if(testi(objekti, texturelist[slot_index][0], index_mat.name, uv_MODE_mat, ind)) :
if texture_info[2] == 'color' or texture_info[2] == 'diffuse':
if(index_mat.material.coat3D_diffuse):
-
+
texcoat['color'].append(texture_info[3])
create_nodes = True
else:
@@ -266,42 +241,51 @@ def readtexturefolder(objekti, mat_list, texturelist, is_new, udim_textures): #r
os.remove(texture_info[3])
create_group_node = True
-
+
else:
for texture_info in texturelist:
if texture_info[2] == 'color' or texture_info[2] == 'diffuse':
- texcoat['color'].append([texture_info[0],texture_info[3]])
- create_nodes = True
+ if texcoat['color'] == [] and texture_info[1] == '1001':
+ texcoat['color'].append(texture_info[3])
+ create_nodes = True
elif texture_info[2] == 'metalness' or texture_info[2] == 'specular' or texture_info[
2] == 'reflection':
- texcoat['metalness'].append([texture_info[0],texture_info[3]])
- create_nodes = True
+ if texcoat['metalness'] == [] and texture_info[1] == '1001':
+ texcoat['metalness'].append(texture_info[3])
+ create_nodes = True
elif texture_info[2] == 'rough' or texture_info[2] == 'roughness':
- texcoat['rough'].append([texture_info[0],texture_info[3]])
- create_nodes = True
+ if texcoat['rough'] == [] and texture_info[1] == '1001':
+ texcoat['rough'].append(texture_info[3])
+ create_nodes = True
elif texture_info[2] == 'nmap' or texture_info[2] == 'normalmap' or texture_info[
2] == 'normal_map' or texture_info[2] == 'normal':
- texcoat['nmap'].append([texture_info[0],texture_info[3]])
- create_nodes = True
+ if texcoat['nmap'] == [] and texture_info[1] == '1001':
+ texcoat['nmap'].append(texture_info[3])
+ create_nodes = True
elif texture_info[2] == 'emissive':
- texcoat['emissive'].append([texture_info[0],texture_info[3]])
- create_nodes = True
+ if texcoat['emissive'] == [] and texture_info[1] == '1001':
+ texcoat['emissive'].append(texture_info[3])
+ create_nodes = True
elif texture_info[2] == 'emissive_power':
- texcoat['emissive_power'].append([texture_info[0],texture_info[3]])
- create_nodes = True
+ if texcoat['emissive_power'] == [] and texture_info[1] == '1001':
+ texcoat['emissive_power'].append(texture_info[3])
+ create_nodes = True
elif texture_info[2] == 'ao':
- texcoat['ao'].append([texture_info[0],texture_info[3]])
- create_nodes = True
+ if texcoat['ao'] == [] and texture_info[1] == '1001':
+ texcoat['ao'].append(texture_info[3])
+ create_nodes = True
elif texture_info[2].startswith('displacement'):
- texcoat['displacement'].append([texture_info[0],texture_info[3]])
- create_nodes = True
+ if texcoat['displacement'] == [] and texture_info[1] == '1001':
+ texcoat['displacement'].append(texture_info[3])
+ create_nodes = True
if texture_info[2] == 'alpha' or texture_info[2] == 'opacity':
- texcoat['alpha'].append([texture_info[0], texture_info[3]])
- create_nodes = True
+ if texcoat['alpha'] == [] and texture_info[1] == '1001':
+ texcoat['alpha'].append(texture_info[3])
+ create_nodes = True
create_group_node = True
-
+
if(create_nodes):
coat3D = bpy.context.scene.coat3D
path3b_n = coat3D.exchangedir
@@ -313,9 +297,9 @@ def readtexturefolder(objekti, mat_list, texturelist, is_new, udim_textures): #r
objekti.coat3D.applink_3b_path = line
export_file.close()
coat3D.remove_path = True
- createnodes(index_mat, texcoat, create_group_node, tile_list, objekti, ind, is_new)
+ createnodes(index_mat, texcoat, create_group_node, objekti, ind, is_new, udim_textures)
-def createnodes(active_mat,texcoat, create_group_node, tile_list, objekti, ind, is_new): # Creates new nodes and link textures into them
+def createnodes(active_mat,texcoat, create_group_node, objekti, ind, is_new, udim_textures): # Creates new nodes and link textures into them
bring_color = True # Meaning of these is to check if we can only update textures or do we need to create new nodes
bring_metalness = True
bring_roughness = True
@@ -325,9 +309,13 @@ def createnodes(active_mat,texcoat, create_group_node, tile_list, objekti, ind,
bring_AO = True
bring_alpha = True
+ active_mat.material.show_transparent_back = False # HACK FOR BLENDER BUG
+
coat3D = bpy.context.scene.coat3D
coatMat = active_mat.material
+ coatMat.blend_method = 'BLEND'
+
if(coatMat.use_nodes == False):
coatMat.use_nodes = True
act_material = coatMat.node_tree
@@ -456,171 +444,43 @@ def createnodes(active_mat,texcoat, create_group_node, tile_list, objekti, ind,
if(out_mat.inputs['Surface'].is_linked == True):
if(bring_color == True and texcoat['color'] != []):
CreateTextureLine(data['color'], act_material, main_mat, texcoat, coat3D, notegroup,
- main_material, applink_tree, out_mat, coatMat, tile_list, objekti, ind, is_new)
+ main_material, applink_tree, out_mat, coatMat, objekti, ind, is_new, udim_textures)
if(bring_metalness == True and texcoat['metalness'] != []):
CreateTextureLine(data['metalness'], act_material, main_mat, texcoat, coat3D, notegroup,
- main_material, applink_tree, out_mat, coatMat, tile_list, objekti, ind, is_new)
+ main_material, applink_tree, out_mat, coatMat, objekti, ind, is_new, udim_textures)
if(bring_roughness == True and texcoat['rough'] != []):
CreateTextureLine(data['rough'], act_material, main_mat, texcoat, coat3D, notegroup,
- main_material, applink_tree, out_mat, coatMat,tile_list, objekti, ind, is_new)
+ main_material, applink_tree, out_mat, coatMat, objekti, ind, is_new, udim_textures)
if(bring_normal == True and texcoat['nmap'] != []):
CreateTextureLine(data['nmap'], act_material, main_mat, texcoat, coat3D, notegroup,
- main_material, applink_tree, out_mat, coatMat, tile_list, objekti, ind, is_new)
+ main_material, applink_tree, out_mat, coatMat, objekti, ind, is_new, udim_textures)
if (bring_emissive == True and texcoat['emissive'] != []):
CreateTextureLine(data['emissive'], act_material, main_mat, texcoat, coat3D, notegroup,
- main_material, applink_tree, out_mat, coatMat, tile_list, objekti, ind, is_new)
+ main_material, applink_tree, out_mat, coatMat, objekti, ind, is_new, udim_textures)
if (bring_displacement == True and texcoat['displacement'] != []):
CreateTextureLine(data['displacement'], act_material, main_mat, texcoat, coat3D, notegroup,
- main_material, applink_tree, out_mat, coatMat, tile_list, objekti, ind, is_new)
+ main_material, applink_tree, out_mat, coatMat, objekti, ind, is_new, udim_textures)
if (bring_alpha == True and texcoat['alpha'] != []):
CreateTextureLine(data['alpha'], act_material, main_mat, texcoat, coat3D, notegroup,
- main_material, applink_tree, out_mat, coatMat, tile_list, objekti, ind, is_new)
-
-
-def CreateTextureLine(type, act_material, main_mat, texcoat, coat3D, notegroup, main_material, applink_tree, out_mat, coatMat, tile_list, objekti, ind, is_new):
-
- if(tile_list):
- texture_name = coatMat.name + '_' + type['name']
- texture_tree = bpy.data.node_groups.new(type="ShaderNodeTree", name=texture_name)
- texture_tree.outputs.new("NodeSocketColor", "Color")
- texture_tree.outputs.new("NodeSocketColor", "Alpha")
- texture_node_tree = act_material.nodes.new('ShaderNodeGroup')
- texture_node_tree.name = '3DC_' + type['name']
- texture_node_tree.node_tree = texture_tree
- texture_node_tree.location[0] = type['node_location'][0]
- texture_node_tree.location[0] -= 400
- texture_node_tree.location[1] = type['node_location'][1]
- notegroupend = texture_tree.nodes.new('NodeGroupOutput')
-
- count = len(tile_list)
- uv_loc = [-1400, 200]
- map_loc = [-1100, 200]
- tex_loc = [-700, 200]
- mix_loc = [-400, 100]
-
- nodes = []
+ main_material, applink_tree, out_mat, coatMat, objekti, ind, is_new, udim_textures)
- for index, tile in enumerate(tile_list):
-
- tex_img_node = texture_tree.nodes.new('ShaderNodeTexImage')
-
- for ind, tex_index in enumerate(texcoat[type['name']]):
- if(tex_index[0] == tile):
- tex_img_node.image = bpy.data.images.load(texcoat[type['name']][ind][1])
- break
- tex_img_node.location = tex_loc
-
- if tex_img_node.image and type['colorspace'] != 'color':
- tex_img_node.image.colorspace_settings.is_data = True
-
- tex_uv_node = texture_tree.nodes.new('ShaderNodeUVMap')
- tex_uv_node.location = uv_loc
- if(is_new):
- tex_uv_node.uv_map = objekti.data.uv_layers[ind].name
- else:
- tex_uv_node.uv_map = objekti.data.uv_layers[0].name
-
- map_node = texture_tree.nodes.new('ShaderNodeMapping')
- map_node.location = map_loc
- map_node.name = '3DC_' + tile
- map_node.vector_type = 'TEXTURE'
-
- tile_int_x = int(tile[3])
- tile_int_y = int(tile[2])
-
- min_node = texture_tree.nodes.new('ShaderNodeVectorMath')
- min_node.operation = "MINIMUM"
- min_node.inputs[1].default_value[0] = tile_int_x - 1
- min_node.inputs[1].default_value[1] = tile_int_y
-
- max_node = texture_tree.nodes.new('ShaderNodeVectorMath')
- max_node.operation = "MAXIMUM"
- max_node.inputs[1].default_value[0] = tile_int_x
- max_node.inputs[1].default_value[1] = tile_int_y + 1
-
-
- if(index == 0):
- nodes.append(tex_img_node.name)
- if(count == 1):
- texture_tree.links.new(tex_img_node.outputs[0], notegroupend.inputs[0])
- texture_tree.links.new(tex_img_node.outputs[1], notegroupend.inputs[1])
-
- if(index == 1):
- mix_node = texture_tree.nodes.new('ShaderNodeMixRGB')
- mix_node.blend_type = 'ADD'
- mix_node.inputs[0].default_value = 1
- mix_node.location = mix_loc
- mix_loc[1] -= 300
- texture_tree.links.new(tex_img_node.outputs[0], mix_node.inputs[2])
- texture_tree.links.new(texture_tree.nodes[nodes[0]].outputs[0], mix_node.inputs[1])
- mix_node_alpha = texture_tree.nodes.new('ShaderNodeMath')
- mix_node_alpha.location = mix_loc
- mix_loc[1] -= 200
- texture_tree.links.new(tex_img_node.outputs[1], mix_node_alpha.inputs[1])
- texture_tree.links.new(texture_tree.nodes[nodes[0]].outputs[1], mix_node_alpha.inputs[0])
- nodes.clear()
- nodes.append(tex_img_node.name)
- nodes.append(mix_node.name)
- nodes.append(mix_node_alpha.name)
-
-
- elif(index > 1):
- mix_node = texture_tree.nodes.new('ShaderNodeMixRGB')
- mix_node.blend_type = 'ADD'
- mix_node.inputs[0].default_value = 1
- mix_node.location = mix_loc
- mix_loc[1] -= 300
- texture_tree.links.new(texture_tree.nodes[nodes[1]].outputs[0], mix_node.inputs[1])
- texture_tree.links.new(tex_img_node.outputs[0], mix_node.inputs[2])
- mix_node_alpha = texture_tree.nodes.new('ShaderNodeMath')
- mix_node_alpha.location = mix_loc
- mix_loc[1] -= 200
- texture_tree.links.new(texture_tree.nodes[nodes[2]].outputs[0], mix_node_alpha.inputs[0])
- texture_tree.links.new(tex_img_node.outputs[1], mix_node_alpha.inputs[1])
-
- nodes.clear()
- nodes.append(tex_img_node.name)
- nodes.append(mix_node.name)
- nodes.append(mix_node_alpha.name)
-
- tex_loc[1] -= 300
- uv_loc[1] -= 300
- map_loc[1] -= 300
-
- texture_tree.links.new(tex_uv_node.outputs[0], map_node.inputs[0])
- texture_tree.links.new(map_node.outputs[0], min_node.inputs[0])
- texture_tree.links.new(min_node.outputs['Vector'], max_node.inputs[0])
- texture_tree.links.new(max_node.outputs['Vector'], tex_img_node.inputs[0])
-
- if(count > 1):
- texture_tree.links.new(mix_node.outputs[0], notegroupend.inputs[0])
- texture_tree.links.new(mix_node_alpha.outputs[0], notegroupend.inputs[1])
-
- if(tile_list):
- node = texture_node_tree
- if(texcoat['alpha'] != []):
- if (type['name'] == 'color'):
- act_material.links.new(node.outputs[1], notegroup.inputs[8])
- else:
- if(type['name'] == 'alpha'):
- act_material.links.new(node.outputs[1], notegroup.inputs[8])
+def CreateTextureLine(type, act_material, main_mat, texcoat, coat3D, notegroup, main_material, applink_tree, out_mat, coatMat, objekti, ind, is_new, udim_textures):
+ node = act_material.nodes.new('ShaderNodeTexImage')
+ uv_node = act_material.nodes.new('ShaderNodeUVMap')
+ if (is_new):
+ uv_node.uv_map = objekti.data.uv_layers[ind].name
else:
- node = act_material.nodes.new('ShaderNodeTexImage')
- uv_node = act_material.nodes.new('ShaderNodeUVMap')
- if (is_new):
- uv_node.uv_map = objekti.data.uv_layers[ind].name
- else:
- uv_node.uv_map = objekti.data.uv_layers[0].name
- act_material.links.new(uv_node.outputs[0], node.inputs[0])
- uv_node.use_custom_color = True
- uv_node.color = (type['node_color'][0], type['node_color'][1], type['node_color'][2])
+ uv_node.uv_map = objekti.data.uv_layers[0].name
+ act_material.links.new(uv_node.outputs[0], node.inputs[0])
+ uv_node.use_custom_color = True
+ uv_node.color = (type['node_color'][0], type['node_color'][1], type['node_color'][2])
node.use_custom_color = True
node.color = (type['node_color'][0],type['node_color'][1],type['node_color'][2])
@@ -632,8 +492,7 @@ def CreateTextureLine(type, act_material, main_mat, texcoat, coat3D, notegroup,
normal_node.color = (type['node_color'][0], type['node_color'][1], type['node_color'][2])
node.location = -671, -510
- if(tile_list == []):
- uv_node.location = -750, -600
+ uv_node.location = -750, -600
normal_node.location = -350, -350
normal_node.name = '3DC_normalnode'
@@ -654,21 +513,26 @@ def CreateTextureLine(type, act_material, main_mat, texcoat, coat3D, notegroup,
if(input_color != -1):
break
- if (tile_list == []):
+ load_image = True
- load_image = True
+ for image in bpy.data.images:
- for image in bpy.data.images:
- if(texcoat[type['name']][0] == image.filepath):
- load_image = False
- node.image = image
- break
+ if(texcoat[type['name']][0] == image.filepath):
+ load_image = False
+ node.image = image
- if (load_image):
- node.image = bpy.data.images.load(texcoat[type['name']][0])
-
- if node.image and type['colorspace'] == 'noncolor':
- node.image.colorspace_settings.is_data = True
+ break
+
+ if (load_image):
+ print('load_image', texcoat[type['name']][0])
+
+ node.image = bpy.data.images.load(texcoat[type['name']][0])
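+            # UDIM sets are loaded as a single tiled image instead of one node per tile.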
+ if(udim_textures):
+ node.image.source = 'TILED'
+
+
+ if node.image and type['colorspace'] == 'noncolor':
+ node.image.colorspace_settings.is_data = True
if (coat3D.createnodes):
@@ -725,10 +589,10 @@ def CreateTextureLine(type, act_material, main_mat, texcoat, coat3D, notegroup,
if(material.name == '3DC_Emission'):
main_material.links.new(applink_tree.outputs[type['input']], material.inputs[0])
break
- if(tile_list == []):
- uv_node.location = node.location
- uv_node.location[0] -= 300
- uv_node.location[1] -= 200
+
+ uv_node.location = node.location
+ uv_node.location[0] -= 300
+ uv_node.location[1] -= 200
else:
node.location = type['node_location'][0], type['node_location'][1]
@@ -824,9 +688,9 @@ def matlab(objekti,mat_list,texturelist,is_new):
''' Check if bind textures with UVs or Materials '''
if(texturelist != []):
-
+
udim_textures = False
- if texturelist[0][0].startswith('100'):
+ if texturelist[0][0].startswith('100') and len(texturelist[0][0]) == 4:
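+            # Texture sets named by a four-digit UDIM tile (e.g. '1001') are treated as UDIM.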
udim_textures = True
if(udim_textures == False):
diff --git a/io_coat3D/texVR.py b/io_coat3D/texVR.py
new file mode 100644
index 00000000..520b3084
--- /dev/null
+++ b/io_coat3D/texVR.py
@@ -0,0 +1,846 @@
+# ***** BEGIN GPL LICENSE BLOCK *****
+#
+#
+# This program is free software; you can redistribute it and/or
+# modify it under the terms of the GNU General Public License
+# as published by the Free Software Foundation; either version 2
+# of the License, or (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this program; if not, write to the Free Software Foundation,
+# Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
+#
+# ***** END GPL LICENCE BLOCK *****
+
+import bpy
+import os
+import re
+import json
+
+def find_index(objekti):
+
+ luku = 0
+ for tex in objekti.active_material.texture_slots:
+ if(not(hasattr(tex,'texture'))):
+ break
+ luku = luku +1
+ return luku
+
+
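+# Replace whatever node setup the FBX importer created with a single Principled BSDF wired to the material output.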
+def RemoveFbxNodes(objekti):
+ Node_Tree = objekti.active_material.node_tree
+ for node in Node_Tree.nodes:
+ if node.type != 'OUTPUT_MATERIAL':
+ Node_Tree.nodes.remove(node)
+ else:
+ output = node
+ output.location = 340,400
+ Prin_mat = Node_Tree.nodes.new(type="ShaderNodeBsdfPrincipled")
+ Prin_mat.location = 13, 375
+
+ Node_Tree.links.new(Prin_mat.outputs[0], output.inputs[0])
+
+
+def UVTiling(objekti, index, texturelist):
+ """ Checks what Tiles are linked with Material """
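+    # Tile names follow the UDIM convention: '10' + V row + U column, so UVs in the 0-1 range give tile '1001'.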
+
+ objekti.coat3D.applink_scale = objekti.scale
+ tiles_index = []
+ tile_number =''
+ for poly in objekti.data.polygons:
+ if (poly.material_index == (index)):
+ loop_index = poly.loop_indices[0]
+ uv_x = objekti.data.uv_layers.active.data[loop_index].uv[0]
+ if(uv_x >= 0 and uv_x <=1):
+ tile_number_x = '1'
+ elif (uv_x >= 1 and uv_x <= 2):
+ tile_number_x = '2'
+ elif (uv_x >= 2 and uv_x <= 3):
+ tile_number_x = '3'
+ elif (uv_x >= 3 and uv_x <= 4):
+ tile_number_x = '4'
+ elif (uv_x >= 4 and uv_x <= 5):
+ tile_number_x = '5'
+ elif (uv_x >= 5 and uv_x <= 6):
+ tile_number_x = '6'
+ elif (uv_x >= 6 and uv_x <= 7):
+ tile_number_x = '7'
+ elif (uv_x >= 7 and uv_x <= 8):
+ tile_number_x = '8'
+ elif (uv_x >= 8 and uv_x <= 9):
+ tile_number_x = '9'
+
+ uv_y = objekti.data.uv_layers.active.data[loop_index].uv[1]
+ if (uv_y >= 0 and uv_y <= 1):
+ tile_number_y = '0'
+ elif (uv_y >= 1 and uv_y <= 2):
+ tile_number_y = '1'
+            elif (uv_y >= 2 and uv_y <= 3):
+                tile_number_y = '2'
+            elif (uv_y >= 3 and uv_y <= 4):
+                tile_number_y = '3'
+            elif (uv_y >= 4 and uv_y <= 5):
+                tile_number_y = '4'
+            elif (uv_y >= 5 and uv_y <= 6):
+                tile_number_y = '5'
+            elif (uv_y >= 6 and uv_y <= 7):
+                tile_number_y = '6'
+            elif (uv_y >= 7 and uv_y <= 8):
+                tile_number_y = '7'
+            elif (uv_y >= 8 and uv_y <= 9):
+                tile_number_y = '8'
+
+ tile_number = '10' + tile_number_y + tile_number_x
+
+ if tile_number not in tiles_index:
+ tiles_index.append(tile_number)
+
+ return tiles_index
+
+def updatetextures(objekti): # Update 3DC textures
+
+ for index_mat in objekti.material_slots:
+
+ for node in index_mat.material.node_tree.nodes:
+ if (node.type == 'TEX_IMAGE'):
+                if node.name in ('3DC_color', '3DC_metalness', '3DC_rough', '3DC_nmap',
+                                 '3DC_displacement', '3DC_emissive', '3DC_AO', '3DC_alpha'):
+                    node.image.reload()
+
+ for index_node_group in bpy.data.node_groups:
+
+ for node in index_node_group.nodes:
+ if (node.type == 'TEX_IMAGE'):
+                if node.name in ('3DC_color', '3DC_metalness', '3DC_rough', '3DC_nmap',
+                                 '3DC_displacement', '3DC_emissive', '3DC_AO', '3DC_alpha'):
+                    node.image.reload()
+
+
+def readtexturefolder(objekti, mat_list, texturelist, is_new, udim_textures): #read textures from texture file
+
+    # Check whether textures are matched by UV set or by material
+ create_nodes = False
+ for ind, index_mat in enumerate(objekti.material_slots):
+
+ if(udim_textures):
+ tile_list = UVTiling(objekti,ind, texturelist)
+ else:
+ tile_list = []
+
+ texcoat = {}
+ texcoat['color'] = []
+ texcoat['ao'] = []
+ texcoat['rough'] = []
+ texcoat['metalness'] = []
+ texcoat['nmap'] = []
+ texcoat['emissive'] = []
+ texcoat['emissive_power'] = []
+ texcoat['displacement'] = []
+ texcoat['alpha'] = []
+
+ create_group_node = False
+ if(udim_textures == False):
+ for slot_index, texture_info in enumerate(texturelist):
+
+ uv_MODE_mat = 'MAT'
+ for index, layer in enumerate(objekti.data.uv_layers):
+ if(layer.name == texturelist[slot_index][0]):
+ uv_MODE_mat = 'UV'
+ break
+
+ if(texture_info[0] == index_mat.name):
+ if texture_info[2] == 'color' or texture_info[2] == 'diffuse':
+ if(index_mat.material.coat3D_diffuse):
+
+ texcoat['color'].append(texture_info[3])
+ create_nodes = True
+ else:
+ os.remove(texture_info[3])
+
+ elif texture_info[2] == 'metalness' or texture_info[2] == 'specular' or texture_info[2] == 'reflection':
+ if (index_mat.material.coat3D_metalness):
+ texcoat['metalness'].append(texture_info[3])
+ create_nodes = True
+ else:
+ os.remove(texture_info[3])
+
+ elif texture_info[2] == 'rough' or texture_info[2] == 'roughness':
+ if (index_mat.material.coat3D_roughness):
+ texcoat['rough'].append(texture_info[3])
+ create_nodes = True
+ else:
+ os.remove(texture_info[3])
+
+ elif texture_info[2] == 'nmap' or texture_info[2] == 'normalmap' or texture_info[2] == 'normal_map' or texture_info[2] == 'normal':
+ if (index_mat.material.coat3D_normal):
+ texcoat['nmap'].append(texture_info[3])
+ create_nodes = True
+ else:
+ os.remove(texture_info[3])
+
+ elif texture_info[2] == 'emissive':
+ if (index_mat.material.coat3D_emissive):
+ texcoat['emissive'].append(texture_info[3])
+ create_nodes = True
+ else:
+ os.remove(texture_info[3])
+
+ elif texture_info[2] == 'emissive_power':
+ if (index_mat.material.coat3D_emissive):
+ texcoat['emissive_power'].append(texture_info[3])
+ create_nodes = True
+ else:
+ os.remove(texture_info[3])
+
+ elif texture_info[2] == 'ao':
+ if (index_mat.material.coat3D_ao):
+ texcoat['ao'].append(texture_info[3])
+ create_nodes = True
+ else:
+ os.remove(texture_info[3])
+
+ elif texture_info[2].startswith('displacement'):
+ if (index_mat.material.coat3D_displacement):
+ texcoat['displacement'].append(texture_info[3])
+ create_nodes = True
+ else:
+ os.remove(texture_info[3])
+
+ elif texture_info[2] == 'alpha' or texture_info[2] == 'opacity':
+ if (index_mat.material.coat3D_alpha):
+ texcoat['alpha'].append(texture_info[3])
+ create_nodes = True
+ else:
+ os.remove(texture_info[3])
+
+ create_group_node = True
+
+ else:
+ for texture_info in texturelist:
+
+ if texture_info[2] == 'color' or texture_info[2] == 'diffuse':
+ texcoat['color'].append([texture_info[0],texture_info[3]])
+ create_nodes = True
+ elif texture_info[2] == 'metalness' or texture_info[2] == 'specular' or texture_info[
+ 2] == 'reflection':
+ texcoat['metalness'].append([texture_info[0],texture_info[3]])
+ create_nodes = True
+ elif texture_info[2] == 'rough' or texture_info[2] == 'roughness':
+ texcoat['rough'].append([texture_info[0],texture_info[3]])
+ create_nodes = True
+ elif texture_info[2] == 'nmap' or texture_info[2] == 'normalmap' or texture_info[
+ 2] == 'normal_map' or texture_info[2] == 'normal':
+ texcoat['nmap'].append([texture_info[0],texture_info[3]])
+ create_nodes = True
+ elif texture_info[2] == 'emissive':
+ texcoat['emissive'].append([texture_info[0],texture_info[3]])
+ create_nodes = True
+ elif texture_info[2] == 'emissive_power':
+ texcoat['emissive_power'].append([texture_info[0],texture_info[3]])
+ create_nodes = True
+ elif texture_info[2] == 'ao':
+ texcoat['ao'].append([texture_info[0],texture_info[3]])
+ create_nodes = True
+ elif texture_info[2].startswith('displacement'):
+ texcoat['displacement'].append([texture_info[0],texture_info[3]])
+ create_nodes = True
+ if texture_info[2] == 'alpha' or texture_info[2] == 'opacity':
+ texcoat['alpha'].append([texture_info[0], texture_info[3]])
+ create_nodes = True
+ create_group_node = True
+
+ if(create_nodes):
+ coat3D = bpy.context.scene.coat3D
+ path3b_n = coat3D.exchangedir
+ path3b_n += ('%slast_saved_3b_file.txt' % (os.sep))
+
+ if (os.path.isfile(path3b_n)):
+ export_file = open(path3b_n)
+ for line in export_file:
+ objekti.coat3D.applink_3b_path = line
+ export_file.close()
+ coat3D.remove_path = True
+ createnodes(index_mat, texcoat, create_group_node, tile_list, objekti, ind, is_new)
+
+def createnodes(active_mat,texcoat, create_group_node, tile_list, objekti, ind, is_new): # Creates new nodes and link textures into them
+ bring_color = True # Meaning of these is to check if we can only update textures or do we need to create new nodes
+ bring_metalness = True
+ bring_roughness = True
+ bring_normal = True
+ bring_displacement = True
+ bring_emissive = True
+ bring_AO = True
+ bring_alpha = True
+
+ coat3D = bpy.context.scene.coat3D
+ coatMat = active_mat.material
+
+ if(coatMat.use_nodes == False):
+ coatMat.use_nodes = True
+
+ act_material = coatMat.node_tree
+ main_material = coatMat.node_tree
+ applink_group_node = False
+
+    # Check which 3DC_* texture nodes already exist so that only the missing ones are created below
+
+ for node in coatMat.node_tree.nodes:
+ if (node.type == 'OUTPUT_MATERIAL'):
+ out_mat = node
+ break
+
+ for node in act_material.nodes:
+ if(node.name == '3DC_Applink' and node.type == 'GROUP'):
+ applink_group_node = True
+ act_material = node.node_tree
+ applink_tree = node
+ break
+
+ for node in act_material.nodes:
+ if (node.type != 'GROUP'):
+ if (node.type != 'GROUP_OUTPUT'):
+ if (node.type == 'TEX_IMAGE'):
+ if (node.name == '3DC_color'):
+ bring_color = False
+ elif (node.name == '3DC_metalness'):
+ bring_metalness = False
+ elif (node.name == '3DC_rough'):
+ bring_roughness = False
+ elif (node.name == '3DC_nmap'):
+ bring_normal = False
+ elif (node.name == '3DC_displacement'):
+ bring_displacement = False
+ elif (node.name == '3DC_emissive'):
+ bring_emissive = False
+ elif (node.name == '3DC_AO'):
+ bring_AO = False
+ elif (node.name == '3DC_alpha'):
+ bring_alpha = False
+ elif (node.type == 'GROUP' and node.name.startswith('3DC_')):
+ if (node.name == '3DC_color'):
+ bring_color = False
+ elif (node.name == '3DC_metalness'):
+ bring_metalness = False
+ elif (node.name == '3DC_rough'):
+ bring_roughness = False
+ elif (node.name == '3DC_nmap'):
+ bring_normal = False
+ elif (node.name == '3DC_displacement'):
+ bring_displacement = False
+ elif (node.name == '3DC_emissive'):
+ bring_emissive = False
+ elif (node.name == '3DC_AO'):
+ bring_AO = False
+ elif (node.name == '3DC_alpha'):
+ bring_alpha = False
+
+ #Let's start to build new node tree. Let's start linking with Material Output
+
+ if(create_group_node):
+ if(applink_group_node == False):
+ main_mat2 = out_mat.inputs['Surface'].links[0].from_node
+ for input_ind in main_mat2.inputs:
+ if(input_ind.is_linked):
+ main_mat3 = input_ind.links[0].from_node
+ if(main_mat3.type == 'BSDF_PRINCIPLED'):
+ main_mat = main_mat3
+
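+            # Build the shared '3DC_Applink' node group with one output socket per texture channel.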
+ group_tree = bpy.data.node_groups.new( type="ShaderNodeTree", name="3DC_Applink")
+ group_tree.outputs.new("NodeSocketColor", "Color")
+ group_tree.outputs.new("NodeSocketColor", "Metallic")
+ group_tree.outputs.new("NodeSocketColor", "Roughness")
+ group_tree.outputs.new("NodeSocketVector", "Normal map")
+ group_tree.outputs.new("NodeSocketColor", "Emissive")
+ group_tree.outputs.new("NodeSocketColor", "Displacement")
+ group_tree.outputs.new("NodeSocketColor", "Emissive Power")
+ group_tree.outputs.new("NodeSocketColor", "AO")
+ group_tree.outputs.new("NodeSocketColor", "Alpha")
+ applink_tree = act_material.nodes.new('ShaderNodeGroup')
+ applink_tree.name = '3DC_Applink'
+ applink_tree.node_tree = group_tree
+ applink_tree.location = -400, -100
+ act_material = group_tree
+ notegroup = act_material.nodes.new('NodeGroupOutput')
+ notegroup.location = 220, -260
+
+ if(texcoat['emissive'] != []):
+ from_output = out_mat.inputs['Surface'].links[0].from_node
+ if(from_output.type == 'BSDF_PRINCIPLED'):
+ add_shader = main_material.nodes.new('ShaderNodeAddShader')
+ emission_shader = main_material.nodes.new('ShaderNodeEmission')
+
+ emission_shader.name = '3DC_Emission'
+
+ add_shader.location = 420, 110
+ emission_shader.location = 70, -330
+ out_mat.location = 670, 130
+
+ main_material.links.new(from_output.outputs[0], add_shader.inputs[0])
+ main_material.links.new(add_shader.outputs[0], out_mat.inputs[0])
+ main_material.links.new(emission_shader.outputs[0], add_shader.inputs[1])
+ main_mat = from_output
+ else:
+ main_mat = out_mat.inputs['Surface'].links[0].from_node
+
+ else:
+ main_mat = out_mat.inputs['Surface'].links[0].from_node
+ index = 0
+ for node in coatMat.node_tree.nodes:
+ if (node.type == 'GROUP' and node.name =='3DC_Applink'):
+ for in_node in node.node_tree.nodes:
+ if(in_node.type == 'GROUP_OUTPUT'):
+ notegroup = in_node
+ index = 1
+ break
+ if(index == 1):
+ break
+
+ # READ DATA.JSON FILE
+
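+    # data.json ships with the add-on and maps each texture channel to its node settings (shader inputs, helper-node flags, node colors).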
+ json_address = os.path.dirname(bpy.app.binary_path) + os.sep + str(bpy.app.version[0]) + '.' + str(bpy.app.version[1]) + os.sep + 'scripts' + os.sep + 'addons' + os.sep + 'io_coat3D' + os.sep + 'data.json'
+ with open(json_address, encoding='utf-8') as data_file:
+ data = json.loads(data_file.read())
+
+ if(out_mat.inputs['Surface'].is_linked == True):
+ if(bring_color == True and texcoat['color'] != []):
+ CreateTextureLine(data['color'], act_material, main_mat, texcoat, coat3D, notegroup,
+ main_material, applink_tree, out_mat, coatMat, tile_list, objekti, ind, is_new)
+
+ if(bring_metalness == True and texcoat['metalness'] != []):
+ CreateTextureLine(data['metalness'], act_material, main_mat, texcoat, coat3D, notegroup,
+ main_material, applink_tree, out_mat, coatMat, tile_list, objekti, ind, is_new)
+
+ if(bring_roughness == True and texcoat['rough'] != []):
+ CreateTextureLine(data['rough'], act_material, main_mat, texcoat, coat3D, notegroup,
+ main_material, applink_tree, out_mat, coatMat,tile_list, objekti, ind, is_new)
+
+ if(bring_normal == True and texcoat['nmap'] != []):
+ CreateTextureLine(data['nmap'], act_material, main_mat, texcoat, coat3D, notegroup,
+ main_material, applink_tree, out_mat, coatMat, tile_list, objekti, ind, is_new)
+
+ if (bring_emissive == True and texcoat['emissive'] != []):
+ CreateTextureLine(data['emissive'], act_material, main_mat, texcoat, coat3D, notegroup,
+ main_material, applink_tree, out_mat, coatMat, tile_list, objekti, ind, is_new)
+
+ if (bring_displacement == True and texcoat['displacement'] != []):
+ CreateTextureLine(data['displacement'], act_material, main_mat, texcoat, coat3D, notegroup,
+ main_material, applink_tree, out_mat, coatMat, tile_list, objekti, ind, is_new)
+ if (bring_alpha == True and texcoat['alpha'] != []):
+ CreateTextureLine(data['alpha'], act_material, main_mat, texcoat, coat3D, notegroup,
+ main_material, applink_tree, out_mat, coatMat, tile_list, objekti, ind, is_new)
+
+
+def CreateTextureLine(type, act_material, main_mat, texcoat, coat3D, notegroup, main_material, applink_tree, out_mat, coatMat, tile_list, objekti, ind, is_new):
+
+ if(tile_list):
+ texture_name = coatMat.name + '_' + type['name']
+ texture_tree = bpy.data.node_groups.new(type="ShaderNodeTree", name=texture_name)
+ texture_tree.outputs.new("NodeSocketColor", "Color")
+ texture_tree.outputs.new("NodeSocketColor", "Alpha")
+ texture_node_tree = act_material.nodes.new('ShaderNodeGroup')
+ texture_node_tree.name = '3DC_' + type['name']
+ texture_node_tree.node_tree = texture_tree
+ texture_node_tree.location[0] = type['node_location'][0]
+ texture_node_tree.location[0] -= 400
+ texture_node_tree.location[1] = type['node_location'][1]
+ notegroupend = texture_tree.nodes.new('NodeGroupOutput')
+
+ count = len(tile_list)
+ uv_loc = [-1400, 200]
+ map_loc = [-1100, 200]
+ tex_loc = [-700, 200]
+ mix_loc = [-400, 100]
+
+ nodes = []
+
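+        # One image node per UDIM tile; from the second tile on, results are combined with additive MixRGB/Math nodes.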
+ for index, tile in enumerate(tile_list):
+
+ tex_img_node = texture_tree.nodes.new('ShaderNodeTexImage')
+
+ for ind, tex_index in enumerate(texcoat[type['name']]):
+ if(tex_index[0] == tile):
+ tex_img_node.image = bpy.data.images.load(texcoat[type['name']][ind][1])
+ break
+ tex_img_node.location = tex_loc
+
+ if tex_img_node.image and type['colorspace'] != 'color':
+ tex_img_node.image.colorspace_settings.is_data = True
+
+ tex_uv_node = texture_tree.nodes.new('ShaderNodeUVMap')
+ tex_uv_node.location = uv_loc
+ if(is_new):
+ tex_uv_node.uv_map = objekti.data.uv_layers[ind].name
+ else:
+ tex_uv_node.uv_map = objekti.data.uv_layers[0].name
+
+ map_node = texture_tree.nodes.new('ShaderNodeMapping')
+ map_node.location = map_loc
+ map_node.name = '3DC_' + tile
+ map_node.vector_type = 'TEXTURE'
+
+ tile_int_x = int(tile[3])
+ tile_int_y = int(tile[2])
+
+ min_node = texture_tree.nodes.new('ShaderNodeVectorMath')
+ min_node.operation = "MINIMUM"
+ min_node.inputs[1].default_value[0] = tile_int_x - 1
+ min_node.inputs[1].default_value[1] = tile_int_y
+
+ max_node = texture_tree.nodes.new('ShaderNodeVectorMath')
+ max_node.operation = "MAXIMUM"
+ max_node.inputs[1].default_value[0] = tile_int_x
+ max_node.inputs[1].default_value[1] = tile_int_y + 1
+
+
+ if(index == 0):
+ nodes.append(tex_img_node.name)
+ if(count == 1):
+ texture_tree.links.new(tex_img_node.outputs[0], notegroupend.inputs[0])
+ texture_tree.links.new(tex_img_node.outputs[1], notegroupend.inputs[1])
+
+ if(index == 1):
+ mix_node = texture_tree.nodes.new('ShaderNodeMixRGB')
+ mix_node.blend_type = 'ADD'
+ mix_node.inputs[0].default_value = 1
+ mix_node.location = mix_loc
+ mix_loc[1] -= 300
+ texture_tree.links.new(tex_img_node.outputs[0], mix_node.inputs[2])
+ texture_tree.links.new(texture_tree.nodes[nodes[0]].outputs[0], mix_node.inputs[1])
+ mix_node_alpha = texture_tree.nodes.new('ShaderNodeMath')
+ mix_node_alpha.location = mix_loc
+ mix_loc[1] -= 200
+ texture_tree.links.new(tex_img_node.outputs[1], mix_node_alpha.inputs[1])
+ texture_tree.links.new(texture_tree.nodes[nodes[0]].outputs[1], mix_node_alpha.inputs[0])
+ nodes.clear()
+ nodes.append(tex_img_node.name)
+ nodes.append(mix_node.name)
+ nodes.append(mix_node_alpha.name)
+
+
+ elif(index > 1):
+ mix_node = texture_tree.nodes.new('ShaderNodeMixRGB')
+ mix_node.blend_type = 'ADD'
+ mix_node.inputs[0].default_value = 1
+ mix_node.location = mix_loc
+ mix_loc[1] -= 300
+ texture_tree.links.new(texture_tree.nodes[nodes[1]].outputs[0], mix_node.inputs[1])
+ texture_tree.links.new(tex_img_node.outputs[0], mix_node.inputs[2])
+ mix_node_alpha = texture_tree.nodes.new('ShaderNodeMath')
+ mix_node_alpha.location = mix_loc
+ mix_loc[1] -= 200
+ texture_tree.links.new(texture_tree.nodes[nodes[2]].outputs[0], mix_node_alpha.inputs[0])
+ texture_tree.links.new(tex_img_node.outputs[1], mix_node_alpha.inputs[1])
+
+ nodes.clear()
+ nodes.append(tex_img_node.name)
+ nodes.append(mix_node.name)
+ nodes.append(mix_node_alpha.name)
+
+ tex_loc[1] -= 300
+ uv_loc[1] -= 300
+ map_loc[1] -= 300
+
+ texture_tree.links.new(tex_uv_node.outputs[0], map_node.inputs[0])
+ texture_tree.links.new(map_node.outputs[0], min_node.inputs[0])
+ texture_tree.links.new(min_node.outputs['Vector'], max_node.inputs[0])
+ texture_tree.links.new(max_node.outputs['Vector'], tex_img_node.inputs[0])
+
+ if(count > 1):
+ texture_tree.links.new(mix_node.outputs[0], notegroupend.inputs[0])
+ texture_tree.links.new(mix_node_alpha.outputs[0], notegroupend.inputs[1])
+
+ if(tile_list):
+ node = texture_node_tree
+ if(texcoat['alpha'] != []):
+ if (type['name'] == 'color'):
+ act_material.links.new(node.outputs[1], notegroup.inputs[8])
+ else:
+ if(type['name'] == 'alpha'):
+ act_material.links.new(node.outputs[1], notegroup.inputs[8])
+
+
+ else:
+ node = act_material.nodes.new('ShaderNodeTexImage')
+ uv_node = act_material.nodes.new('ShaderNodeUVMap')
+
+ uv_node.uv_map = objekti.data.uv_layers[0].name
+ act_material.links.new(uv_node.outputs[0], node.inputs[0])
+ uv_node.use_custom_color = True
+ uv_node.color = (type['node_color'][0], type['node_color'][1], type['node_color'][2])
+
+ node.use_custom_color = True
+ node.color = (type['node_color'][0],type['node_color'][1],type['node_color'][2])
+
+
+ if type['name'] == 'nmap':
+ normal_node = act_material.nodes.new('ShaderNodeNormalMap')
+ normal_node.use_custom_color = True
+ normal_node.color = (type['node_color'][0], type['node_color'][1], type['node_color'][2])
+
+ node.location = -671, -510
+ if(tile_list == []):
+ uv_node.location = -750, -600
+ normal_node.location = -350, -350
+ normal_node.name = '3DC_normalnode'
+
+ elif type['name'] == 'displacement':
+ disp_node = main_material.nodes.new('ShaderNodeDisplacement')
+
+ node.location = -630, -1160
+ disp_node.location = 90, -460
+ disp_node.inputs[2].default_value = 0.1
+ disp_node.name = '3DC_dispnode'
+
+ node.name = '3DC_' + type['name']
+ node.label = type['name']
+
+ if (type['name'] != 'displacement'):
+ for input_index in type['find_input']:
+ input_color = main_mat.inputs.find(input_index)
+ if(input_color != -1):
+ break
+
+ if (tile_list == []):
+
+ load_image = True
+
+ for image in bpy.data.images:
+ if(texcoat[type['name']][0] == image.filepath):
+ load_image = False
+ node.image = image
+ break
+
+ if (load_image):
+ node.image = bpy.data.images.load(texcoat[type['name']][0])
+
+ if node.image and type['colorspace'] == 'noncolor':
+ node.image.colorspace_settings.is_data = True
+
+ if (coat3D.createnodes):
+
+ if(type['name'] == 'nmap'):
+ act_material.links.new(node.outputs[0], normal_node.inputs[1])
+ if(input_color != -1):
+ act_material.links.new(normal_node.outputs[0], main_mat.inputs[input_color])
+
+ act_material.links.new(normal_node.outputs[0], notegroup.inputs[type['input']])
+ if (main_mat.inputs[input_color].name == 'Normal' and input_color != -1):
+ main_material.links.new(applink_tree.outputs[type['input']], main_mat.inputs[input_color])
+
+ elif (type['name'] == 'displacement'):
+
+ rampnode = act_material.nodes.new('ShaderNodeValToRGB')
+ rampnode.name = '3DC_ColorRamp'
+ rampnode.use_custom_color = True
+ rampnode.color = (type['node_color'][0], type['node_color'][1], type['node_color'][2])
+ rampnode.location = -270, -956
+
+ act_material.links.new(node.outputs[0], rampnode.inputs[0])
+ act_material.links.new(rampnode.outputs[0], notegroup.inputs[5])
+
+ main_material.links.new(applink_tree.outputs[5], disp_node.inputs[0])
+ main_material.links.new(disp_node.outputs[0], out_mat.inputs[2])
+ coatMat.cycles.displacement_method = 'BOTH'
+
+ else:
+ if (texcoat['alpha'] != []):
+ if (type['name'] == 'alpha'):
+ act_material.links.new(node.outputs[1], notegroup.inputs[8])
+ else:
+ if (type['name'] == 'color'):
+ act_material.links.new(node.outputs[1], notegroup.inputs[8])
+ if(type['name'] != 'alpha'):
+ huenode = createExtraNodes(act_material, node, type, notegroup)
+ else:
+ huenode = node
+ huenode.location = -100, -800
+
+ if(type['name'] != 'alpha'):
+ act_material.links.new(huenode.outputs[0], notegroup.inputs[type['input']])
+ if (main_mat.type != 'MIX_SHADER' and input_color != -1):
+ main_material.links.new(applink_tree.outputs[type['input']], main_mat.inputs[input_color])
+ if(type['name'] == 'color'): #Alpha connection into Principled shader
+ main_material.links.new(applink_tree.outputs['Alpha'], main_mat.inputs['Alpha'])
+
+ else:
+ location = main_mat.location
+ #applink_tree.location = main_mat.location[0], main_mat.location[1] + 200
+
+ if(type['name'] == 'emissive'):
+ for material in main_material.nodes:
+ if(material.name == '3DC_Emission'):
+ main_material.links.new(applink_tree.outputs[type['input']], material.inputs[0])
+ break
+ if(tile_list == []):
+ uv_node.location = node.location
+ uv_node.location[0] -= 300
+ uv_node.location[1] -= 200
+
+ else:
+ node.location = type['node_location'][0], type['node_location'][1]
+ if (tile_list == []):
+ uv_node.location = node.location
+ uv_node.location[0] -= 300
+ act_material.links.new(node.outputs[0], notegroup.inputs[type['input']])
+ if (input_color != -1):
+ main_material.links.new(applink_tree.outputs[type['input']], main_mat.inputs[input_color])
+
+
+def createExtraNodes(act_material, node, type, notegroup):
+
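+    # Chain an RGB Curve, an optional ColorRamp and a Hue/Saturation (or Multiply) node between the texture and the group output so the channel can be adjusted inside Blender.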
+ curvenode = act_material.nodes.new('ShaderNodeRGBCurve')
+ curvenode.name = '3DC_RGBCurve'
+ curvenode.use_custom_color = True
+ curvenode.color = (type['node_color'][0], type['node_color'][1], type['node_color'][2])
+
+ if(type['huenode'] == 'yes'):
+ huenode = act_material.nodes.new('ShaderNodeHueSaturation')
+ huenode.name = '3DC_HueSaturation'
+ huenode.use_custom_color = True
+ huenode.color = (type['node_color'][0], type['node_color'][1], type['node_color'][2])
+ else:
+ huenode = act_material.nodes.new('ShaderNodeMath')
+ huenode.name = '3DC_HueSaturation'
+ huenode.operation = 'MULTIPLY'
+ huenode.inputs[1].default_value = 1
+ huenode.use_custom_color = True
+ huenode.color = (type['node_color'][0], type['node_color'][1], type['node_color'][2])
+
+
+ if(type['rampnode'] == 'yes'):
+ rampnode = act_material.nodes.new('ShaderNodeValToRGB')
+ rampnode.name = '3DC_ColorRamp'
+ rampnode.use_custom_color = True
+ rampnode.color = (type['node_color'][0], type['node_color'][1], type['node_color'][2])
+
+ if (type['rampnode'] == 'yes'):
+ act_material.links.new(node.outputs[0], curvenode.inputs[1])
+ act_material.links.new(curvenode.outputs[0], rampnode.inputs[0])
+ if(type['huenode'] == 'yes'):
+ act_material.links.new(rampnode.outputs[0], huenode.inputs[4])
+ else:
+ act_material.links.new(rampnode.outputs[0], huenode.inputs[0])
+ else:
+ act_material.links.new(node.outputs[0], curvenode.inputs[1])
+ if (type['huenode'] == 'yes'):
+ act_material.links.new(curvenode.outputs[0], huenode.inputs[4])
+ else:
+ act_material.links.new(curvenode.outputs[0], huenode.inputs[0])
+
+ if type['name'] == 'metalness':
+ node.location = -1300, 119
+ curvenode.location = -1000, 113
+ rampnode.location = -670, 115
+ huenode.location = -345, 118
+
+ elif type['name'] == 'rough':
+ node.location = -1300, -276
+ curvenode.location = -1000, -245
+ rampnode.location = -670, -200
+ huenode.location = -340, -100
+
+ elif type['name'] == 'color':
+ node.location = -990, 530
+ curvenode.location = -660, 480
+ huenode.location = -337, 335
+
+ elif type['name'] == 'emissive':
+ node.location = -1200, -900
+ curvenode.location = -900, -900
+ huenode.location = -340, -700
+
+ elif type['name'] == 'alpha':
+ node.location = -1200, -1200
+ curvenode.location = -900, -1250
+ rampnode.location = -600, -1200
+ huenode.location = -300, -1200
+
+ if(type['name'] == 'color'):
+ node_vertex = act_material.nodes.new('ShaderNodeVertexColor')
+ node_mixRGB = act_material.nodes.new('ShaderNodeMixRGB')
+ node_vectormath = act_material.nodes.new('ShaderNodeVectorMath')
+
+ node_mixRGB.blend_type = 'MULTIPLY'
+ node_mixRGB.inputs[0].default_value = 1
+
+ node_vectormath.operation = 'MULTIPLY'
+ node_vectormath.inputs[1].default_value = [2,2,2]
+
+ node_vertex.layer_name = 'Col'
+
+ node_vertex.location = -337, 525
+ node_mixRGB.location = 0, 425
+
+ act_material.links.new(node_vertex.outputs[0], node_mixRGB.inputs[1])
+ act_material.links.new(huenode.outputs[0], node_mixRGB.inputs[2])
+ act_material.links.new(node_vertex.outputs[1], notegroup.inputs[8])
+ act_material.links.new(node_mixRGB.outputs[0], node_vectormath.inputs[0])
+
+ return node_vectormath
+
+ return huenode
+
+def matlab(objekti,mat_list,texturelist,is_new):
+
+    print('Welcome to the facture matlab function')
+
+    ''' FBX materials: remove all nodes and create a Principled BSDF node '''
+ if(is_new):
+ RemoveFbxNodes(objekti)
+
+ '''Main Loop for Texture Update'''
+
+ updatetextures(objekti)
+
+ ''' Check if bind textures with UVs or Materials '''
+
+ if(texturelist != []):
+
+ udim_textures = False
+ if texturelist[0][0].startswith('100'):
+ udim_textures = True
+
+ if(udim_textures == False):
+ readtexturefolder(objekti,mat_list,texturelist,is_new, udim_textures)
+ else:
+ path = texturelist[0][3]
+ only_name = os.path.basename(path)
+ if(only_name.startswith(objekti.coat3D.applink_index)):
+ readtexturefolder(objekti, mat_list, texturelist, is_new, udim_textures)
+
+
+ return('FINISHED')
diff --git a/io_mesh_ply/__init__.py b/io_mesh_ply/__init__.py
index 5e12bfce..a3f08ebd 100644
--- a/io_mesh_ply/__init__.py
+++ b/io_mesh_ply/__init__.py
@@ -20,10 +20,10 @@
bl_info = {
"name": "Stanford PLY format",
- "author": "Bruce Merry, Campbell Barton",
- "version": (1, 1, 0),
- "blender": (2, 82, 0),
- "location": "File > Import-Export",
+    "author": "Bruce Merry, Campbell Barton, Bastien Montagne",
+ "version": (2, 1, 0),
+ "blender": (2, 90, 0),
+ "location": "File > Import/Export",
"description": "Import-Export PLY mesh data with UVs and vertex colors",
"doc_url": "{BLENDER_MANUAL_URL}/addons/import_export/mesh_ply.html",
"support": 'OFFICIAL',
@@ -52,7 +52,7 @@ from bpy_extras.io_utils import (
ImportHelper,
ExportHelper,
axis_conversion,
- orientation_helper
+ orientation_helper,
)
@@ -64,11 +64,9 @@ class ImportPLY(bpy.types.Operator, ImportHelper):
files: CollectionProperty(
name="File Path",
- description=(
- "File path used for importing "
- "the PLY file"
- ),
- type=bpy.types.OperatorFileListElement)
+ description="File path used for importing the PLY file",
+ type=bpy.types.OperatorFileListElement,
+ )
    # Hide operator properties, rest of this is managed in C. See WM_operator_properties_filesel().
hide_props_region: BoolProperty(
@@ -84,17 +82,23 @@ class ImportPLY(bpy.types.Operator, ImportHelper):
def execute(self, context):
import os
+ from . import import_ply
+
+ context.window.cursor_set('WAIT')
+
+ paths = [
+ os.path.join(self.directory, name.name)
+ for name in self.files
+ ]
- paths = [os.path.join(self.directory, name.name)
- for name in self.files]
if not paths:
paths.append(self.filepath)
- from . import import_ply
-
for path in paths:
import_ply.load(self, context, path)
+ context.window.cursor_set('DEFAULT')
+
return {'FINISHED'}
@@ -107,6 +111,10 @@ class ExportPLY(bpy.types.Operator, ExportHelper):
filename_ext = ".ply"
filter_glob: StringProperty(default="*.ply", options={'HIDDEN'})
+ use_ascii: BoolProperty(
+ name="ASCII",
+ description="Export using ASCII file format, otherwise use binary",
+ )
use_selection: BoolProperty(
name="Selection Only",
description="Export selected objects only",
@@ -120,10 +128,8 @@ class ExportPLY(bpy.types.Operator, ExportHelper):
use_normals: BoolProperty(
name="Normals",
description=(
- "Export Normals for smooth and "
- "hard shaded faces "
- "(hard shaded faces will be exported "
- "as individual faces)"
+ "Export Normals for smooth and hard shaded faces "
+ "(hard shaded faces will be exported as individual faces)"
),
default=True,
)
@@ -137,17 +143,18 @@ class ExportPLY(bpy.types.Operator, ExportHelper):
description="Export the active vertex color layer",
default=True,
)
-
global_scale: FloatProperty(
name="Scale",
- min=0.01, max=1000.0,
+ min=0.01,
+ max=1000.0,
default=1.0,
)
def execute(self, context):
+ from mathutils import Matrix
from . import export_ply
- from mathutils import Matrix
+ context.window.cursor_set('WAIT')
keywords = self.as_keywords(
ignore=(
@@ -164,13 +171,22 @@ class ExportPLY(bpy.types.Operator, ExportHelper):
).to_4x4() @ Matrix.Scale(self.global_scale, 4)
keywords["global_matrix"] = global_matrix
- filepath = self.filepath
- filepath = bpy.path.ensure_ext(filepath, self.filename_ext)
+ export_ply.save(context, **keywords)
- return export_ply.save(self, context, **keywords)
+ context.window.cursor_set('DEFAULT')
+
+ return {'FINISHED'}
def draw(self, context):
- pass
+ layout = self.layout
+ layout.use_property_split = True
+ layout.use_property_decorate = False
+
+ sfile = context.space_data
+ operator = sfile.active_operator
+
+ col = layout.column(heading="Format")
+ col.prop(operator, "use_ascii")
class PLY_PT_export_include(bpy.types.Panel):
@@ -189,7 +205,7 @@ class PLY_PT_export_include(bpy.types.Panel):
def draw(self, context):
layout = self.layout
layout.use_property_split = True
- layout.use_property_decorate = False # No animation.
+ layout.use_property_decorate = False
sfile = context.space_data
operator = sfile.active_operator
@@ -213,7 +229,7 @@ class PLY_PT_export_transform(bpy.types.Panel):
def draw(self, context):
layout = self.layout
layout.use_property_split = True
- layout.use_property_decorate = False # No animation.
+ layout.use_property_decorate = False
sfile = context.space_data
operator = sfile.active_operator
@@ -239,7 +255,7 @@ class PLY_PT_export_geometry(bpy.types.Panel):
def draw(self, context):
layout = self.layout
layout.use_property_split = True
- layout.use_property_decorate = False # No animation.
+ layout.use_property_decorate = False
sfile = context.space_data
operator = sfile.active_operator
diff --git a/io_mesh_ply/export_ply.py b/io_mesh_ply/export_ply.py
index 812aeb54..060b3d02 100644
--- a/io_mesh_ply/export_ply.py
+++ b/io_mesh_ply/export_ply.py
@@ -24,8 +24,55 @@ colors, and texture coordinates per face or per vertex.
"""
-def save_mesh(filepath, mesh, use_normals=True, use_uv_coords=True, use_colors=True):
- import os
+def _write_binary(fw, ply_verts, ply_faces, mesh_verts):
+ from struct import pack
+
+ # Vertex data
+ # ---------------------------
+
+ for index, normal, uv_coords, color in ply_verts:
+ fw(pack("<3f", *mesh_verts[index].co))
+ if normal is not None:
+ fw(pack("<3f", *normal))
+ if uv_coords is not None:
+ fw(pack("<2f", *uv_coords))
+ if color is not None:
+ fw(pack("<4B", *color))
+
+ # Face data
+ # ---------------------------
+
+ for pf in ply_faces:
+ length = len(pf)
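+        # One unsigned char vertex count followed by that many little-endian uint32 indices.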
+ fw(pack("<B%dI" % length, length, *pf))
+
+
+def _write_ascii(fw, ply_verts, ply_faces, mesh_verts):
+
+ # Vertex data
+ # ---------------------------
+
+ for index, normal, uv_coords, color in ply_verts:
+ fw(b"%.6f %.6f %.6f" % mesh_verts[index].co[:])
+ if normal is not None:
+ fw(b" %.6f %.6f %.6f" % normal)
+ if uv_coords is not None:
+ fw(b" %.6f %.6f" % uv_coords)
+ if color is not None:
+ fw(b" %u %u %u %u" % color)
+ fw(b"\n")
+
+ # Face data
+ # ---------------------------
+
+ for pf in ply_faces:
+ fw(b"%d" % len(pf))
+ for index in pf:
+ fw(b" %d" % index)
+ fw(b"\n")
+
+
+def save_mesh(filepath, mesh, use_ascii, use_normals, use_uv_coords, use_colors):
import bpy
def rvec3d(v):
@@ -56,10 +103,11 @@ def save_mesh(filepath, mesh, use_normals=True, use_uv_coords=True, use_colors=T
for i, f in enumerate(mesh.polygons):
- smooth = not use_normals or f.use_smooth
- if not smooth:
- normal = f.normal[:]
- normal_key = rvec3d(normal)
+ if use_normals:
+ smooth = f.use_smooth
+ if not smooth:
+ normal = f.normal[:]
+ normal_key = rvec3d(normal)
if use_uv_coords:
uv = [
@@ -76,7 +124,7 @@ def save_mesh(filepath, mesh, use_normals=True, use_uv_coords=True, use_colors=T
for j, vidx in enumerate(f.vertices):
v = mesh_verts[vidx]
- if smooth:
+ if use_normals and smooth:
normal = v.normal[:]
normal_key = rvec3d(normal)
@@ -104,90 +152,72 @@ def save_mesh(filepath, mesh, use_normals=True, use_uv_coords=True, use_colors=T
pf.append(pf_vidx)
- with open(filepath, "w", encoding="utf-8", newline="\n") as file:
+ with open(filepath, "wb") as file:
fw = file.write
+ file_format = b"ascii" if use_ascii else b"binary_little_endian"
# Header
# ---------------------------
- fw("ply\n")
- fw("format ascii 1.0\n")
- fw(
- f"comment Created by Blender {bpy.app.version_string} - "
- f"www.blender.org, source file: {os.path.basename(bpy.data.filepath)!r}\n"
- )
+ fw(b"ply\n")
+ fw(b"format %s 1.0\n" % file_format)
+ fw(b"comment Created by Blender %s - www.blender.org\n" % bpy.app.version_string.encode("utf-8"))
- fw(f"element vertex {len(ply_verts)}\n")
+ fw(b"element vertex %d\n" % len(ply_verts))
fw(
- "property float x\n"
- "property float y\n"
- "property float z\n"
+ b"property float x\n"
+ b"property float y\n"
+ b"property float z\n"
)
if use_normals:
fw(
- "property float nx\n"
- "property float ny\n"
- "property float nz\n"
+ b"property float nx\n"
+ b"property float ny\n"
+ b"property float nz\n"
)
if use_uv_coords:
fw(
- "property float s\n"
- "property float t\n"
+ b"property float s\n"
+ b"property float t\n"
)
if use_colors:
fw(
- "property uchar red\n"
- "property uchar green\n"
- "property uchar blue\n"
- "property uchar alpha\n"
+ b"property uchar red\n"
+ b"property uchar green\n"
+ b"property uchar blue\n"
+ b"property uchar alpha\n"
)
- fw(f"element face {len(mesh.polygons)}\n")
- fw("property list uchar uint vertex_indices\n")
-
- fw("end_header\n")
+ fw(b"element face %d\n" % len(mesh.polygons))
+ fw(b"property list uchar uint vertex_indices\n")
+ fw(b"end_header\n")
- # Vertex data
+ # Geometry
# ---------------------------
- for i, v in enumerate(ply_verts):
- fw("%.6f %.6f %.6f" % mesh_verts[v[0]].co[:])
- if use_normals:
- fw(" %.6f %.6f %.6f" % v[1])
- if use_uv_coords:
- fw(" %.6f %.6f" % v[2])
- if use_colors:
- fw(" %u %u %u %u" % v[3])
- fw("\n")
-
- # Face data
- # ---------------------------
-
- for pf in ply_faces:
- fw(f"{len(pf)}")
- for v in pf:
- fw(f" {v}")
- fw("\n")
-
- print(f"Writing {filepath!r} done")
-
- return {'FINISHED'}
+ if use_ascii:
+ _write_ascii(fw, ply_verts, ply_faces, mesh_verts)
+ else:
+ _write_binary(fw, ply_verts, ply_faces, mesh_verts)
def save(
- operator,
context,
filepath="",
+ use_ascii=False,
use_selection=False,
use_mesh_modifiers=True,
use_normals=True,
use_uv_coords=True,
use_colors=True,
- global_matrix=None
+ global_matrix=None,
):
+ import time
import bpy
import bmesh
+ t = time.time()
+
if bpy.ops.object.mode_set.poll():
bpy.ops.object.mode_set(mode='OBJECT')
@@ -224,14 +254,16 @@ def save(
if use_normals:
mesh.calc_normals()
- ret = save_mesh(
+ save_mesh(
filepath,
mesh,
- use_normals=use_normals,
- use_uv_coords=use_uv_coords,
- use_colors=use_colors,
+ use_ascii,
+ use_normals,
+ use_uv_coords,
+ use_colors,
)
bpy.data.meshes.remove(mesh)
- return ret
+ t_delta = time.time() - t
+ print(f"Export completed {filepath!r} in {t_delta:.3f}")
diff --git a/io_mesh_ply/import_ply.py b/io_mesh_ply/import_ply.py
index 2bf91442..d9d12d67 100644
--- a/io_mesh_ply/import_ply.py
+++ b/io_mesh_ply/import_ply.py
@@ -110,19 +110,12 @@ class ObjectSpec:
self.specs = []
def load(self, format, stream):
- return dict([(i.name, [i.load(format, stream) for j in range(i.count)]) for i in self.specs])
-
- # Longhand for above LC
- """
- answer = {}
- for i in self.specs:
- answer[i.name] = []
- for j in range(i.count):
- if not j % 100 and meshtools.show_progress:
- Blender.Window.DrawProgressBar(float(j) / i.count, 'Loading ' + i.name)
- answer[i.name].append(i.load(format, stream))
- return answer
- """
+ return {
+ i.name: [
+ i.load(format, stream) for j in range(i.count)
+ ]
+ for i in self.specs
+ }
def read(filepath):
@@ -159,14 +152,45 @@ def read(filepath):
invalid_ply = (None, None, None)
with open(filepath, 'rb') as plyf:
- signature = plyf.readline()
+ signature = plyf.peek(5)
- if not signature.startswith(b'ply'):
+ if not signature.startswith(b'ply') or not len(signature) >= 5:
print("Signature line was invalid")
return invalid_ply
+ custom_line_sep = None
+ if signature[3] != ord(b'\n'):
+ if signature[3] != ord(b'\r'):
+ print("Unknown line separator")
+ return invalid_ply
+ if signature[4] == ord(b'\n'):
+ custom_line_sep = b"\r\n"
+ else:
+ custom_line_sep = b"\r"
+
+ # Work around binary file reading only accepting "\n" as line separator.
+ plyf_header_line_iterator = lambda plyf: plyf
+ if custom_line_sep is not None:
+ def _plyf_header_line_iterator(plyf):
+ buff = plyf.peek(2**16)
+ while len(buff) != 0:
+ read_bytes = 0
+ buff = buff.split(custom_line_sep)
+ for line in buff[:-1]:
+ read_bytes += len(line) + len(custom_line_sep)
+ if line.startswith(b'end_header'):
+ # Since reader code might (will) break iteration at this point,
+ # we have to ensure the file is read up to here, then yield and return...
+ plyf.read(read_bytes)
+ yield line
+ return
+ yield line
+ plyf.read(read_bytes)
+ buff = buff[-1] + plyf.peek(2**16)
+ plyf_header_line_iterator = _plyf_header_line_iterator
+
valid_header = False
- for line in plyf:
+ for line in plyf_header_line_iterator(plyf):
tokens = re.split(br'[ \r\n]+', line)
if len(tokens) == 0:
@@ -279,20 +303,20 @@ def load_ply_mesh(filepath, ply_name):
if len(colindices) == 3:
mesh_colors.extend([
(
- vertices[index][colindices[0]] * colmultiply[0],
- vertices[index][colindices[1]] * colmultiply[1],
- vertices[index][colindices[2]] * colmultiply[2],
- 1.0
+ vertices[index][colindices[0]] * colmultiply[0],
+ vertices[index][colindices[1]] * colmultiply[1],
+ vertices[index][colindices[2]] * colmultiply[2],
+ 1.0,
)
for index in indices
])
elif len(colindices) == 4:
mesh_colors.extend([
(
- vertices[index][colindices[0]] * colmultiply[0],
- vertices[index][colindices[1]] * colmultiply[1],
- vertices[index][colindices[2]] * colmultiply[2],
- vertices[index][colindices[3]] * colmultiply[3],
+ vertices[index][colindices[0]] * colmultiply[0],
+ vertices[index][colindices[1]] * colmultiply[1],
+ vertices[index][colindices[2]] * colmultiply[2],
+ vertices[index][colindices[3]] * colmultiply[3],
)
for index in indices
])
@@ -415,12 +439,16 @@ def load_ply(filepath):
if not mesh:
return {'CANCELLED'}
+ for ob in bpy.context.selected_objects:
+ ob.select_set(False)
+
obj = bpy.data.objects.new(ply_name, mesh)
bpy.context.collection.objects.link(obj)
bpy.context.view_layer.objects.active = obj
obj.select_set(True)
print("\nSuccessfully imported %r in %.3f sec" % (filepath, time.time() - t))
+
return {'FINISHED'}
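
The header workaround above exists because iterating a binary file only splits on b"\n". A simplified illustration (operating on an in-memory bytes object rather than a peekable file, so it is not the patch code) of splitting header lines on a custom separator and stopping at end_header:

def iter_header_lines(data, line_sep=b"\r"):
    # Yield header lines split on a non-"\n" separator, stopping after end_header
    # so anything following it (the body) is left untouched.
    start = 0
    while True:
        end = data.find(line_sep, start)
        if end == -1:
            return
        line = data[start:end]
        yield line
        if line.startswith(b"end_header"):
            return
        start = end + len(line_sep)

header = b"ply\rformat ascii 1.0\relement vertex 0\rend_header\rBODY"
print(list(iter_header_lines(header)))
# [b'ply', b'format ascii 1.0', b'element vertex 0', b'end_header']
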
diff --git a/io_mesh_uv_layout/export_uv_png.py b/io_mesh_uv_layout/export_uv_png.py
index f831402f..7fd3ba09 100644
--- a/io_mesh_uv_layout/export_uv_png.py
+++ b/io_mesh_uv_layout/export_uv_png.py
@@ -30,6 +30,7 @@ def export(filepath, face_data, colors, width, height, opacity):
offscreen.bind()
try:
+ bgl.glClearColor(0.0, 0.0, 0.0, 0.0)
bgl.glClear(bgl.GL_COLOR_BUFFER_BIT)
draw_image(face_data, opacity)
diff --git a/io_scene_gltf2/__init__.py b/io_scene_gltf2/__init__.py
index 3ea1ce11..74338ef0 100755
--- a/io_scene_gltf2/__init__.py
+++ b/io_scene_gltf2/__init__.py
@@ -14,8 +14,8 @@
bl_info = {
'name': 'glTF 2.0 format',
- 'author': 'Julien Duroure, Norbert Nopper, Urs Hanselmann, Moritz Becher, Benjamin Schmithüsen, Jim Eckerlein, and many external contributors',
- "version": (1, 3, 28),
+ 'author': 'Julien Duroure, Scurest, Norbert Nopper, Urs Hanselmann, Moritz Becher, Benjamin Schmithüsen, Jim Eckerlein, and many external contributors',
+ "version": (1, 4, 7),
'blender': (2, 90, 0),
'location': 'File > Import-Export',
'description': 'Import-Export as glTF 2.0',
@@ -114,10 +114,10 @@ class ExportGLTF2_Base:
export_image_format: EnumProperty(
name='Images',
items=(('AUTO', 'Automatic',
- 'Save PNGs as PNGs and JPEGs as JPEGs.\n'
+ 'Save PNGs as PNGs and JPEGs as JPEGs. '
'If neither one, use PNG'),
('JPEG', 'JPEG Format (.jpg)',
- 'Save images as JPEGs. (Images that need alpha are saved as PNGs though.)\n'
+ 'Save images as JPEGs. (Images that need alpha are saved as PNGs though.) '
'Be aware of a possible loss in quality'),
),
description=(
@@ -276,8 +276,8 @@ class ExportGLTF2_Base:
export_nla_strips: BoolProperty(
name='Group by NLA Track',
description=(
- "When on, multiple actions become part of the same glTF animation if\n"
- "they're pushed onto NLA tracks with the same name.\n"
+ "When on, multiple actions become part of the same glTF animation if "
+ "they're pushed onto NLA tracks with the same name. "
"When off, all the currently assigned actions become one glTF animation"
),
default=True
@@ -485,6 +485,8 @@ class ExportGLTF2_Base:
bpy.path.ensure_ext(self.filepath,self.filename_ext)))[0] + '.bin'
user_extensions = []
+ pre_export_callbacks = []
+ post_export_callbacks = []
import sys
preferences = bpy.context.preferences
@@ -500,7 +502,13 @@ class ExportGLTF2_Base:
extension_ctors = module.glTF2ExportUserExtensions
for extension_ctor in extension_ctors:
user_extensions.append(extension_ctor())
+ if hasattr(module, 'glTF2_pre_export_callback'):
+ pre_export_callbacks.append(module.glTF2_pre_export_callback)
+ if hasattr(module, 'glTF2_post_export_callback'):
+ post_export_callbacks.append(module.glTF2_post_export_callback)
export_settings['gltf_user_extensions'] = user_extensions
+ export_settings['pre_export_callbacks'] = pre_export_callbacks
+ export_settings['post_export_callbacks'] = post_export_callbacks
return gltf2_blender_export.save(context, export_settings)
@@ -836,6 +844,7 @@ class ImportGLTF2(Operator, ImportHelper):
"""Load a glTF 2.0 file"""
bl_idname = 'import_scene.gltf'
bl_label = 'Import glTF 2.0'
+ bl_options = {'REGISTER', 'UNDO'}
filter_glob: StringProperty(default="*.glb;*.gltf", options={'HIDDEN'})
@@ -854,6 +863,18 @@ class ImportGLTF2(Operator, ImportHelper):
default=True
)
+ merge_vertices: BoolProperty(
+ name='Merge Vertices',
+ description=(
+ 'The glTF format requires discontinuous normals, UVs, and '
+ 'other vertex attributes to be stored as separate vertices, '
+ 'as needed for rendering on typical graphics hardware. '
+ 'This option attempts to combine co-located vertices where possible. '
+ 'Currently cannot combine verts with different normals'
+ ),
+ default=False,
+ )
+
import_shading: EnumProperty(
name="Shading",
items=(("NORMALS", "Use Normal Data", ""),
@@ -866,15 +887,15 @@ class ImportGLTF2(Operator, ImportHelper):
name="Bone Dir",
items=(
("BLENDER", "Blender (best for re-importing)",
- "Good for re-importing glTFs exported from Blender.\n"
+ "Good for re-importing glTFs exported from Blender. "
"Bone tips are placed on their local +Y axis (in glTF space)"),
("TEMPERANCE", "Temperance (average)",
- "Decent all-around strategy.\n"
- "A bone with one child has its tip placed on the local axis\n"
+ "Decent all-around strategy. "
+ "A bone with one child has its tip placed on the local axis "
"closest to its child"),
("FORTUNE", "Fortune (may look better, less accurate)",
- "Might look better than Temperance, but also might have errors.\n"
- "A bone with one child has its tip placed at its child's root.\n"
+ "Might look better than Temperance, but also might have errors. "
+ "A bone with one child has its tip placed at its child's root. "
"Non-uniform scalings may get messed up though, so beware"),
),
description="Heuristic for placing bones. Tries to make bones pretty",
@@ -885,7 +906,7 @@ class ImportGLTF2(Operator, ImportHelper):
name='Guess Original Bind Pose',
description=(
'Try to guess the original bind pose for skinned meshes from '
- 'the inverse bind matrices.\n'
+ 'the inverse bind matrices. '
'When off, use default/rest pose as bind pose'
),
default=True,
@@ -898,6 +919,7 @@ class ImportGLTF2(Operator, ImportHelper):
layout.use_property_decorate = False # No animation.
layout.prop(self, 'import_pack_images')
+ layout.prop(self, 'merge_vertices')
layout.prop(self, 'import_shading')
layout.prop(self, 'guess_original_bind_pose')
layout.prop(self, 'bone_heuristic')
diff --git a/io_scene_gltf2/blender/com/gltf2_blender_math.py b/io_scene_gltf2/blender/com/gltf2_blender_math.py
index 72eb124a..bddc79a6 100755
--- a/io_scene_gltf2/blender/com/gltf2_blender_math.py
+++ b/io_scene_gltf2/blender/com/gltf2_blender_math.py
@@ -19,11 +19,6 @@ from mathutils import Matrix, Vector, Quaternion, Euler
from io_scene_gltf2.blender.com.gltf2_blender_data_path import get_target_property_name
-def multiply(a, b):
- """Multiplication."""
- return a @ b
-
-
def list_to_mathutils(values: typing.List[float], data_path: str) -> typing.Union[Vector, Quaternion, Euler]:
"""Transform a list to blender py object."""
target = get_target_property_name(data_path)
@@ -31,7 +26,7 @@ def list_to_mathutils(values: typing.List[float], data_path: str) -> typing.Unio
if target == 'delta_location':
return Vector(values) # TODO Should be Vector(values) - Vector(something)?
elif target == 'delta_rotation_euler':
- return Euler(values).to_quaternion() # TODO Should be multiply(Euler(values).to_quaternion(), something)?
+ return Euler(values).to_quaternion() # TODO Should be Euler(values).to_quaternion() @ something?
elif target == 'location':
return Vector(values)
elif target == 'rotation_axis_angle':
@@ -138,7 +133,7 @@ def transform(v: typing.Union[Vector, Quaternion], data_path: str, transform: Ma
def transform_location(location: Vector, transform: Matrix = Matrix.Identity(4)) -> Vector:
"""Transform location."""
m = Matrix.Translation(location)
- m = multiply(transform, m)
+ m = transform @ m
return m.to_translation()
@@ -146,7 +141,7 @@ def transform_rotation(rotation: Quaternion, transform: Matrix = Matrix.Identity
"""Transform rotation."""
rotation.normalize()
m = rotation.to_matrix().to_4x4()
- m = multiply(transform, m)
+ m = transform @ m
return m.to_quaternion()
@@ -156,7 +151,7 @@ def transform_scale(scale: Vector, transform: Matrix = Matrix.Identity(4)) -> Ve
m[0][0] = scale.x
m[1][1] = scale.y
m[2][2] = scale.z
- m = multiply(transform, m)
+ m = transform @ m
return m.to_scale()
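
The removed multiply() helper was only a thin wrapper around the @ operator; with the mathutils API in Blender 2.8+ the operator can be used directly, as the hunks above now do. A tiny sketch (runs in Blender's bundled Python, where mathutils is available):

from mathutils import Matrix, Vector

m = Matrix.Translation((1.0, 2.0, 3.0))
v = Vector((0.0, 0.0, 0.0))
print(m @ v)                        # Vector((1.0, 2.0, 3.0))
print((m @ m).to_translation())     # Vector((2.0, 4.0, 6.0))
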
diff --git a/io_scene_gltf2/blender/exp/gltf2_blender_export.py b/io_scene_gltf2/blender/exp/gltf2_blender_export.py
index 2989ec31..fd433c7e 100755
--- a/io_scene_gltf2/blender/exp/gltf2_blender_export.py
+++ b/io_scene_gltf2/blender/exp/gltf2_blender_export.py
@@ -39,7 +39,15 @@ def save(context, export_settings):
__notify_start(context)
start_time = time.time()
+ pre_export_callbacks = export_settings["pre_export_callbacks"]
+ for callback in pre_export_callbacks:
+ callback(export_settings)
+
json, buffer = __export(export_settings)
+
+ post_export_callbacks = export_settings["post_export_callbacks"]
+ for callback in post_export_callbacks:
+ callback(export_settings)
__write_file(json, buffer, export_settings)
end_time = time.time()
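
Together with the hasattr() checks added to __init__.py above, this gives other enabled add-ons two module-level hooks. A hedged sketch of what such an add-on could look like (the add-on name is hypothetical; per this patch the pre hook runs before __export() and the post hook runs after __export() but before __write_file()):

bl_info = {"name": "My glTF Export Hook", "blender": (2, 90, 0), "category": "Import-Export"}

def glTF2_pre_export_callback(export_settings):
    # export_settings is the exporter's settings dict
    print("glTF pre-export hook called")

def glTF2_post_export_callback(export_settings):
    print("glTF post-export hook called")

def register():
    pass

def unregister():
    pass
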
diff --git a/io_scene_gltf2/blender/exp/gltf2_blender_extract.py b/io_scene_gltf2/blender/exp/gltf2_blender_extract.py
index e2d224ce..eef05044 100755
--- a/io_scene_gltf2/blender/exp/gltf2_blender_extract.py
+++ b/io_scene_gltf2/blender/exp/gltf2_blender_extract.py
@@ -17,46 +17,26 @@
#
from mathutils import Vector, Quaternion, Matrix
-from mathutils.geometry import tessellate_polygon
-from operator import attrgetter
from . import gltf2_blender_export_keys
from ...io.com.gltf2_io_debug import print_console
from ...io.com.gltf2_io_color_management import color_srgb_to_scene_linear
from io_scene_gltf2.blender.exp import gltf2_blender_gather_skins
-#
-# Globals
-#
-
-INDICES_ID = 'indices'
-MATERIAL_ID = 'material'
-ATTRIBUTES_ID = 'attributes'
-
-COLOR_PREFIX = 'COLOR_'
-MORPH_TANGENT_PREFIX = 'MORPH_TANGENT_'
-MORPH_NORMAL_PREFIX = 'MORPH_NORMAL_'
-MORPH_POSITION_PREFIX = 'MORPH_POSITION_'
-TEXCOORD_PREFIX = 'TEXCOORD_'
-WEIGHTS_PREFIX = 'WEIGHTS_'
-JOINTS_PREFIX = 'JOINTS_'
-
-TANGENT_ATTRIBUTE = 'TANGENT'
-NORMAL_ATTRIBUTE = 'NORMAL'
-POSITION_ATTRIBUTE = 'POSITION'
-
-GLTF_MAX_COLORS = 2
-
#
# Classes
#
+class Prim:
+ def __init__(self):
+ self.verts = {}
+ self.indices = []
+
class ShapeKey:
- def __init__(self, shape_key, vertex_normals, polygon_normals):
+ def __init__(self, shape_key, split_normals):
self.shape_key = shape_key
- self.vertex_normals = vertex_normals
- self.polygon_normals = polygon_normals
+ self.split_normals = split_normals
#
@@ -110,17 +90,17 @@ def convert_swizzle_tangent(tan, armature, blender_object, export_settings):
if (not armature) or (not blender_object):
# Classic case. Mesh is not skinned, no need to apply armature transforms on vertices / normals / tangents
if export_settings[gltf2_blender_export_keys.YUP]:
- return Vector((tan[0], tan[2], -tan[1], 1.0))
+ return Vector((tan[0], tan[2], -tan[1]))
else:
- return Vector((tan[0], tan[1], tan[2], 1.0))
+ return Vector((tan[0], tan[1], tan[2]))
else:
# Mesh is skinned, we have to apply armature transforms on data
apply_matrix = armature.matrix_world.inverted() @ blender_object.matrix_world
- new_tan = apply_matrix.to_quaternion() @ tan
+ new_tan = apply_matrix.to_quaternion() @ Vector((tan[0], tan[1], tan[2]))
if export_settings[gltf2_blender_export_keys.YUP]:
- return Vector((new_tan[0], new_tan[2], -new_tan[1], 1.0))
+ return Vector((new_tan[0], new_tan[2], -new_tan[1]))
else:
- return Vector((new_tan[0], new_tan[1], new_tan[2], 1.0))
+ return Vector((new_tan[0], new_tan[1], new_tan[2]))
def convert_swizzle_rotation(rot, export_settings):
"""
@@ -142,567 +122,293 @@ def convert_swizzle_scale(scale, export_settings):
return Vector((scale[0], scale[1], scale[2]))
-def decompose_transition(matrix, export_settings):
- translation, rotation, scale = matrix.decompose()
-
- return translation, rotation, scale
-
-
def extract_primitives(glTF, blender_mesh, library, blender_object, blender_vertex_groups, modifiers, export_settings):
"""
Extract primitives from a mesh. Polygons are triangulated and sorted by material.
-
- Furthermore, primitives are split up, if the indices range is exceeded.
- Finally, triangles are also split up/duplicated, if face normals are used instead of vertex normals.
+ Vertices in multiple faces get split up as necessary.
"""
print_console('INFO', 'Extracting primitive: ' + blender_mesh.name)
- if blender_mesh.has_custom_normals:
- # Custom normals are all (0, 0, 0) until calling calc_normals_split() or calc_tangents().
- blender_mesh.calc_normals_split()
-
- use_tangents = False
- if blender_mesh.uv_layers.active and len(blender_mesh.uv_layers) > 0:
- try:
- blender_mesh.calc_tangents()
- use_tangents = True
- except Exception:
- print_console('WARNING', 'Could not calculate tangents. Please try to triangulate the mesh first.')
-
- #
-
- material_map = {}
-
- #
- # Gathering position, normal and tex_coords.
- #
- no_material_attributes = {
- POSITION_ATTRIBUTE: [],
- NORMAL_ATTRIBUTE: []
- }
-
- if use_tangents:
- no_material_attributes[TANGENT_ATTRIBUTE] = []
-
- #
- # Directory of materials with its primitive.
- #
- no_material_primitives = {
- MATERIAL_ID: 0,
- INDICES_ID: [],
- ATTRIBUTES_ID: no_material_attributes
- }
-
- material_idx_to_primitives = {0: no_material_primitives}
-
#
-
- vertex_index_to_new_indices = {}
-
- material_map[0] = vertex_index_to_new_indices
-
+ # First, decide what attributes to gather (eg. how many COLOR_n, etc.)
+ # Also calculate normals/tangents now if necessary.
#
- # Create primitive for each material.
- #
- for (mat_idx, _) in enumerate(blender_mesh.materials):
- attributes = {
- POSITION_ATTRIBUTE: [],
- NORMAL_ATTRIBUTE: []
- }
-
- if use_tangents:
- attributes[TANGENT_ATTRIBUTE] = []
- primitive = {
- MATERIAL_ID: mat_idx,
- INDICES_ID: [],
- ATTRIBUTES_ID: attributes
- }
-
- material_idx_to_primitives[mat_idx] = primitive
-
- #
-
- vertex_index_to_new_indices = {}
+ use_normals = export_settings[gltf2_blender_export_keys.NORMALS]
+ if use_normals:
+ blender_mesh.calc_normals_split()
- material_map[mat_idx] = vertex_index_to_new_indices
+ use_tangents = False
+ if use_normals and export_settings[gltf2_blender_export_keys.TANGENTS]:
+ if blender_mesh.uv_layers.active and len(blender_mesh.uv_layers) > 0:
+ try:
+ blender_mesh.calc_tangents()
+ use_tangents = True
+ except Exception:
+ print_console('WARNING', 'Could not calculate tangents. Please try to triangulate the mesh first.')
tex_coord_max = 0
- if blender_mesh.uv_layers.active:
- tex_coord_max = len(blender_mesh.uv_layers)
-
- #
-
- vertex_colors = {}
-
- color_index = 0
- for vertex_color in blender_mesh.vertex_colors:
- vertex_color_name = COLOR_PREFIX + str(color_index)
- vertex_colors[vertex_color_name] = vertex_color
-
- color_index += 1
- if color_index >= GLTF_MAX_COLORS:
- break
- color_max = color_index
-
- #
-
- bone_max = 0
- for blender_polygon in blender_mesh.polygons:
- for loop_index in blender_polygon.loop_indices:
- vertex_index = blender_mesh.loops[loop_index].vertex_index
- bones_count = len(blender_mesh.vertices[vertex_index].groups)
- if bones_count > 0:
- if bones_count % 4 == 0:
- bones_count -= 1
- bone_max = max(bone_max, bones_count // 4 + 1)
-
- #
+ if export_settings[gltf2_blender_export_keys.TEX_COORDS]:
+ if blender_mesh.uv_layers.active:
+ tex_coord_max = len(blender_mesh.uv_layers)
- morph_max = 0
+ color_max = 0
+ if export_settings[gltf2_blender_export_keys.COLORS]:
+ color_max = len(blender_mesh.vertex_colors)
- blender_shape_keys = []
-
- if blender_mesh.shape_keys is not None:
+ bone_max = 0 # number of JOINTS_n sets needed (1 set = 4 influences)
+ armature = None
+ if blender_vertex_groups and export_settings[gltf2_blender_export_keys.SKINS]:
+ if modifiers is not None:
+ modifiers_dict = {m.type: m for m in modifiers}
+ if "ARMATURE" in modifiers_dict:
+ modifier = modifiers_dict["ARMATURE"]
+ armature = modifier.object
+
+ # Skin must be ignored if the object is parented to a bone of the armature
+ # (this would cause infinite recursion),
+ # so the skin is ignored in that case
+ is_child_of_arma = (
+ armature and
+ blender_object and
+ blender_object.parent_type == "BONE" and
+ blender_object.parent.name == armature.name
+ )
+ if is_child_of_arma:
+ armature = None
+
+ if armature:
+ skin = gltf2_blender_gather_skins.gather_skin(armature, export_settings)
+ if not skin:
+ armature = None
+ else:
+ joint_name_to_index = {joint.name: index for index, joint in enumerate(skin.joints)}
+ group_to_joint = [joint_name_to_index.get(g.name) for g in blender_vertex_groups]
+
+ # Find out max number of bone influences
+ for blender_polygon in blender_mesh.polygons:
+ for loop_index in blender_polygon.loop_indices:
+ vertex_index = blender_mesh.loops[loop_index].vertex_index
+ groups_count = len(blender_mesh.vertices[vertex_index].groups)
+ bones_count = (groups_count + 3) // 4
+ bone_max = max(bone_max, bones_count)
+
+ use_morph_normals = use_normals and export_settings[gltf2_blender_export_keys.MORPH_NORMAL]
+ use_morph_tangents = use_morph_normals and use_tangents and export_settings[gltf2_blender_export_keys.MORPH_TANGENT]
+
+ shape_keys = []
+ if blender_mesh.shape_keys and export_settings[gltf2_blender_export_keys.MORPH]:
for blender_shape_key in blender_mesh.shape_keys.key_blocks:
- if blender_shape_key != blender_shape_key.relative_key:
- if blender_shape_key.mute is False:
- morph_max += 1
- blender_shape_keys.append(ShapeKey(
- blender_shape_key,
- blender_shape_key.normals_vertex_get(), # calculate vertex normals for this shape key
- blender_shape_key.normals_polygon_get())) # calculate polygon normals for this shape key
+ if blender_shape_key == blender_shape_key.relative_key or blender_shape_key.mute:
+ continue
+ split_normals = None
+ if use_morph_normals:
+ split_normals = blender_shape_key.normals_split_get()
+
+ shape_keys.append(ShapeKey(
+ blender_shape_key,
+ split_normals,
+ ))
- armature = None
- if modifiers is not None:
- modifiers_dict = {m.type: m for m in modifiers}
- if "ARMATURE" in modifiers_dict:
- modifier = modifiers_dict["ARMATURE"]
- armature = modifier.object
+ use_materials = export_settings[gltf2_blender_export_keys.MATERIALS]
#
- # Convert polygon to primitive indices and eliminate invalid ones. Assign to material.
+ # Gather the verts and indices for each primitive.
#
- for blender_polygon in blender_mesh.polygons:
- export_color = True
- #
+ prims = {}
- if export_settings['gltf_materials'] is False:
- primitive = material_idx_to_primitives[0]
- vertex_index_to_new_indices = material_map[0]
- elif not blender_polygon.material_index in material_idx_to_primitives:
- primitive = material_idx_to_primitives[0]
- vertex_index_to_new_indices = material_map[0]
- else:
- primitive = material_idx_to_primitives[blender_polygon.material_index]
- vertex_index_to_new_indices = material_map[blender_polygon.material_index]
- #
-
- attributes = primitive[ATTRIBUTES_ID]
-
- face_normal = blender_polygon.normal
- face_tangent = Vector((0.0, 0.0, 0.0))
- face_bitangent = Vector((0.0, 0.0, 0.0))
- if use_tangents:
- for loop_index in blender_polygon.loop_indices:
- temp_vertex = blender_mesh.loops[loop_index]
- face_tangent += temp_vertex.tangent
- face_bitangent += temp_vertex.bitangent
-
- face_tangent.normalize()
- face_bitangent.normalize()
-
- #
+ blender_mesh.calc_loop_triangles()
- indices = primitive[INDICES_ID]
+ for loop_tri in blender_mesh.loop_triangles:
+ blender_polygon = blender_mesh.polygons[loop_tri.polygon_index]
- loop_index_list = []
+ material_idx = -1
+ if use_materials:
+ material_idx = blender_polygon.material_index
- if len(blender_polygon.loop_indices) == 3:
- loop_index_list.extend(blender_polygon.loop_indices)
- elif len(blender_polygon.loop_indices) > 3:
- # Triangulation of polygon. Using internal function, as non-convex polygons could exist.
- polyline = []
+ prim = prims.get(material_idx)
+ if not prim:
+ prim = Prim()
+ prims[material_idx] = prim
- for loop_index in blender_polygon.loop_indices:
- vertex_index = blender_mesh.loops[loop_index].vertex_index
- v = blender_mesh.vertices[vertex_index].co
- polyline.append(Vector((v[0], v[1], v[2])))
-
- triangles = tessellate_polygon((polyline,))
-
- for triangle in triangles:
-
- for triangle_index in triangle:
- loop_index_list.append(blender_polygon.loop_indices[triangle_index])
- else:
- continue
-
- for loop_index in loop_index_list:
+ for loop_index in loop_tri.loops:
vertex_index = blender_mesh.loops[loop_index].vertex_index
+ vertex = blender_mesh.vertices[vertex_index]
- if vertex_index_to_new_indices.get(vertex_index) is None:
- vertex_index_to_new_indices[vertex_index] = []
-
- #
-
- v = None
- n = None
- t = None
- b = None
- uvs = []
- colors = []
- joints = []
- weights = []
-
- target_positions = []
- target_normals = []
- target_tangents = []
+ # vert will be a tuple of all the vertex attributes.
+ # Used as cache key in prim.verts.
+ vert = (vertex_index,)
- vertex = blender_mesh.vertices[vertex_index]
+ v = vertex.co
+ vert += ((v[0], v[1], v[2]),)
- v = convert_swizzle_location(vertex.co, armature, blender_object, export_settings)
- if blender_polygon.use_smooth or blender_mesh.use_auto_smooth:
- if blender_mesh.has_custom_normals:
- n = convert_swizzle_normal(blender_mesh.loops[loop_index].normal, armature, blender_object, export_settings)
- else:
- n = convert_swizzle_normal(vertex.normal, armature, blender_object, export_settings)
+ if use_normals:
+ n = blender_mesh.loops[loop_index].normal
+ vert += ((n[0], n[1], n[2]),)
if use_tangents:
- t = convert_swizzle_tangent(blender_mesh.loops[loop_index].tangent, armature, blender_object, export_settings)
- b = convert_swizzle_location(blender_mesh.loops[loop_index].bitangent, armature, blender_object, export_settings)
- else:
- n = convert_swizzle_normal(face_normal, armature, blender_object, export_settings)
- if use_tangents:
- t = convert_swizzle_tangent(face_tangent, armature, blender_object, export_settings)
- b = convert_swizzle_location(face_bitangent, armature, blender_object, export_settings)
-
- if use_tangents:
- tv = Vector((t[0], t[1], t[2]))
- bv = Vector((b[0], b[1], b[2]))
- nv = Vector((n[0], n[1], n[2]))
-
- if (nv.cross(tv)).dot(bv) < 0.0:
- t[3] = -1.0
-
- if blender_mesh.uv_layers.active:
- for tex_coord_index in range(0, tex_coord_max):
- uv = blender_mesh.uv_layers[tex_coord_index].data[loop_index].uv
- uvs.append([uv.x, 1.0 - uv.y])
-
- #
-
- if color_max > 0 and export_color:
- for color_index in range(0, color_max):
- color_name = COLOR_PREFIX + str(color_index)
- color = vertex_colors[color_name].data[loop_index].color
- colors.append([
- color_srgb_to_scene_linear(color[0]),
- color_srgb_to_scene_linear(color[1]),
- color_srgb_to_scene_linear(color[2]),
- color[3]
- ])
-
- #
-
- bone_count = 0
-
- # Skin must be ignored if the object is parented to a bone of the armature
- # (This creates an infinite recursive error)
- # So ignoring skin in that case
- if blender_object and blender_object.parent_type == "BONE" and blender_object.parent.name == armature.name:
- bone_max = 0 # joints & weights will be ignored in following code
- else:
- # Manage joints & weights
- if blender_vertex_groups is not None and vertex.groups is not None and len(vertex.groups) > 0 and export_settings[gltf2_blender_export_keys.SKINS]:
- joint = []
- weight = []
- vertex_groups = vertex.groups
- if not export_settings['gltf_all_vertex_influences']:
- # sort groups by weight descending
- vertex_groups = sorted(vertex.groups, key=attrgetter('weight'), reverse=True)
- for group_element in vertex_groups:
-
- if len(joint) == 4:
- bone_count += 1
- joints.append(joint)
- weights.append(weight)
- joint = []
- weight = []
-
- #
-
- joint_weight = group_element.weight
- if joint_weight <= 0.0:
+ t = blender_mesh.loops[loop_index].tangent
+ b = blender_mesh.loops[loop_index].bitangent
+ vert += ((t[0], t[1], t[2]),)
+ vert += ((b[0], b[1], b[2]),)
+ # TODO: store just bitangent_sign in vert, not whole bitangent?
+
+ for tex_coord_index in range(0, tex_coord_max):
+ uv = blender_mesh.uv_layers[tex_coord_index].data[loop_index].uv
+ uv = (uv.x, 1.0 - uv.y)
+ vert += (uv,)
+
+ for color_index in range(0, color_max):
+ color = blender_mesh.vertex_colors[color_index].data[loop_index].color
+ col = (
+ color_srgb_to_scene_linear(color[0]),
+ color_srgb_to_scene_linear(color[1]),
+ color_srgb_to_scene_linear(color[2]),
+ color[3],
+ )
+ vert += (col,)
+
+ if bone_max:
+ bones = []
+ if vertex.groups:
+ for group_element in vertex.groups:
+ weight = group_element.weight
+ if weight <= 0.0:
continue
-
- #
-
- vertex_group_index = group_element.group
-
- if vertex_group_index < 0 or vertex_group_index >= len(blender_vertex_groups):
+ try:
+ joint = group_to_joint[group_element.group]
+ except Exception:
continue
- vertex_group_name = blender_vertex_groups[vertex_group_index].name
-
- joint_index = None
-
- if armature:
- skin = gltf2_blender_gather_skins.gather_skin(armature, export_settings)
- for index, j in enumerate(skin.joints):
- if j.name == vertex_group_name:
- joint_index = index
- break
-
- #
- if joint_index is not None:
- joint.append(joint_index)
- weight.append(joint_weight)
-
- if len(joint) > 0:
- bone_count += 1
-
- for fill in range(0, 4 - len(joint)):
- joint.append(0)
- weight.append(0.0)
-
- joints.append(joint)
- weights.append(weight)
-
- for fill in range(0, bone_max - bone_count):
- joints.append([0, 0, 0, 0])
- weights.append([0.0, 0.0, 0.0, 0.0])
-
- #
-
- if morph_max > 0 and export_settings[gltf2_blender_export_keys.MORPH]:
- for morph_index in range(0, morph_max):
- blender_shape_key = blender_shape_keys[morph_index]
-
- v_morph = convert_swizzle_location(blender_shape_key.shape_key.data[vertex_index].co,
- armature, blender_object,
- export_settings)
-
- # Store delta.
- v_morph -= v
-
- target_positions.append(v_morph)
-
- #
-
- n_morph = None
-
- if blender_polygon.use_smooth:
- temp_normals = blender_shape_key.vertex_normals
- n_morph = (temp_normals[vertex_index * 3 + 0], temp_normals[vertex_index * 3 + 1],
- temp_normals[vertex_index * 3 + 2])
- else:
- temp_normals = blender_shape_key.polygon_normals
- n_morph = (
- temp_normals[blender_polygon.index * 3 + 0], temp_normals[blender_polygon.index * 3 + 1],
- temp_normals[blender_polygon.index * 3 + 2])
-
- n_morph = convert_swizzle_normal(Vector(n_morph), armature, blender_object, export_settings)
-
- # Store delta.
- n_morph -= n
-
- target_normals.append(n_morph)
-
- #
-
- if use_tangents:
- rotation = n_morph.rotation_difference(n)
-
- t_morph = Vector((t[0], t[1], t[2]))
-
- t_morph.rotate(rotation)
-
- target_tangents.append(t_morph)
-
- #
- #
+ if joint is None:
+ continue
+ bones.append((joint, weight))
+ bones.sort(key=lambda x: x[1], reverse=True)
+ bones = tuple(bones)
+ if not bones: bones = ((0, 1.0),) # HACK for verts with zero weight (#308)
+ vert += (bones,)
+
+ for shape_key in shape_keys:
+ v_morph = shape_key.shape_key.data[vertex_index].co
+ v_morph = v_morph - v # store delta
+ vert += ((v_morph[0], v_morph[1], v_morph[2]),)
+
+ if use_morph_normals:
+ normals = shape_key.split_normals
+ n_morph = Vector(normals[loop_index * 3 : loop_index * 3 + 3])
+ n_morph = n_morph - n # store delta
+ vert += ((n_morph[0], n_morph[1], n_morph[2]),)
+
+ vert_idx = prim.verts.setdefault(vert, len(prim.verts))
+ prim.indices.append(vert_idx)
- create = True
+ #
+ # Put the verts into attribute arrays.
+ #
- for current_new_index in vertex_index_to_new_indices[vertex_index]:
- found = True
+ result_primitives = []
- for i in range(0, 3):
- if attributes[POSITION_ATTRIBUTE][current_new_index * 3 + i] != v[i]:
- found = False
- break
+ for material_idx, prim in prims.items():
+ if not prim.indices:
+ continue
- if attributes[NORMAL_ATTRIBUTE][current_new_index * 3 + i] != n[i]:
- found = False
- break
+ vs = []
+ ns = []
+ ts = []
+ uvs = [[] for _ in range(tex_coord_max)]
+ cols = [[] for _ in range(color_max)]
+ joints = [[] for _ in range(bone_max)]
+ weights = [[] for _ in range(bone_max)]
+ vs_morph = [[] for _ in shape_keys]
+ ns_morph = [[] for _ in shape_keys]
+ ts_morph = [[] for _ in shape_keys]
+
+ for vert in prim.verts.keys():
+ i = 0
+
+ i += 1 # skip over Blender mesh index
+
+ v = vert[i]
+ i += 1
+ v = convert_swizzle_location(v, armature, blender_object, export_settings)
+ vs.extend(v)
+
+ if use_normals:
+ n = vert[i]
+ i += 1
+ n = convert_swizzle_normal(n, armature, blender_object, export_settings)
+ ns.extend(n)
if use_tangents:
- for i in range(0, 4):
- if attributes[TANGENT_ATTRIBUTE][current_new_index * 4 + i] != t[i]:
- found = False
- break
-
- if not found:
- continue
-
- for tex_coord_index in range(0, tex_coord_max):
- uv = uvs[tex_coord_index]
-
- tex_coord_id = TEXCOORD_PREFIX + str(tex_coord_index)
- for i in range(0, 2):
- if attributes[tex_coord_id][current_new_index * 2 + i] != uv[i]:
- found = False
- break
-
- if export_color:
- for color_index in range(0, color_max):
- color = colors[color_index]
-
- color_id = COLOR_PREFIX + str(color_index)
- for i in range(0, 3):
- # Alpha is always 1.0 - see above.
- current_color = attributes[color_id][current_new_index * 4 + i]
- if color_srgb_to_scene_linear(current_color) != color[i]:
- found = False
- break
-
- if export_settings[gltf2_blender_export_keys.SKINS]:
- for bone_index in range(0, bone_max):
- joint = joints[bone_index]
- weight = weights[bone_index]
-
- joint_id = JOINTS_PREFIX + str(bone_index)
- weight_id = WEIGHTS_PREFIX + str(bone_index)
- for i in range(0, 4):
- if attributes[joint_id][current_new_index * 4 + i] != joint[i]:
- found = False
- break
- if attributes[weight_id][current_new_index * 4 + i] != weight[i]:
- found = False
- break
-
- if export_settings[gltf2_blender_export_keys.MORPH]:
- for morph_index in range(0, morph_max):
- target_position = target_positions[morph_index]
- target_normal = target_normals[morph_index]
- if use_tangents:
- target_tangent = target_tangents[morph_index]
-
- target_position_id = MORPH_POSITION_PREFIX + str(morph_index)
- target_normal_id = MORPH_NORMAL_PREFIX + str(morph_index)
- target_tangent_id = MORPH_TANGENT_PREFIX + str(morph_index)
- for i in range(0, 3):
- if attributes[target_position_id][current_new_index * 3 + i] != target_position[i]:
- found = False
- break
- if attributes[target_normal_id][current_new_index * 3 + i] != target_normal[i]:
- found = False
- break
- if use_tangents:
- if attributes[target_tangent_id][current_new_index * 3 + i] != target_tangent[i]:
- found = False
- break
-
- if found:
- indices.append(current_new_index)
-
- create = False
- break
-
- if not create:
- continue
-
- new_index = 0
-
- if primitive.get('max_index') is not None:
- new_index = primitive['max_index'] + 1
-
- primitive['max_index'] = new_index
-
- vertex_index_to_new_indices[vertex_index].append(new_index)
-
- #
- #
-
- indices.append(new_index)
-
- #
-
- attributes[POSITION_ATTRIBUTE].extend(v)
- attributes[NORMAL_ATTRIBUTE].extend(n)
- if use_tangents:
- attributes[TANGENT_ATTRIBUTE].extend(t)
-
- if blender_mesh.uv_layers.active:
- for tex_coord_index in range(0, tex_coord_max):
- tex_coord_id = TEXCOORD_PREFIX + str(tex_coord_index)
-
- if attributes.get(tex_coord_id) is None:
- attributes[tex_coord_id] = []
-
- attributes[tex_coord_id].extend(uvs[tex_coord_index])
-
- if export_color:
- for color_index in range(0, color_max):
- color_id = COLOR_PREFIX + str(color_index)
-
- if attributes.get(color_id) is None:
- attributes[color_id] = []
-
- attributes[color_id].extend(colors[color_index])
-
- if export_settings[gltf2_blender_export_keys.SKINS]:
- for bone_index in range(0, bone_max):
- joint_id = JOINTS_PREFIX + str(bone_index)
-
- if attributes.get(joint_id) is None:
- attributes[joint_id] = []
-
- attributes[joint_id].extend(joints[bone_index])
-
- weight_id = WEIGHTS_PREFIX + str(bone_index)
-
- if attributes.get(weight_id) is None:
- attributes[weight_id] = []
-
- attributes[weight_id].extend(weights[bone_index])
-
- if export_settings[gltf2_blender_export_keys.MORPH]:
- for morph_index in range(0, morph_max):
- target_position_id = MORPH_POSITION_PREFIX + str(morph_index)
-
- if attributes.get(target_position_id) is None:
- attributes[target_position_id] = []
-
- attributes[target_position_id].extend(target_positions[morph_index])
-
- target_normal_id = MORPH_NORMAL_PREFIX + str(morph_index)
-
- if attributes.get(target_normal_id) is None:
- attributes[target_normal_id] = []
-
- attributes[target_normal_id].extend(target_normals[morph_index])
-
- if use_tangents:
- target_tangent_id = MORPH_TANGENT_PREFIX + str(morph_index)
-
- if attributes.get(target_tangent_id) is None:
- attributes[target_tangent_id] = []
-
- attributes[target_tangent_id].extend(target_tangents[morph_index])
-
- #
- # Add non-empty primitives
- #
-
- result_primitives = [
- primitive
- for primitive in material_idx_to_primitives.values()
- if len(primitive[INDICES_ID]) != 0
- ]
-
- print_console('INFO', 'Primitives created: ' + str(len(result_primitives)))
+ t = vert[i]
+ i += 1
+ t = convert_swizzle_tangent(t, armature, blender_object, export_settings)
+ ts.extend(t)
+
+ b = vert[i]
+ i += 1
+ b = convert_swizzle_tangent(b, armature, blender_object, export_settings)
+ b_sign = -1.0 if (Vector(n).cross(Vector(t))).dot(Vector(b)) < 0.0 else 1.0
+ ts.append(b_sign)
+
+ for tex_coord_index in range(0, tex_coord_max):
+ uv = vert[i]
+ i += 1
+ uvs[tex_coord_index].extend(uv)
+
+ for color_index in range(0, color_max):
+ col = vert[i]
+ i += 1
+ cols[color_index].extend(col)
+
+ if bone_max:
+ bones = vert[i]
+ i += 1
+ for j in range(0, 4 * bone_max):
+ if j < len(bones):
+ joint, weight = bones[j]
+ else:
+ joint, weight = 0, 0.0
+ joints[j//4].append(joint)
+ weights[j//4].append(weight)
+
+ for shape_key_index in range(0, len(shape_keys)):
+ v_morph = vert[i]
+ i += 1
+ v_morph = convert_swizzle_location(v_morph, armature, blender_object, export_settings)
+ vs_morph[shape_key_index].extend(v_morph)
+
+ if use_morph_normals:
+ n_morph = vert[i]
+ i += 1
+ n_morph = convert_swizzle_normal(n_morph, armature, blender_object, export_settings)
+ ns_morph[shape_key_index].extend(n_morph)
+
+ if use_morph_tangents:
+ rotation = n_morph.rotation_difference(n)
+ t_morph = Vector(t)
+ t_morph.rotate(rotation)
+ ts_morph[shape_key_index].extend(t_morph)
+
+ attributes = {}
+ attributes['POSITION'] = vs
+ if ns: attributes['NORMAL'] = ns
+ if ts: attributes['TANGENT'] = ts
+ for i, uv in enumerate(uvs): attributes['TEXCOORD_%d' % i] = uv
+ for i, col in enumerate(cols): attributes['COLOR_%d' % i] = col
+ for i, js in enumerate(joints): attributes['JOINTS_%d' % i] = js
+ for i, ws in enumerate(weights): attributes['WEIGHTS_%d' % i] = ws
+ for i, vm in enumerate(vs_morph): attributes['MORPH_POSITION_%d' % i] = vm
+ for i, nm in enumerate(ns_morph): attributes['MORPH_NORMAL_%d' % i] = nm
+ for i, tm in enumerate(ts_morph): attributes['MORPH_TANGENT_%d' % i] = tm
+
+ result_primitives.append({
+ 'attributes': attributes,
+ 'indices': prim.indices,
+ 'material': material_idx,
+ })
+
+ print_console('INFO', 'Primitives created: %d' % len(result_primitives))
return result_primitives
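
The heart of the rewritten extractor is the vert tuple used as a cache key: every attribute of a loop is flattened into a hashable tuple, and dict.setdefault() hands out one index per distinct tuple while the index list is built. The same idea in isolation (sample data made up):

verts = {}
indices = []
for vert in [(0, (0.0, 0.0, 0.0)), (1, (1.0, 0.0, 0.0)), (0, (0.0, 0.0, 0.0))]:
    indices.append(verts.setdefault(vert, len(verts)))
print(indices)      # [0, 1, 0] -- the repeated tuple reuses index 0
print(len(verts))   # 2 unique verts, in first-seen order
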
diff --git a/io_scene_gltf2/blender/exp/gltf2_blender_gather_animation_sampler_keyframes.py b/io_scene_gltf2/blender/exp/gltf2_blender_gather_animation_sampler_keyframes.py
index 822aa6a1..f8ab333e 100755
--- a/io_scene_gltf2/blender/exp/gltf2_blender_gather_animation_sampler_keyframes.py
+++ b/io_scene_gltf2/blender/exp/gltf2_blender_gather_animation_sampler_keyframes.py
@@ -161,8 +161,14 @@ def get_bone_matrix(blender_object_if_armature: typing.Optional[bpy.types.Object
if bake_bone is None:
matrix = pbone.matrix_basis.copy()
else:
- matrix = pbone.matrix
- matrix = blender_object_if_armature.convert_space(pose_bone=pbone, matrix=matrix, from_space='POSE', to_space='LOCAL')
+ if (pbone.bone.use_inherit_rotation == False or pbone.bone.inherit_scale != "FULL") and pbone.parent != None:
+ rest_mat = (pbone.parent.bone.matrix_local.inverted_safe() @ pbone.bone.matrix_local)
+ matrix = (rest_mat.inverted_safe() @ pbone.parent.matrix.inverted_safe() @ pbone.matrix)
+ else:
+ matrix = pbone.matrix
+ matrix = blender_object_if_armature.convert_space(pose_bone=pbone, matrix=matrix, from_space='POSE', to_space='LOCAL')
+
+
data[frame][pbone.name] = matrix
diff --git a/io_scene_gltf2/blender/exp/gltf2_blender_gather_animation_samplers.py b/io_scene_gltf2/blender/exp/gltf2_blender_gather_animation_samplers.py
index f2375bb1..c3913367 100755
--- a/io_scene_gltf2/blender/exp/gltf2_blender_gather_animation_samplers.py
+++ b/io_scene_gltf2/blender/exp/gltf2_blender_gather_animation_samplers.py
@@ -343,10 +343,12 @@ def __gather_output(channels: typing.Tuple[bpy.types.FCurve],
(0.0, 0.0, 1.0, 0.0),
(0.0, -1.0, 0.0, 0.0),
(0.0, 0.0, 0.0, 1.0)))
- correction_matrix_local = gltf2_blender_math.multiply(axis_basis_change, bone.bone.matrix_local)
+ correction_matrix_local = axis_basis_change @ bone.bone.matrix_local
else:
- correction_matrix_local = gltf2_blender_math.multiply(
- bone.parent.bone.matrix_local.inverted(), bone.bone.matrix_local)
+ correction_matrix_local = (
+ bone.parent.bone.matrix_local.inverted() @
+ bone.bone.matrix_local
+ )
transform = correction_matrix_local
else:
diff --git a/io_scene_gltf2/blender/exp/gltf2_blender_gather_cameras.py b/io_scene_gltf2/blender/exp/gltf2_blender_gather_cameras.py
index 585f0be3..bb211fe2 100755
--- a/io_scene_gltf2/blender/exp/gltf2_blender_gather_cameras.py
+++ b/io_scene_gltf2/blender/exp/gltf2_blender_gather_cameras.py
@@ -92,18 +92,18 @@ def __gather_perspective(blender_camera, export_settings):
width = bpy.context.scene.render.pixel_aspect_x * bpy.context.scene.render.resolution_x
height = bpy.context.scene.render.pixel_aspect_y * bpy.context.scene.render.resolution_y
- perspective.aspectRatio = width / height
+ perspective.aspect_ratio = width / height
if width >= height:
if blender_camera.sensor_fit != 'VERTICAL':
- perspective.yfov = 2.0 * math.atan(math.tan(blender_camera.angle * 0.5) / perspective.aspectRatio)
+ perspective.yfov = 2.0 * math.atan(math.tan(blender_camera.angle * 0.5) / perspective.aspect_ratio)
else:
perspective.yfov = blender_camera.angle
else:
if blender_camera.sensor_fit != 'HORIZONTAL':
perspective.yfov = blender_camera.angle
else:
- perspective.yfov = 2.0 * math.atan(math.tan(blender_camera.angle * 0.5) / perspective.aspectRatio)
+ perspective.yfov = 2.0 * math.atan(math.tan(blender_camera.angle * 0.5) / perspective.aspect_ratio)
perspective.znear = blender_camera.clip_start
perspective.zfar = blender_camera.clip_end
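
A worked example of the yfov conversion above (made-up values): a 16:9 render with a 90 degree horizontal camera angle.

import math

aspect_ratio = 1920 / 1080          # width / height
horizontal_fov = math.radians(90.0)
yfov = 2.0 * math.atan(math.tan(horizontal_fov * 0.5) / aspect_ratio)
print(math.degrees(yfov))           # ~58.7 degrees vertical FOV
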
diff --git a/io_scene_gltf2/blender/exp/gltf2_blender_gather_joints.py b/io_scene_gltf2/blender/exp/gltf2_blender_gather_joints.py
index af086c1b..dff55d17 100755
--- a/io_scene_gltf2/blender/exp/gltf2_blender_gather_joints.py
+++ b/io_scene_gltf2/blender/exp/gltf2_blender_gather_joints.py
@@ -17,8 +17,6 @@ import mathutils
from . import gltf2_blender_export_keys
from io_scene_gltf2.blender.exp.gltf2_blender_gather_cache import cached
from io_scene_gltf2.io.com import gltf2_io
-from io_scene_gltf2.blender.exp import gltf2_blender_extract
-from io_scene_gltf2.blender.com import gltf2_blender_math
from io_scene_gltf2.blender.exp import gltf2_blender_gather_skins
from io_scene_gltf2.io.exp.gltf2_io_user_extensions import export_user_extensions
from ..com.gltf2_blender_extras import generate_extras
@@ -39,13 +37,21 @@ def gather_joint(blender_object, blender_bone, export_settings):
# extract bone transform
if blender_bone.parent is None:
- correction_matrix_local = gltf2_blender_math.multiply(axis_basis_change, blender_bone.bone.matrix_local)
+ correction_matrix_local = axis_basis_change @ blender_bone.bone.matrix_local
else:
- correction_matrix_local = gltf2_blender_math.multiply(
- blender_bone.parent.bone.matrix_local.inverted(), blender_bone.bone.matrix_local)
- matrix_basis = blender_bone.matrix_basis
- trans, rot, sca = gltf2_blender_extract.decompose_transition(
- gltf2_blender_math.multiply(correction_matrix_local, matrix_basis), export_settings)
+ correction_matrix_local = (
+ blender_bone.parent.bone.matrix_local.inverted() @
+ blender_bone.bone.matrix_local
+ )
+
+ if (blender_bone.bone.use_inherit_rotation == False or blender_bone.bone.inherit_scale != "FULL") and blender_bone.parent != None:
+ rest_mat = (blender_bone.parent.bone.matrix_local.inverted_safe() @ blender_bone.bone.matrix_local)
+ matrix_basis = (rest_mat.inverted_safe() @ blender_bone.parent.matrix.inverted_safe() @ blender_bone.matrix)
+ else:
+ matrix_basis = blender_bone.matrix
+ matrix_basis = blender_object.convert_space(pose_bone=blender_bone, matrix=matrix_basis, from_space='POSE', to_space='LOCAL')
+
+ trans, rot, sca = (correction_matrix_local @ matrix_basis).decompose()
translation, rotation, scale = (None, None, None)
if trans[0] != 0.0 or trans[1] != 0.0 or trans[2] != 0.0:
translation = [trans[0], trans[1], trans[2]]
diff --git a/io_scene_gltf2/blender/exp/gltf2_blender_gather_materials_pbr_metallic_roughness.py b/io_scene_gltf2/blender/exp/gltf2_blender_gather_materials_pbr_metallic_roughness.py
index 54493799..7913d175 100755
--- a/io_scene_gltf2/blender/exp/gltf2_blender_gather_materials_pbr_metallic_roughness.py
+++ b/io_scene_gltf2/blender/exp/gltf2_blender_gather_materials_pbr_metallic_roughness.py
@@ -47,6 +47,9 @@ def __filter_pbr_material(blender_material, export_settings):
def __gather_base_color_factor(blender_material, export_settings):
+ alpha_socket = gltf2_blender_get.get_socket(blender_material, "Alpha")
+ alpha = alpha_socket.default_value if alpha_socket is not None and not alpha_socket.is_linked else 1.0
+
base_color_socket = gltf2_blender_get.get_socket(blender_material, "Base Color")
if base_color_socket is None:
base_color_socket = gltf2_blender_get.get_socket(blender_material, "BaseColor")
@@ -57,7 +60,7 @@ def __gather_base_color_factor(blender_material, export_settings):
if not isinstance(base_color_socket, bpy.types.NodeSocket):
return None
if not base_color_socket.is_linked:
- return list(base_color_socket.default_value)
+ return list(base_color_socket.default_value)[:3] + [alpha]
texture_node = __get_tex_from_socket(base_color_socket)
if texture_node is None:
@@ -85,7 +88,7 @@ def __gather_base_color_factor(blender_material, export_settings):
.format(multiply_node.name))
return None
- return list(factor_socket.default_value)
+ return list(factor_socket.default_value)[:3] + [alpha]
def __gather_base_color_texture(blender_material, export_settings):
diff --git a/io_scene_gltf2/blender/exp/gltf2_blender_gather_nodes.py b/io_scene_gltf2/blender/exp/gltf2_blender_gather_nodes.py
index 83984c2b..b09e7aa1 100755
--- a/io_scene_gltf2/blender/exp/gltf2_blender_gather_nodes.py
+++ b/io_scene_gltf2/blender/exp/gltf2_blender_gather_nodes.py
@@ -189,8 +189,8 @@ def __gather_children(blender_object, blender_scene, export_settings):
rot_quat = Quaternion(rot)
axis_basis_change = Matrix(
((1.0, 0.0, 0.0, 0.0), (0.0, 0.0, -1.0, 0.0), (0.0, 1.0, 0.0, 0.0), (0.0, 0.0, 0.0, 1.0)))
- mat = gltf2_blender_math.multiply(child.matrix_parent_inverse, child.matrix_basis)
- mat = gltf2_blender_math.multiply(mat, axis_basis_change)
+ mat = child.matrix_parent_inverse @ child.matrix_basis
+ mat = mat @ axis_basis_change
_, rot_quat, _ = mat.decompose()
child_node.rotation = [rot_quat[1], rot_quat[2], rot_quat[3], rot_quat[0]]
@@ -248,6 +248,9 @@ def __gather_matrix(blender_object, export_settings):
def __gather_mesh(blender_object, library, export_settings):
+ if blender_object.type in ['CURVE', 'SURFACE', 'FONT']:
+ return __gather_mesh_from_nonmesh(blender_object, library, export_settings)
+
if blender_object.type != "MESH":
return None
@@ -338,6 +341,49 @@ def __gather_mesh(blender_object, library, export_settings):
return result
+def __gather_mesh_from_nonmesh(blender_object, library, export_settings):
+ """Handles curves, surfaces, text, etc."""
+ needs_to_mesh_clear = False
+ try:
+ # Convert to a mesh
+ try:
+ if export_settings[gltf2_blender_export_keys.APPLY]:
+ depsgraph = bpy.context.evaluated_depsgraph_get()
+ blender_mesh_owner = blender_object.evaluated_get(depsgraph)
+ blender_mesh = blender_mesh_owner.to_mesh(preserve_all_data_layers=True, depsgraph=depsgraph)
+ # TODO: do we need preserve_all_data_layers?
+
+ else:
+ blender_mesh_owner = blender_object
+ blender_mesh = blender_mesh_owner.to_mesh()
+
+ except Exception:
+ return None
+
+ needs_to_mesh_clear = True
+
+ skip_filter = True
+ material_names = tuple([ms.material.name for ms in blender_object.material_slots if ms.material is not None])
+ vertex_groups = None
+ modifiers = None
+ blender_object_for_skined_data = None
+
+ result = gltf2_blender_gather_mesh.gather_mesh(blender_mesh,
+ library,
+ blender_object_for_skined_data,
+ vertex_groups,
+ modifiers,
+ skip_filter,
+ material_names,
+ export_settings)
+
+ finally:
+ if needs_to_mesh_clear:
+ blender_mesh_owner.to_mesh_clear()
+
+ return result
+
+
def __gather_name(blender_object, export_settings):
return blender_object.name
@@ -358,7 +404,7 @@ def __gather_trans_rot_scale(blender_object, export_settings):
if blender_object.matrix_local[3][3] != 0.0:
- trans, rot, sca = gltf2_blender_extract.decompose_transition(blender_object.matrix_local, export_settings)
+ trans, rot, sca = blender_object.matrix_local.decompose()
else:
# Some really weird cases, scale is null (if parent is null when evaluation is done)
print_console('WARNING', 'Some nodes are 0 scaled during evaluation. Result can be wrong')
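
The non-mesh path above boils down to evaluating the object through the depsgraph, grabbing a temporary mesh, and freeing it afterwards. A hedged sketch of the same steps in Blender's Python console (the object name is hypothetical):

import bpy

obj = bpy.data.objects["MyCurve"]   # a CURVE/SURFACE/FONT object
depsgraph = bpy.context.evaluated_depsgraph_get()
eval_obj = obj.evaluated_get(depsgraph)
mesh = eval_obj.to_mesh(preserve_all_data_layers=True, depsgraph=depsgraph)
try:
    print(len(mesh.vertices), "vertices after conversion")
finally:
    eval_obj.to_mesh_clear()        # free the temporary mesh
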
diff --git a/io_scene_gltf2/blender/exp/gltf2_blender_gather_primitive_attributes.py b/io_scene_gltf2/blender/exp/gltf2_blender_gather_primitive_attributes.py
index f5856257..61adea89 100755
--- a/io_scene_gltf2/blender/exp/gltf2_blender_gather_primitive_attributes.py
+++ b/io_scene_gltf2/blender/exp/gltf2_blender_gather_primitive_attributes.py
@@ -12,12 +12,13 @@
# See the License for the specific language governing permissions and
# limitations under the License.
+import numpy as np
+
from . import gltf2_blender_export_keys
from io_scene_gltf2.io.com import gltf2_io
from io_scene_gltf2.io.com import gltf2_io_constants
from io_scene_gltf2.io.com import gltf2_io_debug
from io_scene_gltf2.io.exp import gltf2_io_binary_data
-from io_scene_gltf2.blender.exp import gltf2_blender_utils
def gather_primitive_attributes(blender_primitive, export_settings):
@@ -36,72 +37,79 @@ def gather_primitive_attributes(blender_primitive, export_settings):
return attributes
+def array_to_accessor(array, component_type, data_type, include_max_and_min=False):
+ dtype = gltf2_io_constants.ComponentType.to_numpy_dtype(component_type)
+ num_elems = gltf2_io_constants.DataType.num_elements(data_type)
+
+ if type(array) is not np.ndarray:
+ array = np.array(array, dtype=dtype)
+ array = array.reshape(len(array) // num_elems, num_elems)
+
+ assert array.dtype == dtype
+ assert array.shape[1] == num_elems
+
+ amax = None
+ amin = None
+ if include_max_and_min:
+ amax = np.amax(array, axis=0).tolist()
+ amin = np.amin(array, axis=0).tolist()
+
+ return gltf2_io.Accessor(
+ buffer_view=gltf2_io_binary_data.BinaryData(array.tobytes()),
+ byte_offset=None,
+ component_type=component_type,
+ count=len(array),
+ extensions=None,
+ extras=None,
+ max=amax,
+ min=amin,
+ name=None,
+ normalized=None,
+ sparse=None,
+ type=data_type,
+ )
+
+
def __gather_position(blender_primitive, export_settings):
position = blender_primitive["attributes"]["POSITION"]
- componentType = gltf2_io_constants.ComponentType.Float
return {
- "POSITION": gltf2_io.Accessor(
- buffer_view=gltf2_io_binary_data.BinaryData.from_list(position, componentType),
- byte_offset=None,
- component_type=componentType,
- count=len(position) // gltf2_io_constants.DataType.num_elements(gltf2_io_constants.DataType.Vec3),
- extensions=None,
- extras=None,
- max=gltf2_blender_utils.max_components(position, gltf2_io_constants.DataType.Vec3),
- min=gltf2_blender_utils.min_components(position, gltf2_io_constants.DataType.Vec3),
- name=None,
- normalized=None,
- sparse=None,
- type=gltf2_io_constants.DataType.Vec3
+ "POSITION": array_to_accessor(
+ position,
+ component_type=gltf2_io_constants.ComponentType.Float,
+ data_type=gltf2_io_constants.DataType.Vec3,
+ include_max_and_min=True
)
}
def __gather_normal(blender_primitive, export_settings):
- if export_settings[gltf2_blender_export_keys.NORMALS]:
- normal = blender_primitive["attributes"]['NORMAL']
- return {
- "NORMAL": gltf2_io.Accessor(
- buffer_view=gltf2_io_binary_data.BinaryData.from_list(normal, gltf2_io_constants.ComponentType.Float),
- byte_offset=None,
- component_type=gltf2_io_constants.ComponentType.Float,
- count=len(normal) // gltf2_io_constants.DataType.num_elements(gltf2_io_constants.DataType.Vec3),
- extensions=None,
- extras=None,
- max=None,
- min=None,
- name=None,
- normalized=None,
- sparse=None,
- type=gltf2_io_constants.DataType.Vec3
- )
- }
- return {}
+ if not export_settings[gltf2_blender_export_keys.NORMALS]:
+ return {}
+ normal = blender_primitive["attributes"].get('NORMAL')
+ if not normal:
+ return {}
+ return {
+ "NORMAL": array_to_accessor(
+ normal,
+ component_type=gltf2_io_constants.ComponentType.Float,
+ data_type=gltf2_io_constants.DataType.Vec3,
+ )
+ }
def __gather_tangent(blender_primitive, export_settings):
- if export_settings[gltf2_blender_export_keys.TANGENTS]:
- if blender_primitive["attributes"].get('TANGENT') is not None:
- tangent = blender_primitive["attributes"]['TANGENT']
- return {
- "TANGENT": gltf2_io.Accessor(
- buffer_view=gltf2_io_binary_data.BinaryData.from_list(
- tangent, gltf2_io_constants.ComponentType.Float),
- byte_offset=None,
- component_type=gltf2_io_constants.ComponentType.Float,
- count=len(tangent) // gltf2_io_constants.DataType.num_elements(gltf2_io_constants.DataType.Vec4),
- extensions=None,
- extras=None,
- max=None,
- min=None,
- name=None,
- normalized=None,
- sparse=None,
- type=gltf2_io_constants.DataType.Vec4
- )
- }
-
- return {}
+ if not export_settings[gltf2_blender_export_keys.TANGENTS]:
+ return {}
+ tangent = blender_primitive["attributes"].get('TANGENT')
+ if not tangent:
+ return {}
+ return {
+ "TANGENT": array_to_accessor(
+ tangent,
+ component_type=gltf2_io_constants.ComponentType.Float,
+ data_type=gltf2_io_constants.DataType.Vec4,
+ )
+ }
def __gather_texcoord(blender_primitive, export_settings):
@@ -111,20 +119,10 @@ def __gather_texcoord(blender_primitive, export_settings):
tex_coord_id = 'TEXCOORD_' + str(tex_coord_index)
while blender_primitive["attributes"].get(tex_coord_id) is not None:
tex_coord = blender_primitive["attributes"][tex_coord_id]
- attributes[tex_coord_id] = gltf2_io.Accessor(
- buffer_view=gltf2_io_binary_data.BinaryData.from_list(
- tex_coord, gltf2_io_constants.ComponentType.Float),
- byte_offset=None,
+ attributes[tex_coord_id] = array_to_accessor(
+ tex_coord,
component_type=gltf2_io_constants.ComponentType.Float,
- count=len(tex_coord) // gltf2_io_constants.DataType.num_elements(gltf2_io_constants.DataType.Vec2),
- extensions=None,
- extras=None,
- max=None,
- min=None,
- name=None,
- normalized=None,
- sparse=None,
- type=gltf2_io_constants.DataType.Vec2
+ data_type=gltf2_io_constants.DataType.Vec2,
)
tex_coord_index += 1
tex_coord_id = 'TEXCOORD_' + str(tex_coord_index)
@@ -138,20 +136,10 @@ def __gather_colors(blender_primitive, export_settings):
color_id = 'COLOR_' + str(color_index)
while blender_primitive["attributes"].get(color_id) is not None:
internal_color = blender_primitive["attributes"][color_id]
- attributes[color_id] = gltf2_io.Accessor(
- buffer_view=gltf2_io_binary_data.BinaryData.from_list(
- internal_color, gltf2_io_constants.ComponentType.Float),
- byte_offset=None,
+ attributes[color_id] = array_to_accessor(
+ internal_color,
component_type=gltf2_io_constants.ComponentType.Float,
- count=len(internal_color) // gltf2_io_constants.DataType.num_elements(gltf2_io_constants.DataType.Vec4),
- extensions=None,
- extras=None,
- max=None,
- min=None,
- name=None,
- normalized=None,
- sparse=None,
- type=gltf2_io_constants.DataType.Vec4
+ data_type=gltf2_io_constants.DataType.Vec4,
)
color_index += 1
color_id = 'COLOR_' + str(color_index)
@@ -173,20 +161,13 @@ def __gather_skins(blender_primitive, export_settings):
# joints
internal_joint = blender_primitive["attributes"][joint_id]
- joint = gltf2_io.Accessor(
- buffer_view=gltf2_io_binary_data.BinaryData.from_list(
- internal_joint, gltf2_io_constants.ComponentType.UnsignedShort),
- byte_offset=None,
- component_type=gltf2_io_constants.ComponentType.UnsignedShort,
- count=len(internal_joint) // gltf2_io_constants.DataType.num_elements(gltf2_io_constants.DataType.Vec4),
- extensions=None,
- extras=None,
- max=None,
- min=None,
- name=None,
- normalized=None,
- sparse=None,
- type=gltf2_io_constants.DataType.Vec4
+ component_type = gltf2_io_constants.ComponentType.UnsignedShort
+ if max(internal_joint) < 256:
+ component_type = gltf2_io_constants.ComponentType.UnsignedByte
+ joint = array_to_accessor(
+ internal_joint,
+ component_type,
+ data_type=gltf2_io_constants.DataType.Vec4,
)
attributes[joint_id] = joint
@@ -201,21 +182,10 @@ def __gather_skins(blender_primitive, export_settings):
factor = 1.0 / total
internal_weight[idx:idx + 4] = [w * factor for w in weight_slice]
- weight = gltf2_io.Accessor(
- buffer_view=gltf2_io_binary_data.BinaryData.from_list(
- internal_weight, gltf2_io_constants.ComponentType.Float),
- byte_offset=None,
+ weight = array_to_accessor(
+ internal_weight,
component_type=gltf2_io_constants.ComponentType.Float,
- count=len(internal_weight) // gltf2_io_constants.DataType.num_elements(
- gltf2_io_constants.DataType.Vec4),
- extensions=None,
- extras=None,
- max=None,
- min=None,
- name=None,
- normalized=None,
- sparse=None,
- type=gltf2_io_constants.DataType.Vec4
+ data_type=gltf2_io_constants.DataType.Vec4,
)
attributes[weight_id] = weight
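
The hunks above replace the repeated Accessor boilerplate with an array_to_accessor helper whose definition is not shown in this excerpt. A minimal sketch of what such a helper could look like, assuming flat component lists as in the removed lines (the real helper may operate on numpy arrays instead), with min/max filled in the same way the deleted gltf2_blender_utils functions did:

    from io_scene_gltf2.io.com import gltf2_io
    from io_scene_gltf2.io.com import gltf2_io_constants
    from io_scene_gltf2.io.exp import gltf2_io_binary_data

    def array_to_accessor(array, component_type, data_type, include_max_and_min=False):
        # Group the flat list into per-element tuples and reduce per component.
        num_elems = gltf2_io_constants.DataType.num_elements(data_type)
        amax = amin = None
        if include_max_and_min:
            grouped = [array[i:i + num_elems] for i in range(0, len(array), num_elems)]
            amax = [max(col) for col in zip(*grouped)]
            amin = [min(col) for col in zip(*grouped)]
        return gltf2_io.Accessor(
            buffer_view=gltf2_io_binary_data.BinaryData.from_list(array, component_type),
            byte_offset=None,
            component_type=component_type,
            count=len(array) // num_elems,
            extensions=None,
            extras=None,
            max=amax,
            min=amin,
            name=None,
            normalized=None,
            sparse=None,
            type=data_type,
        )
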
diff --git a/io_scene_gltf2/blender/exp/gltf2_blender_gather_primitives.py b/io_scene_gltf2/blender/exp/gltf2_blender_gather_primitives.py
index 22f0bc6d..1a2ae00d 100755
--- a/io_scene_gltf2/blender/exp/gltf2_blender_gather_primitives.py
+++ b/io_scene_gltf2/blender/exp/gltf2_blender_gather_primitives.py
@@ -21,7 +21,6 @@ from io_scene_gltf2.blender.exp.gltf2_blender_gather_cache import cached
from io_scene_gltf2.blender.exp import gltf2_blender_extract
from io_scene_gltf2.blender.exp import gltf2_blender_gather_accessors
from io_scene_gltf2.blender.exp import gltf2_blender_gather_primitive_attributes
-from io_scene_gltf2.blender.exp import gltf2_blender_utils
from io_scene_gltf2.blender.exp import gltf2_blender_gather_materials
from io_scene_gltf2.io.com import gltf2_io
@@ -160,26 +159,11 @@ def __gather_targets(blender_primitive, blender_mesh, modifiers, export_settings
if blender_primitive["attributes"].get(target_position_id):
target = {}
internal_target_position = blender_primitive["attributes"][target_position_id]
- binary_data = gltf2_io_binary_data.BinaryData.from_list(
+ target["POSITION"] = gltf2_blender_gather_primitive_attributes.array_to_accessor(
internal_target_position,
- gltf2_io_constants.ComponentType.Float
- )
- target["POSITION"] = gltf2_io.Accessor(
- buffer_view=binary_data,
- byte_offset=None,
component_type=gltf2_io_constants.ComponentType.Float,
- count=len(internal_target_position) // gltf2_io_constants.DataType.num_elements(
- gltf2_io_constants.DataType.Vec3),
- extensions=None,
- extras=None,
- max=gltf2_blender_utils.max_components(
- internal_target_position, gltf2_io_constants.DataType.Vec3),
- min=gltf2_blender_utils.min_components(
- internal_target_position, gltf2_io_constants.DataType.Vec3),
- name=None,
- normalized=None,
- sparse=None,
- type=gltf2_io_constants.DataType.Vec3
+ data_type=gltf2_io_constants.DataType.Vec3,
+ include_max_and_min=True,
)
if export_settings[NORMALS] \
@@ -187,48 +171,20 @@ def __gather_targets(blender_primitive, blender_mesh, modifiers, export_settings
and blender_primitive["attributes"].get(target_normal_id):
internal_target_normal = blender_primitive["attributes"][target_normal_id]
- binary_data = gltf2_io_binary_data.BinaryData.from_list(
+ target['NORMAL'] = gltf2_blender_gather_primitive_attributes.array_to_accessor(
internal_target_normal,
- gltf2_io_constants.ComponentType.Float,
- )
- target['NORMAL'] = gltf2_io.Accessor(
- buffer_view=binary_data,
- byte_offset=None,
component_type=gltf2_io_constants.ComponentType.Float,
- count=len(internal_target_normal) // gltf2_io_constants.DataType.num_elements(
- gltf2_io_constants.DataType.Vec3),
- extensions=None,
- extras=None,
- max=None,
- min=None,
- name=None,
- normalized=None,
- sparse=None,
- type=gltf2_io_constants.DataType.Vec3
+ data_type=gltf2_io_constants.DataType.Vec3,
)
if export_settings[TANGENTS] \
and export_settings[MORPH_TANGENT] \
and blender_primitive["attributes"].get(target_tangent_id):
internal_target_tangent = blender_primitive["attributes"][target_tangent_id]
- binary_data = gltf2_io_binary_data.BinaryData.from_list(
+ target['TANGENT'] = gltf2_blender_gather_primitive_attributes.array_to_accessor(
internal_target_tangent,
- gltf2_io_constants.ComponentType.Float,
- )
- target['TANGENT'] = gltf2_io.Accessor(
- buffer_view=binary_data,
- byte_offset=None,
component_type=gltf2_io_constants.ComponentType.Float,
- count=len(internal_target_tangent) // gltf2_io_constants.DataType.num_elements(
- gltf2_io_constants.DataType.Vec3),
- extensions=None,
- extras=None,
- max=None,
- min=None,
- name=None,
- normalized=None,
- sparse=None,
- type=gltf2_io_constants.DataType.Vec3
+ data_type=gltf2_io_constants.DataType.Vec3,
)
targets.append(target)
morph_index += 1
diff --git a/io_scene_gltf2/blender/exp/gltf2_blender_gather_skins.py b/io_scene_gltf2/blender/exp/gltf2_blender_gather_skins.py
index fa95e543..7f645272 100755
--- a/io_scene_gltf2/blender/exp/gltf2_blender_gather_skins.py
+++ b/io_scene_gltf2/blender/exp/gltf2_blender_gather_skins.py
@@ -20,7 +20,6 @@ from io_scene_gltf2.io.exp import gltf2_io_binary_data
from io_scene_gltf2.io.com import gltf2_io_constants
from io_scene_gltf2.blender.exp import gltf2_blender_gather_accessors
from io_scene_gltf2.blender.exp import gltf2_blender_gather_joints
-from io_scene_gltf2.blender.com import gltf2_blender_math
from io_scene_gltf2.io.exp.gltf2_io_user_extensions import export_user_extensions
@@ -85,10 +84,10 @@ def __gather_inverse_bind_matrices(blender_object, export_settings):
# traverse the matrices in the same order as the joints and compute the inverse bind matrix
def __collect_matrices(bone):
- inverse_bind_matrix = gltf2_blender_math.multiply(
- axis_basis_change,
- gltf2_blender_math.multiply(
- blender_object.matrix_world,
+ inverse_bind_matrix = (
+ axis_basis_change @
+ (
+ blender_object.matrix_world @
bone.bone.matrix_local
)
).inverted()
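
This hunk drops the gltf2_blender_math.multiply wrapper in favor of the native matrix-multiplication operator, which mathutils.Matrix supports since Blender 2.80. A tiny illustration with hypothetical matrices:

    from mathutils import Matrix

    # Hypothetical values: a Y-up to Z-up basis change, a world transform, and
    # an identity bone-local matrix composed with @ and then inverted.
    axis_basis_change = Matrix(((1, 0, 0, 0), (0, 0, -1, 0), (0, 1, 0, 0), (0, 0, 0, 1)))
    world = Matrix.Translation((0.0, 2.0, 0.0))
    bone_local = Matrix.Identity(4)

    inverse_bind_matrix = (axis_basis_change @ (world @ bone_local)).inverted()
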
diff --git a/io_scene_gltf2/blender/exp/gltf2_blender_utils.py b/io_scene_gltf2/blender/exp/gltf2_blender_utils.py
deleted file mode 100755
index 8d5baae7..00000000
--- a/io_scene_gltf2/blender/exp/gltf2_blender_utils.py
+++ /dev/null
@@ -1,67 +0,0 @@
-# Copyright 2018 The glTF-Blender-IO authors.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import math
-from io_scene_gltf2.io.com import gltf2_io_constants
-
-
-# TODO: we could apply functional programming to these problems (currently we only have a single use case)
-
-def split_list_by_data_type(l: list, data_type: gltf2_io_constants.DataType):
- """
- Split a flat list of components by their data type.
-
- E.g.: A list [0,1,2,3,4,5] of data type Vec3 would be split to [[0,1,2], [3,4,5]]
- :param l: the flat list
- :param data_type: the data type of the list
- :return: a list of lists, where each element list contains the components of the data type
- """
- if not (len(l) % gltf2_io_constants.DataType.num_elements(data_type) == 0):
- raise ValueError("List length does not match specified data type")
- num_elements = gltf2_io_constants.DataType.num_elements(data_type)
- return [l[i:i + num_elements] for i in range(0, len(l), num_elements)]
-
-
-def max_components(l: list, data_type: gltf2_io_constants.DataType) -> list:
- """
- Find the maximum components in a flat list.
-
- This is required, for example, for the glTF2.0 accessor min and max properties
- :param l: the flat list of components
- :param data_type: the data type of the list (determines the length of the result)
- :return: a list with length num_elements(data_type) containing the maximum per component along the list
- """
- components_lists = split_list_by_data_type(l, data_type)
- result = [-math.inf] * gltf2_io_constants.DataType.num_elements(data_type)
- for components in components_lists:
- for i, c in enumerate(components):
- result[i] = max(result[i], c)
- return result
-
-
-def min_components(l: list, data_type: gltf2_io_constants.DataType) -> list:
- """
- Find the minimum components in a flat list.
-
- This is required, for example, for the glTF2.0 accessor min and max properties
- :param l: the flat list of components
- :param data_type: the data type of the list (determines the length of the result)
- :return: a list with length num_elements(data_type) containing the minimum per component along the list
- """
- components_lists = split_list_by_data_type(l, data_type)
- result = [math.inf] * gltf2_io_constants.DataType.num_elements(data_type)
- for components in components_lists:
- for i, c in enumerate(components):
- result[i] = min(result[i], c)
- return result
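
The per-component reductions removed here are not re-implemented anywhere in this excerpt; with the exporter now working on arrays, an equivalent of max_components/min_components can be written as a numpy reduction over the same flat layout (illustrative only):

    import numpy as np

    flat = [0.0, 1.0, 2.0, 3.0, 4.0, 5.0]            # two Vec3 elements
    arr = np.array(flat, dtype=np.float32).reshape(-1, 3)
    per_component_max = arr.max(axis=0).tolist()     # [3.0, 4.0, 5.0]
    per_component_min = arr.min(axis=0).tolist()     # [0.0, 1.0, 2.0]
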
diff --git a/io_scene_gltf2/blender/imp/gltf2_blender_KHR_materials_pbrSpecularGlossiness.py b/io_scene_gltf2/blender/imp/gltf2_blender_KHR_materials_pbrSpecularGlossiness.py
index ce5e1aed..bb1bf272 100755
--- a/io_scene_gltf2/blender/imp/gltf2_blender_KHR_materials_pbrSpecularGlossiness.py
+++ b/io_scene_gltf2/blender/imp/gltf2_blender_KHR_materials_pbrSpecularGlossiness.py
@@ -79,7 +79,8 @@ def pbr_specular_glossiness(mh):
)
if mh.pymat.occlusion_texture is not None:
- node = make_settings_node(mh, location=(610, -1060))
+ node = make_settings_node(mh)
+ node.location = (610, -1060)
occlusion(
mh,
location=(510, -970),
diff --git a/io_scene_gltf2/blender/imp/gltf2_blender_gltf.py b/io_scene_gltf2/blender/imp/gltf2_blender_gltf.py
index efa7f003..226720a3 100755
--- a/io_scene_gltf2/blender/imp/gltf2_blender_gltf.py
+++ b/io_scene_gltf2/blender/imp/gltf2_blender_gltf.py
@@ -50,9 +50,9 @@ class BlenderGlTF():
@staticmethod
def set_convert_functions(gltf):
- yup2zup = bpy.app.debug_value != 100
+ gltf.yup2zup = bpy.app.debug_value != 100
- if yup2zup:
+ if gltf.yup2zup:
# glTF Y-Up space --> Blender Z-up space
# X,Y,Z --> X,-Z,Y
def convert_loc(x): return Vector([x[0], -x[2], x[1]])
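
Only the storage of the flag moves onto the gltf object here; the axis mapping itself is unchanged. A quick numeric check of that mapping:

    from mathutils import Vector

    def convert_loc(x):
        return Vector([x[0], -x[2], x[1]])

    # glTF (X, Y, Z) becomes Blender (X, -Z, Y): "up" moves from +Y to +Z.
    assert convert_loc((1.0, 2.0, 3.0)) == Vector((1.0, -3.0, 2.0))
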
diff --git a/io_scene_gltf2/blender/imp/gltf2_blender_mesh.py b/io_scene_gltf2/blender/imp/gltf2_blender_mesh.py
index 7914a41b..e13b9c8f 100755
--- a/io_scene_gltf2/blender/imp/gltf2_blender_mesh.py
+++ b/io_scene_gltf2/blender/imp/gltf2_blender_mesh.py
@@ -13,11 +13,12 @@
# limitations under the License.
import bpy
-import bmesh
+from mathutils import Vector, Matrix
+import numpy as np
+from ...io.imp.gltf2_io_binary import BinaryData
from ..com.gltf2_blender_extras import set_extras
from .gltf2_blender_material import BlenderMaterial
-from .gltf2_blender_primitive import BlenderPrimitive
class BlenderMesh():
@@ -28,118 +29,614 @@ class BlenderMesh():
@staticmethod
def create(gltf, mesh_idx, skin_idx):
"""Mesh creation."""
- pymesh = gltf.data.meshes[mesh_idx]
+ return create_mesh(gltf, mesh_idx, skin_idx)
- # Create one bmesh, add all primitives to it, and then convert it to a
- # mesh.
- bme = bmesh.new()
- # List of all the materials this mesh will use. The material each
- # primitive uses is set by giving an index into this list.
- materials = []
+# Maximum number of TEXCOORD_n/COLOR_n sets to import
+UV_MAX = 8
+COLOR_MAX = 8
- # Process all primitives
- for prim in pymesh.primitives:
- if prim.material is None:
- material_idx = None
- else:
- pymaterial = gltf.data.materials[prim.material]
-
- vertex_color = None
- if 'COLOR_0' in prim.attributes:
- vertex_color = 'COLOR_0'
- # Create Blender material if needed
- if vertex_color not in pymaterial.blender_material:
- BlenderMaterial.create(gltf, prim.material, vertex_color)
- material_name = pymaterial.blender_material[vertex_color]
- material = bpy.data.materials[material_name]
+def create_mesh(gltf, mesh_idx, skin_idx):
+ pymesh = gltf.data.meshes[mesh_idx]
+ name = pymesh.name or 'Mesh_%d' % mesh_idx
+ mesh = bpy.data.meshes.new(name)
- try:
- material_idx = materials.index(material.name)
- except ValueError:
- materials.append(material.name)
- material_idx = len(materials) - 1
+ # Temporarily parent the mesh to an object.
+ # This is used to set skin weights and shapekeys.
+ tmp_ob = None
+ try:
+ tmp_ob = bpy.data.objects.new('##gltf-import:tmp-object##', mesh)
+ do_primitives(gltf, mesh_idx, skin_idx, mesh, tmp_ob)
- BlenderPrimitive.add_primitive_to_bmesh(gltf, bme, pymesh, prim, skin_idx, material_idx)
+ finally:
+ if tmp_ob:
+ bpy.data.objects.remove(tmp_ob)
- name = pymesh.name or 'Mesh_' + str(mesh_idx)
- mesh = bpy.data.meshes.new(name)
- BlenderMesh.bmesh_to_mesh(gltf, pymesh, bme, mesh)
- bme.free()
- for name_material in materials:
- mesh.materials.append(bpy.data.materials[name_material])
- mesh.update()
+ return mesh
- set_extras(mesh, pymesh.extras, exclude=['targetNames'])
- # Clear accessor cache after all primitives are done
- gltf.accessor_cache = {}
+def do_primitives(gltf, mesh_idx, skin_idx, mesh, ob):
+ """Put all primitive data into the mesh."""
+ pymesh = gltf.data.meshes[mesh_idx]
- return mesh
+ # Scan the primitives to find out what we need to create
- @staticmethod
- def bmesh_to_mesh(gltf, pymesh, bme, mesh):
- bme.to_mesh(mesh)
-
- # Unfortunately need to do shapekeys/normals/smoothing ourselves.
-
- # Shapekeys
- if len(bme.verts.layers.shape) != 0:
- # The only way I could find to create a shape key was to temporarily
- # parent mesh to an object and use obj.shape_key_add.
- tmp_ob = None
- try:
- tmp_ob = bpy.data.objects.new('##gltf-import:tmp-object##', mesh)
- tmp_ob.shape_key_add(name='Basis')
- mesh.shape_keys.name = mesh.name
- for layer_name in bme.verts.layers.shape.keys():
- tmp_ob.shape_key_add(name=layer_name)
- key_block = mesh.shape_keys.key_blocks[layer_name]
- layer = bme.verts.layers.shape[layer_name]
-
- for i, v in enumerate(bme.verts):
- key_block.data[i].co = v[layer]
- finally:
- if tmp_ob:
- bpy.data.objects.remove(tmp_ob)
-
- # Normals
- mesh.update()
+ has_normals = False
+ num_uvs = 0
+ num_cols = 0
+ num_joint_sets = 0
+ for prim in pymesh.primitives:
+ if 'POSITION' not in prim.attributes:
+ continue
if gltf.import_settings['import_shading'] == "NORMALS":
- mesh.create_normals_split()
-
- use_smooths = [] # whether to smooth for each poly
- face_idx = 0
- for prim in pymesh.primitives:
- if gltf.import_settings['import_shading'] == "FLAT" or \
- 'NORMAL' not in prim.attributes:
- use_smooths += [False] * prim.num_faces
- elif gltf.import_settings['import_shading'] == "SMOOTH":
- use_smooths += [True] * prim.num_faces
- elif gltf.import_settings['import_shading'] == "NORMALS":
- mesh_loops = mesh.loops
- for fi in range(face_idx, face_idx + prim.num_faces):
- poly = mesh.polygons[fi]
- # "Flat normals" are when all the vertices in poly have the
- # poly's normal. Otherwise, smooth the poly.
- for loop_idx in range(poly.loop_start, poly.loop_start + poly.loop_total):
- vi = mesh_loops[loop_idx].vertex_index
- if poly.normal.dot(bme.verts[vi].normal) <= 0.9999999:
- use_smooths.append(True)
- break
- else:
- use_smooths.append(False)
+ if 'NORMAL' in prim.attributes:
+ has_normals = True
+
+ if skin_idx is not None:
+ i = 0
+ while ('JOINTS_%d' % i) in prim.attributes and \
+ ('WEIGHTS_%d' % i) in prim.attributes:
+ i += 1
+ num_joint_sets = max(i, num_joint_sets)
+
+ i = 0
+ while i < UV_MAX and ('TEXCOORD_%d' % i) in prim.attributes: i += 1
+ num_uvs = max(i, num_uvs)
+
+ i = 0
+ while i < COLOR_MAX and ('COLOR_%d' % i) in prim.attributes: i += 1
+ num_cols = max(i, num_cols)
+
+ num_shapekeys = 0
+ for morph_i, _ in enumerate(pymesh.primitives[0].targets or []):
+ if pymesh.shapekey_names[morph_i] is not None:
+ num_shapekeys += 1
+
+ # -------------
+ # We'll process all the primitives gathering arrays to feed into the
+ # various foreach_set function that create the mesh data.
+
+ num_faces = 0 # total number of faces
+ vert_locs = np.empty(dtype=np.float32, shape=(0,3)) # coordinate for each vert
+ vert_normals = np.empty(dtype=np.float32, shape=(0,3)) # normal for each vert
+ edge_vidxs = np.array([], dtype=np.uint32) # vertex_index for each loose edge
+ loop_vidxs = np.array([], dtype=np.uint32) # vertex_index for each loop
+ loop_uvs = [
+ np.empty(dtype=np.float32, shape=(0,2)) # UV for each loop for each layer
+ for _ in range(num_uvs)
+ ]
+ loop_cols = [
+ np.empty(dtype=np.float32, shape=(0,4)) # color for each loop for each layer
+ for _ in range(num_cols)
+ ]
+ vert_joints = [
+ np.empty(dtype=np.uint32, shape=(0,4)) # 4 joints for each vert for each set
+ for _ in range(num_joint_sets)
+ ]
+ vert_weights = [
+ np.empty(dtype=np.float32, shape=(0,4)) # 4 weights for each vert for each set
+ for _ in range(num_joint_sets)
+ ]
+ sk_vert_locs = [
+ np.empty(dtype=np.float32, shape=(0,3)) # coordinate for each vert for each shapekey
+ for _ in range(num_shapekeys)
+ ]
+
+ for prim in pymesh.primitives:
+ prim.num_faces = 0
+
+ if 'POSITION' not in prim.attributes:
+ continue
+
+ vert_index_base = len(vert_locs)
+
+ if prim.indices is not None:
+ indices = BinaryData.decode_accessor(gltf, prim.indices)
+ indices = indices.reshape(len(indices))
+ else:
+ num_verts = gltf.data.accessors[prim.attributes['POSITION']].count
+ indices = np.arange(0, num_verts, dtype=np.uint32)
+
+ mode = 4 if prim.mode is None else prim.mode
+ points, edges, tris = points_edges_tris(mode, indices)
+ if points is not None:
+ indices = points
+ elif edges is not None:
+ indices = edges
+ else:
+ indices = tris
+
+ # We'll add one vert to the arrays for each index used in indices
+ unique_indices, inv_indices = np.unique(indices, return_inverse=True)
+
+ vs = BinaryData.decode_accessor(gltf, prim.attributes['POSITION'], cache=True)
+ vert_locs = np.concatenate((vert_locs, vs[unique_indices]))
+
+ if has_normals:
+ if 'NORMAL' in prim.attributes:
+ ns = BinaryData.decode_accessor(gltf, prim.attributes['NORMAL'], cache=True)
+ ns = ns[unique_indices]
else:
- # shouldn't happen
- assert False
+ ns = np.zeros((len(unique_indices), 3), dtype=np.float32)
+ vert_normals = np.concatenate((vert_normals, ns))
- face_idx += prim.num_faces
- mesh.polygons.foreach_set('use_smooth', use_smooths)
+ for i in range(num_joint_sets):
+ if ('JOINTS_%d' % i) in prim.attributes and ('WEIGHTS_%d' % i) in prim.attributes:
+ js = BinaryData.decode_accessor(gltf, prim.attributes['JOINTS_%d' % i], cache=True)
+ ws = BinaryData.decode_accessor(gltf, prim.attributes['WEIGHTS_%d' % i], cache=True)
+ js = js[unique_indices]
+ ws = ws[unique_indices]
+ else:
+ js = np.zeros((len(unique_indices), 4), dtype=np.uint32)
+ ws = np.zeros((len(unique_indices), 4), dtype=np.float32)
+ vert_joints[i] = np.concatenate((vert_joints[i], js))
+ vert_weights[i] = np.concatenate((vert_weights[i], ws))
- # Custom normals, now that every update is done
- if gltf.import_settings['import_shading'] == "NORMALS":
- custom_normals = [v.normal for v in bme.verts]
- mesh.normals_split_custom_set_from_vertices(custom_normals)
- mesh.use_auto_smooth = True
+ for morph_i, target in enumerate(prim.targets or []):
+ if pymesh.shapekey_names[morph_i] is None:
+ continue
+ morph_vs = BinaryData.decode_accessor(gltf, target['POSITION'], cache=True)
+ morph_vs = morph_vs[unique_indices]
+ sk_vert_locs[morph_i] = np.concatenate((sk_vert_locs[morph_i], morph_vs))
+
+ # inv_indices are the indices into the verts just for this prim;
+ # calculate indices into the overall verts array
+ prim_vidxs = inv_indices.astype(np.uint32, copy=False)
+ prim_vidxs += vert_index_base # offset for verts from previous prims
+
+ if edges is not None:
+ edge_vidxs = np.concatenate((edge_vidxs, prim_vidxs))
+
+ if tris is not None:
+ prim.num_faces = len(indices) // 3
+ num_faces += prim.num_faces
+
+ loop_vidxs = np.concatenate((loop_vidxs, prim_vidxs))
+
+ for uv_i in range(num_uvs):
+ if ('TEXCOORD_%d' % uv_i) in prim.attributes:
+ uvs = BinaryData.decode_accessor(gltf, prim.attributes['TEXCOORD_%d' % uv_i], cache=True)
+ uvs = uvs[indices]
+ else:
+ uvs = np.zeros((len(indices), 2), dtype=np.float32)
+ loop_uvs[uv_i] = np.concatenate((loop_uvs[uv_i], uvs))
+
+ for col_i in range(num_cols):
+ if ('COLOR_%d' % col_i) in prim.attributes:
+ cols = BinaryData.decode_accessor(gltf, prim.attributes['COLOR_%d' % col_i], cache=True)
+ cols = cols[indices]
+ if cols.shape[1] == 3:
+ cols = colors_rgb_to_rgba(cols)
+ else:
+ cols = np.ones((len(indices), 4), dtype=np.float32)
+ loop_cols[col_i] = np.concatenate((loop_cols[col_i], cols))
+
+ # Accessors are cached in case they are shared between primitives; clear
+ # the cache now that all prims are done.
+ gltf.decode_accessor_cache = {}
+
+ if gltf.import_settings['merge_vertices']:
+ vert_locs, vert_normals, vert_joints, vert_weights, \
+ sk_vert_locs, loop_vidxs, edge_vidxs = \
+ merge_duplicate_verts(
+ vert_locs, vert_normals, vert_joints, vert_weights, \
+ sk_vert_locs, loop_vidxs, edge_vidxs\
+ )
+
+ # ---------------
+ # Convert all the arrays glTF -> Blender
+
+ # Change from relative to absolute positions for morph locs
+ for sk_locs in sk_vert_locs:
+ sk_locs += vert_locs
+
+ if gltf.yup2zup:
+ locs_yup_to_zup(vert_locs)
+ locs_yup_to_zup(vert_normals)
+ for sk_locs in sk_vert_locs:
+ locs_yup_to_zup(sk_locs)
+
+ if num_joint_sets:
+ skin_into_bind_pose(
+ gltf, skin_idx, vert_joints, vert_weights,
+ locs=[vert_locs] + sk_vert_locs,
+ vert_normals=vert_normals,
+ )
+
+ for uvs in loop_uvs:
+ uvs_gltf_to_blender(uvs)
+
+ for cols in loop_cols:
+ colors_linear_to_srgb(cols[:, :-1])
+
+ # ---------------
+ # Start creating things
+
+ mesh.vertices.add(len(vert_locs))
+ mesh.vertices.foreach_set('co', squish(vert_locs))
+
+ mesh.loops.add(len(loop_vidxs))
+ mesh.loops.foreach_set('vertex_index', loop_vidxs)
+
+ mesh.edges.add(len(edge_vidxs) // 2)
+ mesh.edges.foreach_set('vertices', edge_vidxs)
+
+ mesh.polygons.add(num_faces)
+
+ # All polys are tris
+ loop_starts = np.arange(0, 3 * num_faces, step=3)
+ loop_totals = np.full(num_faces, 3)
+ mesh.polygons.foreach_set('loop_start', loop_starts)
+ mesh.polygons.foreach_set('loop_total', loop_totals)
+
+ for uv_i in range(num_uvs):
+ name = 'UVMap' if uv_i == 0 else 'UVMap.%03d' % uv_i
+ layer = mesh.uv_layers.new(name=name)
+ layer.data.foreach_set('uv', squish(loop_uvs[uv_i]))
+
+ for col_i in range(num_cols):
+ name = 'Col' if col_i == 0 else 'Col.%03d' % col_i
+ layer = mesh.vertex_colors.new(name=name)
+
+ layer.data.foreach_set('color', squish(loop_cols[col_i]))
+
+ # Skinning
+ # TODO: this is slow :/
+ if num_joint_sets:
+ pyskin = gltf.data.skins[skin_idx]
+ for i, _ in enumerate(pyskin.joints):
+ # ob is a temp object, so don't worry about the name.
+ ob.vertex_groups.new(name='X%d' % i)
+
+ vgs = list(ob.vertex_groups)
+
+ for i in range(num_joint_sets):
+ js = vert_joints[i].tolist() # tolist() is faster
+ ws = vert_weights[i].tolist()
+ for vi in range(len(vert_locs)):
+ w0, w1, w2, w3 = ws[vi]
+ j0, j1, j2, j3 = js[vi]
+ if w0 != 0: vgs[j0].add((vi,), w0, 'REPLACE')
+ if w1 != 0: vgs[j1].add((vi,), w1, 'REPLACE')
+ if w2 != 0: vgs[j2].add((vi,), w2, 'REPLACE')
+ if w3 != 0: vgs[j3].add((vi,), w3, 'REPLACE')
+
+ # Shapekeys
+ if num_shapekeys:
+ ob.shape_key_add(name='Basis')
+ mesh.shape_keys.name = mesh.name
+
+ sk_i = 0
+ for sk_name in pymesh.shapekey_names:
+ if sk_name is None:
+ continue
+
+ ob.shape_key_add(name=sk_name)
+ key_block = mesh.shape_keys.key_blocks[sk_name]
+ key_block.data.foreach_set('co', squish(sk_vert_locs[sk_i]))
+
+ sk_i += 1
+
+ # ----
+ # Assign materials to faces
+
+ # Initialize to no-material, ie. an index guaranteed to be OOB for the
+ # material slots. A mesh obviously can't have more materials than it has
+ # primitives...
+ oob_material_idx = len(pymesh.primitives)
+ material_indices = np.full(num_faces, oob_material_idx)
+
+ f = 0
+ for prim in pymesh.primitives:
+ if prim.material is not None:
+ # Get the material
+ pymaterial = gltf.data.materials[prim.material]
+ vertex_color = 'COLOR_0' if 'COLOR_0' in prim.attributes else None
+ if vertex_color not in pymaterial.blender_material:
+ BlenderMaterial.create(gltf, prim.material, vertex_color)
+ material_name = pymaterial.blender_material[vertex_color]
+
+ # Put material in slot (if not there)
+ if material_name not in mesh.materials:
+ mesh.materials.append(bpy.data.materials[material_name])
+ material_index = mesh.materials.find(material_name)
+
+ material_indices[f:f + prim.num_faces].fill(material_index)
+
+ f += prim.num_faces
+
+ mesh.polygons.foreach_set('material_index', material_indices)
+
+ # ----
+ # Normals
+
+ # Set poly smoothing
+ # TODO: numpyify?
+ smooths = [] # use_smooth for each poly
+ f = 0
+ for prim in pymesh.primitives:
+ if gltf.import_settings['import_shading'] == "FLAT" or \
+ 'NORMAL' not in prim.attributes:
+ smooths += [False] * prim.num_faces
+
+ elif gltf.import_settings['import_shading'] == "SMOOTH":
+ smooths += [True] * prim.num_faces
+
+ elif gltf.import_settings['import_shading'] == "NORMALS":
+ for fi in range(f, f + prim.num_faces):
+ # Make the face flat if the face's normal is
+ # equal to all of its loops' normals.
+ poly_normal = mesh.polygons[fi].normal
+ smooths.append(
+ poly_normal.dot(vert_normals[loop_vidxs[3*fi + 0]]) <= 0.9999999 or
+ poly_normal.dot(vert_normals[loop_vidxs[3*fi + 1]]) <= 0.9999999 or
+ poly_normal.dot(vert_normals[loop_vidxs[3*fi + 2]]) <= 0.9999999
+ )
+
+ f += prim.num_faces
+
+ mesh.polygons.foreach_set('use_smooth', smooths)
+
+ mesh.validate()
+ has_loose_edges = len(edge_vidxs) != 0 # need to calc_loose_edges for them to show up
+ mesh.update(calc_edges_loose=has_loose_edges)
+
+ if has_normals:
+ mesh.create_normals_split()
+ mesh.normals_split_custom_set_from_vertices(vert_normals)
+ mesh.use_auto_smooth = True
+
+
+def points_edges_tris(mode, indices):
+ points = None
+ edges = None
+ tris = None
+
+ if mode == 0:
+ # POINTS
+ points = indices
+
+ elif mode == 1:
+ # LINES
+ # 1 3
+ # / /
+ # 0 2
+ edges = indices
+
+ elif mode == 2:
+ # LINE LOOP
+ # 1---2
+ # / \
+ # 0-------3
+ # in: 0123
+ # out: 01122330
+ edges = np.empty(2 * len(indices), dtype=np.uint32)
+ edges[[0, -1]] = indices[[0, 0]] # 0______0
+ edges[1:-1] = np.repeat(indices[1:], 2) # 01122330
+
+ elif mode == 3:
+ # LINE STRIP
+ # 1---2
+ # / \
+ # 0 3
+ # in: 0123
+ # out: 011223
+ edges = np.empty(2 * len(indices) - 2, dtype=np.uint32)
+ edges[[0, -1]] = indices[[0, -1]] # 0____3
+ edges[1:-1] = np.repeat(indices[1:-1], 2) # 011223
+
+ elif mode == 4:
+ # TRIANGLES
+ # 2 3
+ # / \ / \
+ # 0---1 4---5
+ tris = indices
+
+ elif mode == 5:
+ # TRIANGLE STRIP
+ # 0---2---4
+ # \ / \ /
+ # 1---3
+ # TODO: numpyify
+ def alternate(i, xs):
+ even = i % 2 == 0
+ return xs if even else (xs[0], xs[2], xs[1])
+ tris = np.array([
+ alternate(i, (indices[i], indices[i + 1], indices[i + 2]))
+ for i in range(0, len(indices) - 2)
+ ])
+ tris = squish(tris)
+
+ elif mode == 6:
+ # TRIANGLE FAN
+ # 3---2
+ # / \ / \
+ # 4---0---1
+ # TODO: numpyify
+ tris = np.array([
+ (indices[0], indices[i], indices[i + 1])
+ for i in range(1, len(indices) - 1)
+ ])
+ tris = squish(tris)
+
+ else:
+ raise Exception('primitive mode unimplemented: %d' % mode)
+
+ return points, edges, tris
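
A small worked example of the mode mapping above, exercising the function as added in this file: a LINE LOOP over [0, 1, 2, 3] expands to the closed edge list 0-1, 1-2, 2-3, 3-0, while TRIANGLES passes its indices through untouched.

    import numpy as np

    _, edges, _ = points_edges_tris(2, np.array([0, 1, 2, 3], dtype=np.uint32))
    assert edges.tolist() == [0, 1, 1, 2, 2, 3, 3, 0]

    _, _, tris = points_edges_tris(4, np.array([0, 1, 2, 3, 4, 5], dtype=np.uint32))
    assert tris.tolist() == [0, 1, 2, 3, 4, 5]
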
+
+
+def squish(array):
+ """Squish nD array into 1D array (required by foreach_set)."""
+ return array.reshape(array.size)
+
+
+def colors_rgb_to_rgba(rgb):
+ rgba = np.ones((len(rgb), 4), dtype=np.float32)
+ rgba[:, :3] = rgb
+ return rgba
+
+
+def colors_linear_to_srgb(color):
+ assert color.shape[1] == 3 # only change RGB, not A
+
+ not_small = color >= 0.0031308
+ small_result = np.where(color < 0.0, 0.0, color * 12.92)
+ large_result = 1.055 * np.power(color, 1.0 / 2.4, where=not_small) - 0.055
+ color[:] = np.where(not_small, large_result, small_result)
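
A scalar-level sanity check of the piecewise transfer above: below the 0.0031308 cutoff the curve is linear (12.92 * c), above it the gamma branch applies (values rounded for readability).

    import numpy as np

    rgb = np.array([[0.001, 0.5, 1.0]], dtype=np.float32)
    colors_linear_to_srgb(rgb)
    # rgb is modified in place, roughly [[0.01292, 0.7354, 1.0]]
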
+
+
+def locs_yup_to_zup(vecs):
+ # x,y,z -> x,-z,y
+ vecs[:, [1,2]] = vecs[:, [2,1]]
+ vecs[:, 1] *= -1
+
+
+def uvs_gltf_to_blender(uvs):
+ # u,v -> u,1-v
+ uvs[:, 1] *= -1
+ uvs[:, 1] += 1
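
glTF's UV origin is top-left with V increasing downward, Blender's is bottom-left, so the in-place flip above maps (u, v) to (u, 1 - v):

    import numpy as np

    uvs = np.array([[0.25, 0.0], [0.5, 1.0]], dtype=np.float32)
    uvs_gltf_to_blender(uvs)
    assert uvs.tolist() == [[0.25, 1.0], [0.5, 0.0]]
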
+
+
+def skin_into_bind_pose(gltf, skin_idx, vert_joints, vert_weights, locs, vert_normals):
+ # Skin each position/normal using the bind pose.
+ # Skinning equation: vert' = sum_(j,w) w * joint_mat[j] * vert
+ # where the sum is over all (joint,weight) pairs.
+
+ # Calculate joint matrices
+ joint_mats = []
+ pyskin = gltf.data.skins[skin_idx]
+ if pyskin.inverse_bind_matrices is not None:
+ inv_binds = BinaryData.get_data_from_accessor(gltf, pyskin.inverse_bind_matrices)
+ inv_binds = [gltf.matrix_gltf_to_blender(m) for m in inv_binds]
+ else:
+ inv_binds = [Matrix.Identity(4) for i in range(len(pyskin.joints))]
+ bind_mats = [gltf.vnodes[joint].bind_arma_mat for joint in pyskin.joints]
+ joint_mats = [bind_mat @ inv_bind for bind_mat, inv_bind in zip(bind_mats, inv_binds)]
+
+ # TODO: check if joint_mats are all (approximately) 1, and skip skinning
+
+ joint_mats = np.array(joint_mats, dtype=np.float32)
+
+ # Compute the skinning matrices for every vert
+ num_verts = len(locs[0])
+ skinning_mats = np.zeros((num_verts, 4, 4), dtype=np.float32)
+ weight_sums = np.zeros(num_verts, dtype=np.float32)
+ for js, ws in zip(vert_joints, vert_weights):
+ for i in range(4):
+ skinning_mats += ws[:, i].reshape(len(ws), 1, 1) * joint_mats[js[:, i]]
+ weight_sums += ws[:, i]
+ # Normalize weights to one; necessary for old files / quantized weights
+ skinning_mats /= weight_sums.reshape(num_verts, 1, 1)
+
+ skinning_mats_3x3 = skinning_mats[:, :3, :3]
+ skinning_trans = skinning_mats[:, :3, 3]
+
+ for vs in locs:
+ vs[:] = mul_mats_vecs(skinning_mats_3x3, vs)
+ vs[:] += skinning_trans
+
+ if len(vert_normals) != 0:
+ vert_normals[:] = mul_mats_vecs(skinning_mats_3x3, vert_normals)
+ # Don't translate normals!
+ normalize_vecs(vert_normals)
+
+
+def mul_mats_vecs(mats, vecs):
+ """Given [m1,m2,...] and [v1,v2,...], returns [m1@v1,m2@v2,...]. 3D only."""
+ return np.matmul(mats, vecs.reshape(len(vecs), 3, 1)).reshape(len(vecs), 3)
+
+
+def normalize_vecs(vectors):
+ norms = np.linalg.norm(vectors, axis=1, keepdims=True)
+ np.divide(vectors, norms, out=vectors, where=norms != 0)
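
The skinning equation quoted in skin_into_bind_pose, worked for a single vertex with hypothetical joint matrices and weights: blend the joint matrices by weight, then apply the 3x3 part plus the translation column, exactly what the vectorized code does in bulk.

    import numpy as np

    joint_mats = np.array([
        np.eye(4),                                                   # joint 0: identity
        [[1, 0, 0, 0], [0, 1, 0, 2], [0, 0, 1, 0], [0, 0, 0, 1]],    # joint 1: translate +2 on Y
    ], dtype=np.float32)
    weights = np.array([0.5, 0.5], dtype=np.float32)
    vert = np.array([1.0, 0.0, 0.0], dtype=np.float32)

    skin_mat = (weights[:, None, None] * joint_mats).sum(axis=0)
    skinned = skin_mat[:3, :3] @ vert + skin_mat[:3, 3]
    # skinned == [1.0, 1.0, 0.0]: halfway toward joint 1's translation
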
+
+
+def merge_duplicate_verts(vert_locs, vert_normals, vert_joints, vert_weights, sk_vert_locs, loop_vidxs, edge_vidxs):
+ # This function attempts to invert the splitting done when exporting to
+ # glTF. Welds together verts with the same per-vert data (but possibly
+ # different per-loop data).
+ #
+ # Ideally normals would be treated as per-loop data, but that has problems,
+ # so we currently treat the normal as per-vert.
+ #
+ # Strategy is simple: put all the per-vert data into an array of structs
+ # ("dots"), dedupe with np.unique, then take all the data back out.
+
+ # Very often two verts that "morally" should be merged will have normals
+ # with very small differences. Round off the normals to smooth this over.
+ if len(vert_normals) != 0:
+ vert_normals *= 50000
+ vert_normals[:] = np.trunc(vert_normals)
+ vert_normals *= (1/50000)
+
+ dot_fields = [('x', np.float32), ('y', np.float32), ('z', np.float32)]
+ if len(vert_normals) != 0:
+ dot_fields += [('nx', np.float32), ('ny', np.float32), ('nz', np.float32)]
+ for i, _ in enumerate(vert_joints):
+ dot_fields += [
+ ('joint%dx' % i, np.uint32), ('joint%dy' % i, np.uint32),
+ ('joint%dz' % i, np.uint32), ('joint%dw' % i, np.uint32),
+ ('weight%dx' % i, np.float32), ('weight%dy' % i, np.float32),
+ ('weight%dz' % i, np.float32), ('weight%dw' % i, np.float32),
+ ]
+ for i, _ in enumerate(sk_vert_locs):
+ dot_fields += [
+ ('sk%dx' % i, np.float32), ('sk%dy' % i, np.float32), ('sk%dz' % i, np.float32),
+ ]
+ dots = np.empty(len(vert_locs), dtype=np.dtype(dot_fields))
+
+ dots['x'] = vert_locs[:, 0]
+ dots['y'] = vert_locs[:, 1]
+ dots['z'] = vert_locs[:, 2]
+ if len(vert_normals) != 0:
+ dots['nx'] = vert_normals[:, 0]
+ dots['ny'] = vert_normals[:, 1]
+ dots['nz'] = vert_normals[:, 2]
+ for i, (joints, weights) in enumerate(zip(vert_joints, vert_weights)):
+ dots['joint%dx' % i] = joints[:, 0]
+ dots['joint%dy' % i] = joints[:, 1]
+ dots['joint%dz' % i] = joints[:, 2]
+ dots['joint%dw' % i] = joints[:, 3]
+ dots['weight%dx' % i] = weights[:, 0]
+ dots['weight%dy' % i] = weights[:, 1]
+ dots['weight%dz' % i] = weights[:, 2]
+ dots['weight%dw' % i] = weights[:, 3]
+ for i, locs in enumerate(sk_vert_locs):
+ dots['sk%dx' % i] = locs[:, 0]
+ dots['sk%dy' % i] = locs[:, 1]
+ dots['sk%dz' % i] = locs[:, 2]
+
+ unique_dots, inv_indices = np.unique(dots, return_inverse=True)
+
+ loop_vidxs = inv_indices[loop_vidxs]
+ edge_vidxs = inv_indices[edge_vidxs]
+
+ vert_locs = np.empty((len(unique_dots), 3), dtype=np.float32)
+ vert_locs[:, 0] = unique_dots['x']
+ vert_locs[:, 1] = unique_dots['y']
+ vert_locs[:, 2] = unique_dots['z']
+ if len(vert_normals) != 0:
+ vert_normals = np.empty((len(unique_dots), 3), dtype=np.float32)
+ vert_normals[:, 0] = unique_dots['nx']
+ vert_normals[:, 1] = unique_dots['ny']
+ vert_normals[:, 2] = unique_dots['nz']
+ for i in range(len(vert_joints)):
+ vert_joints[i] = np.empty((len(unique_dots), 4), dtype=np.uint32)
+ vert_joints[i][:, 0] = unique_dots['joint%dx' % i]
+ vert_joints[i][:, 1] = unique_dots['joint%dy' % i]
+ vert_joints[i][:, 2] = unique_dots['joint%dz' % i]
+ vert_joints[i][:, 3] = unique_dots['joint%dw' % i]
+ vert_weights[i] = np.empty((len(unique_dots), 4), dtype=np.float32)
+ vert_weights[i][:, 0] = unique_dots['weight%dx' % i]
+ vert_weights[i][:, 1] = unique_dots['weight%dy' % i]
+ vert_weights[i][:, 2] = unique_dots['weight%dz' % i]
+ vert_weights[i][:, 3] = unique_dots['weight%dw' % i]
+ for i in range(len(sk_vert_locs)):
+ sk_vert_locs[i] = np.empty((len(unique_dots), 3), dtype=np.float32)
+ sk_vert_locs[i][:, 0] = unique_dots['sk%dx' % i]
+ sk_vert_locs[i][:, 1] = unique_dots['sk%dy' % i]
+ sk_vert_locs[i][:, 2] = unique_dots['sk%dz' % i]
+
+ return vert_locs, vert_normals, vert_joints, vert_weights, sk_vert_locs, loop_vidxs, edge_vidxs
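
The welding trick used above, reduced to its essentials: pack each vert's per-vert data into one structured-array row ("dot"), let np.unique weld identical rows, and use the inverse mapping to rewrite the loop indices.

    import numpy as np

    dots = np.array(
        [(0.0, 0.0, 0.0), (1.0, 0.0, 0.0), (0.0, 0.0, 0.0)],
        dtype=[('x', np.float32), ('y', np.float32), ('z', np.float32)],
    )
    unique_dots, inv = np.unique(dots, return_inverse=True)
    loop_vidxs = np.array([0, 1, 2])     # third loop pointed at a duplicate vert
    loop_vidxs = inv[loop_vidxs]         # now [0, 1, 0]: duplicates welded
    assert len(unique_dots) == 2
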
diff --git a/io_scene_gltf2/blender/imp/gltf2_blender_pbrMetallicRoughness.py b/io_scene_gltf2/blender/imp/gltf2_blender_pbrMetallicRoughness.py
index 00bd08d2..deb9e301 100755
--- a/io_scene_gltf2/blender/imp/gltf2_blender_pbrMetallicRoughness.py
+++ b/io_scene_gltf2/blender/imp/gltf2_blender_pbrMetallicRoughness.py
@@ -233,7 +233,7 @@ def base_color(
base_color_factor = [1, 1, 1, 1]
if base_color_texture is None and not mh.vertex_color:
- color_socket.default_value = base_color_factor
+ color_socket.default_value = base_color_factor[:3] + [1]
if alpha_socket is not None:
alpha_socket.default_value = base_color_factor[3]
return
@@ -242,10 +242,7 @@ def base_color(
needs_color_factor = base_color_factor[:3] != [1, 1, 1]
needs_alpha_factor = base_color_factor[3] != 1.0 and alpha_socket is not None
if needs_color_factor or needs_alpha_factor:
- # For now, always create the color factor node because the exporter
- # reads the alpha value from here. Can get rid of "or needs_alpha_factor"
- # when it learns to understand the alpha socket.
- if needs_color_factor or needs_alpha_factor:
+ if needs_color_factor:
node = mh.node_tree.nodes.new('ShaderNodeMixRGB')
node.label = 'Color Factor'
node.location = x - 140, y
@@ -255,7 +252,7 @@ def base_color(
# Inputs
node.inputs['Fac'].default_value = 1.0
color_socket = node.inputs['Color1']
- node.inputs['Color2'].default_value = base_color_factor
+ node.inputs['Color2'].default_value = base_color_factor[:3] + [1]
if needs_alpha_factor:
node = mh.node_tree.nodes.new('ShaderNodeMath')
diff --git a/io_scene_gltf2/blender/imp/gltf2_blender_primitive.py b/io_scene_gltf2/blender/imp/gltf2_blender_primitive.py
deleted file mode 100755
index d544778c..00000000
--- a/io_scene_gltf2/blender/imp/gltf2_blender_primitive.py
+++ /dev/null
@@ -1,344 +0,0 @@
-# Copyright 2018-2019 The glTF-Blender-IO authors.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import bpy
-from mathutils import Vector, Matrix
-
-from ...io.imp.gltf2_io_binary import BinaryData
-from ...io.com.gltf2_io_color_management import color_linear_to_srgb
-from ...io.com import gltf2_io_debug
-
-
-MAX_NUM_COLOR_SETS = 8
-MAX_NUM_TEXCOORD_SETS = 8
-
-class BlenderPrimitive():
- """Blender Primitive."""
- def __new__(cls, *args, **kwargs):
- raise RuntimeError("%s should not be instantiated" % cls)
-
- @staticmethod
- def get_layer(bme_layers, name):
- if name not in bme_layers:
- return bme_layers.new(name)
- return bme_layers[name]
-
- @staticmethod
- def add_primitive_to_bmesh(gltf, bme, pymesh, pyprimitive, skin_idx, material_index):
- attributes = pyprimitive.attributes
-
- if 'POSITION' not in attributes:
- pyprimitive.num_faces = 0
- return
-
- positions = BinaryData.get_data_from_accessor(gltf, attributes['POSITION'], cache=True)
-
- if pyprimitive.indices is not None:
- # Not using cache, this is not useful for indices
- indices = BinaryData.get_data_from_accessor(gltf, pyprimitive.indices)
- indices = [i[0] for i in indices]
- else:
- indices = list(range(len(positions)))
-
- bme_verts = bme.verts
- bme_edges = bme.edges
- bme_faces = bme.faces
-
- # Gather up the joints/weights (multiple sets allow >4 influences)
- joint_sets = []
- weight_sets = []
- set_num = 0
- while 'JOINTS_%d' % set_num in attributes and 'WEIGHTS_%d' % set_num in attributes:
- joint_data = BinaryData.get_data_from_accessor(gltf, attributes['JOINTS_%d' % set_num], cache=True)
- weight_data = BinaryData.get_data_from_accessor(gltf, attributes['WEIGHTS_%d' % set_num], cache=True)
-
- joint_sets.append(joint_data)
- weight_sets.append(weight_data)
-
- set_num += 1
-
- # For skinned meshes, we will need to calculate the position of the
- # verts in the bind pose, ie. the pose the edit bones are in.
- if skin_idx is not None:
- pyskin = gltf.data.skins[skin_idx]
- if pyskin.inverse_bind_matrices is not None:
- inv_binds = BinaryData.get_data_from_accessor(gltf, pyskin.inverse_bind_matrices)
- inv_binds = [gltf.matrix_gltf_to_blender(m) for m in inv_binds]
- else:
- inv_binds = [Matrix.Identity(4) for i in range(len(pyskin.joints))]
- bind_mats = [gltf.vnodes[joint].bind_arma_mat for joint in pyskin.joints]
- joint_mats = [bind_mat @ inv_bind for bind_mat, inv_bind in zip(bind_mats, inv_binds)]
-
- def skin_vert(pos, pidx):
- out = Vector((0, 0, 0))
- # Spec says weights should already sum to 1 but some models
- # don't do it (ex. CesiumMan), so normalize.
- weight_sum = 0
- for joint_set, weight_set in zip(joint_sets, weight_sets):
- for j in range(0, 4):
- weight = weight_set[pidx][j]
- if weight != 0.0:
- weight_sum += weight
- joint = joint_set[pidx][j]
- out += weight * (joint_mats[joint] @ pos)
- out /= weight_sum
- return out
-
- def skin_normal(norm, pidx):
- # TODO: not sure this is right
- norm = Vector([norm[0], norm[1], norm[2], 0])
- out = Vector((0, 0, 0, 0))
- weight_sum = 0
- for joint_set, weight_set in zip(joint_sets, weight_sets):
- for j in range(0, 4):
- weight = weight_set[pidx][j]
- if weight != 0.0:
- weight_sum += weight
- joint = joint_set[pidx][j]
- out += weight * (joint_mats[joint] @ norm)
- out /= weight_sum
- out = out.to_3d().normalized()
- return out
-
- # Every vertex has an index into the primitive's attribute arrays and a
- # *different* index into the BMesh's list of verts. Call the first one the
- # pidx and the second the bidx. Need to keep them straight!
-
- # The pidx of all the vertices that are actually used by the primitive (only
- # indices that appear in the pyprimitive.indices list are actually used)
- used_pidxs = set(indices)
- # Contains a pair (bidx, pidx) for every vertex in the primitive
- vert_idxs = []
- # pidx_to_bidx[pidx] will be the bidx of the vertex with that pidx (or -1 if
- # unused)
- pidx_to_bidx = [-1] * len(positions)
- bidx = len(bme_verts)
- if bpy.app.debug:
- used_pidxs = list(used_pidxs)
- used_pidxs.sort()
- for pidx in used_pidxs:
- pos = gltf.loc_gltf_to_blender(positions[pidx])
- if skin_idx is not None:
- pos = skin_vert(pos, pidx)
-
- bme_verts.new(pos)
- vert_idxs.append((bidx, pidx))
- pidx_to_bidx[pidx] = bidx
- bidx += 1
- bme_verts.ensure_lookup_table()
-
- # Add edges/faces to bmesh
- mode = 4 if pyprimitive.mode is None else pyprimitive.mode
- edges, faces = BlenderPrimitive.edges_and_faces(mode, indices)
- # NOTE: edges and vertices are in terms of pidxs!
- for edge in edges:
- try:
- bme_edges.new((
- bme_verts[pidx_to_bidx[edge[0]]],
- bme_verts[pidx_to_bidx[edge[1]]],
- ))
- except ValueError:
- # Ignores duplicate/degenerate edges
- pass
- pyprimitive.num_faces = 0
- for face in faces:
- try:
- face = bme_faces.new((
- bme_verts[pidx_to_bidx[face[0]]],
- bme_verts[pidx_to_bidx[face[1]]],
- bme_verts[pidx_to_bidx[face[2]]],
- ))
-
- if material_index is not None:
- face.material_index = material_index
-
- pyprimitive.num_faces += 1
-
- except ValueError:
- # Ignores duplicate/degenerate faces
- pass
-
- # Set normals
- if 'NORMAL' in attributes:
- normals = BinaryData.get_data_from_accessor(gltf, attributes['NORMAL'], cache=True)
-
- if skin_idx is None:
- for bidx, pidx in vert_idxs:
- bme_verts[bidx].normal = gltf.normal_gltf_to_blender(normals[pidx])
- else:
- for bidx, pidx in vert_idxs:
- normal = gltf.normal_gltf_to_blender(normals[pidx])
- bme_verts[bidx].normal = skin_normal(normal, pidx)
-
- # Set vertex colors. Add them in the order COLOR_0, COLOR_1, etc.
- set_num = 0
- while 'COLOR_%d' % set_num in attributes:
- if set_num >= MAX_NUM_COLOR_SETS:
- gltf2_io_debug.print_console("WARNING",
- "too many color sets; COLOR_%d will be ignored" % set_num
- )
- break
-
- layer_name = 'Col' if set_num == 0 else 'Col.%03d' % set_num
- layer = BlenderPrimitive.get_layer(bme.loops.layers.color, layer_name)
-
- colors = BinaryData.get_data_from_accessor(gltf, attributes['COLOR_%d' % set_num], cache=True)
- is_rgba = len(colors[0]) == 4
-
- for bidx, pidx in vert_idxs:
- color = colors[pidx]
- col = (
- color_linear_to_srgb(color[0]),
- color_linear_to_srgb(color[1]),
- color_linear_to_srgb(color[2]),
- color[3] if is_rgba else 1.0,
- )
- for loop in bme_verts[bidx].link_loops:
- loop[layer] = col
-
- set_num += 1
-
- # Set texcoords
- set_num = 0
- while 'TEXCOORD_%d' % set_num in attributes:
- if set_num >= MAX_NUM_TEXCOORD_SETS:
- gltf2_io_debug.print_console("WARNING",
- "too many UV sets; TEXCOORD_%d will be ignored" % set_num
- )
- break
-
- layer_name = 'UVMap' if set_num == 0 else 'UVMap.%03d' % set_num
- layer = BlenderPrimitive.get_layer(bme.loops.layers.uv, layer_name)
-
- uvs = BinaryData.get_data_from_accessor(gltf, attributes['TEXCOORD_%d' % set_num], cache=True)
-
- for bidx, pidx in vert_idxs:
- # UV transform
- u, v = uvs[pidx]
- uv = (u, 1 - v)
-
- for loop in bme_verts[bidx].link_loops:
- loop[layer].uv = uv
-
- set_num += 1
-
- # Set joints/weights for skinning
- if joint_sets:
- layer = BlenderPrimitive.get_layer(bme.verts.layers.deform, 'Vertex Weights')
-
- for joint_set, weight_set in zip(joint_sets, weight_sets):
- for bidx, pidx in vert_idxs:
- for j in range(0, 4):
- weight = weight_set[pidx][j]
- if weight != 0.0:
- joint = joint_set[pidx][j]
- bme_verts[bidx][layer][joint] = weight
-
- # Set morph target positions (no normals/tangents)
- for sk, target in enumerate(pyprimitive.targets or []):
- if pymesh.shapekey_names[sk] is None:
- continue
-
- layer_name = pymesh.shapekey_names[sk]
- layer = BlenderPrimitive.get_layer(bme.verts.layers.shape, layer_name)
-
- morph_positions = BinaryData.get_data_from_accessor(gltf, target['POSITION'], cache=True)
-
- if skin_idx is None:
- for bidx, pidx in vert_idxs:
- bme_verts[bidx][layer] = (
- gltf.loc_gltf_to_blender(positions[pidx]) +
- gltf.loc_gltf_to_blender(morph_positions[pidx])
- )
- else:
- for bidx, pidx in vert_idxs:
- pos = (
- gltf.loc_gltf_to_blender(positions[pidx]) +
- gltf.loc_gltf_to_blender(morph_positions[pidx])
- )
- bme_verts[bidx][layer] = skin_vert(pos, pidx)
-
- @staticmethod
- def edges_and_faces(mode, indices):
- """Converts the indices in a particular primitive mode into standard lists of
- edges (pairs of indices) and faces (tuples of CCW indices).
- """
- es = []
- fs = []
-
- if mode == 0:
- # POINTS
- pass
- elif mode == 1:
- # LINES
- # 1 3
- # / /
- # 0 2
- es = [
- (indices[i], indices[i + 1])
- for i in range(0, len(indices), 2)
- ]
- elif mode == 2:
- # LINE LOOP
- # 1---2
- # / \
- # 0-------3
- es = [
- (indices[i], indices[i + 1])
- for i in range(0, len(indices) - 1)
- ]
- es.append((indices[-1], indices[0]))
- elif mode == 3:
- # LINE STRIP
- # 1---2
- # / \
- # 0 3
- es = [
- (indices[i], indices[i + 1])
- for i in range(0, len(indices) - 1)
- ]
- elif mode == 4:
- # TRIANGLES
- # 2 3
- # / \ / \
- # 0---1 4---5
- fs = [
- (indices[i], indices[i + 1], indices[i + 2])
- for i in range(0, len(indices), 3)
- ]
- elif mode == 5:
- # TRIANGLE STRIP
- # 0---2---4
- # \ / \ /
- # 1---3
- def alternate(i, xs):
- even = i % 2 == 0
- return xs if even else (xs[0], xs[2], xs[1])
- fs = [
- alternate(i, (indices[i], indices[i + 1], indices[i + 2]))
- for i in range(0, len(indices) - 2)
- ]
- elif mode == 6:
- # TRIANGLE FAN
- # 3---2
- # / \ / \
- # 4---0---1
- fs = [
- (indices[0], indices[i], indices[i + 1])
- for i in range(1, len(indices) - 1)
- ]
- else:
- raise Exception('primitive mode unimplemented: %d' % mode)
-
- return es, fs
diff --git a/io_scene_gltf2/io/com/gltf2_io_constants.py b/io_scene_gltf2/io/com/gltf2_io_constants.py
index 873e004e..983fe9ab 100755
--- a/io_scene_gltf2/io/com/gltf2_io_constants.py
+++ b/io_scene_gltf2/io/com/gltf2_io_constants.py
@@ -35,6 +35,18 @@ class ComponentType(IntEnum):
}[component_type]
@classmethod
+ def to_numpy_dtype(cls, component_type):
+ import numpy as np
+ return {
+ ComponentType.Byte: np.int8,
+ ComponentType.UnsignedByte: np.uint8,
+ ComponentType.Short: np.int16,
+ ComponentType.UnsignedShort: np.uint16,
+ ComponentType.UnsignedInt: np.uint32,
+ ComponentType.Float: np.float32,
+ }[component_type]
+
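Example use of the new classmethod (illustrative); the decode path relies on the same component-type to dtype correspondence:

    import numpy as np

    dtype = ComponentType.to_numpy_dtype(ComponentType.Float)
    assert np.dtype(dtype) == np.float32
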
+ @classmethod
def from_legacy_define(cls, type_define):
return {
GLTF_COMPONENT_TYPE_BYTE: ComponentType.Byte,
diff --git a/io_scene_gltf2/io/imp/gltf2_io_binary.py b/io_scene_gltf2/io/imp/gltf2_io_binary.py
index 7cfcbc40..728cf0f0 100755
--- a/io_scene_gltf2/io/imp/gltf2_io_binary.py
+++ b/io_scene_gltf2/io/imp/gltf2_io_binary.py
@@ -13,6 +13,7 @@
# limitations under the License.
import struct
+import numpy as np
from ..com.gltf2_io import Accessor
@@ -22,8 +23,8 @@ class BinaryData():
def __new__(cls, *args, **kwargs):
raise RuntimeError("%s should not be instantiated" % cls)
-# Note that this function is not used in Blender importer, but is kept in
-# Source code to be used in any pipeline that want to manage gltf/glb file in python
+ # Note that this function is not used in the Blender importer, but is kept in
+ # the source code to be used in any pipeline that wants to manage glTF/glb files in Python
@staticmethod
def get_binary_from_accessor(gltf, accessor_idx):
"""Get binary from accessor."""
@@ -63,8 +64,7 @@ class BinaryData():
if accessor_idx in gltf.accessor_cache:
return gltf.accessor_cache[accessor_idx]
- accessor = gltf.data.accessors[accessor_idx]
- data = BinaryData.get_data_from_accessor_obj(gltf, accessor)
+ data = BinaryData.decode_accessor(gltf, accessor_idx).tolist()
if cache:
gltf.accessor_cache[accessor_idx] = data
@@ -72,7 +72,36 @@ class BinaryData():
return data
@staticmethod
- def get_data_from_accessor_obj(gltf, accessor):
+ def decode_accessor(gltf, accessor_idx, cache=False):
+ """Decodes accessor to 2D numpy array (count x num_components)."""
+ if accessor_idx in gltf.decode_accessor_cache:
+ return gltf.decode_accessor_cache[accessor_idx]
+
+ accessor = gltf.data.accessors[accessor_idx]
+ array = BinaryData.decode_accessor_obj(gltf, accessor)
+
+ if cache:
+ gltf.decode_accessor_cache[accessor_idx] = array
+ # Prevent accidentally modifying cached arrays
+ array.flags.writeable = False
+
+ return array
+
+ @staticmethod
+ def decode_accessor_obj(gltf, accessor):
+ # MAT2/3 have special alignment requirements that aren't handled. But it
+ # doesn't matter because nothing uses them.
+ assert accessor.type not in ['MAT2', 'MAT3']
+
+ dtype = {
+ 5120: np.int8,
+ 5121: np.uint8,
+ 5122: np.int16,
+ 5123: np.uint16,
+ 5125: np.uint32,
+ 5126: np.float32,
+ }[accessor.component_type]
+
if accessor.buffer_view is not None:
bufferView = gltf.data.buffer_views[accessor.buffer_view]
buffer_data = BinaryData.get_buffer_view(gltf, accessor.buffer_view)
@@ -80,40 +109,45 @@ class BinaryData():
accessor_offset = accessor.byte_offset or 0
buffer_data = buffer_data[accessor_offset:]
- fmt_char = gltf.fmt_char_dict[accessor.component_type]
component_nb = gltf.component_nb_dict[accessor.type]
- fmt = '<' + (fmt_char * component_nb)
- default_stride = struct.calcsize(fmt)
-
- # Special layouts for certain formats; see the section about
- # data alignment in the glTF 2.0 spec.
- component_size = struct.calcsize('<' + fmt_char)
- if accessor.type == 'MAT2' and component_size == 1:
- fmt = '<FFxxFF'.replace('F', fmt_char)
- default_stride = 8
- elif accessor.type == 'MAT3' and component_size == 1:
- fmt = '<FFFxFFFxFFF'.replace('F', fmt_char)
- default_stride = 12
- elif accessor.type == 'MAT3' and component_size == 2:
- fmt = '<FFFxxFFFxxFFF'.replace('F', fmt_char)
- default_stride = 24
+ bytes_per_elem = dtype(1).nbytes
+ default_stride = bytes_per_elem * component_nb
stride = bufferView.byte_stride or default_stride
- # Decode
- unpack_from = struct.Struct(fmt).unpack_from
- data = [
- unpack_from(buffer_data, offset)
- for offset in range(0, accessor.count*stride, stride)
- ]
+ if stride == default_stride:
+ array = np.frombuffer(
+ buffer_data,
+ dtype=np.dtype(dtype).newbyteorder('<'),
+ count=accessor.count * component_nb,
+ )
+ array = array.reshape(accessor.count, component_nb)
+
+ else:
+ # The data looks like
+ # XXXppXXXppXXXppXXX
+ # where X are the components and p are padding.
+ # One XXXpp group is one stride's worth of data.
+ assert stride % bytes_per_elem == 0
+ elems_per_stride = stride // bytes_per_elem
+ num_elems = (accessor.count - 1) * elems_per_stride + component_nb
+
+ array = np.frombuffer(
+ buffer_data,
+ dtype=np.dtype(dtype).newbyteorder('<'),
+ count=num_elems,
+ )
+ assert array.strides[0] == bytes_per_elem
+ array = np.lib.stride_tricks.as_strided(
+ array,
+ shape=(accessor.count, component_nb),
+ strides=(stride, bytes_per_elem),
+ )
else:
# No buffer view; initialize to zeros
component_nb = gltf.component_nb_dict[accessor.type]
- data = [
- (0,) * component_nb
- for i in range(accessor.count)
- ]
+ array = np.zeros((accessor.count, component_nb), dtype=dtype)
if accessor.sparse:
sparse_indices_obj = Accessor.from_dict({
@@ -123,6 +157,9 @@ class BinaryData():
'componentType': accessor.sparse.indices.component_type,
'type': 'SCALAR',
})
+ sparse_indices = BinaryData.decode_accessor_obj(gltf, sparse_indices_obj)
+ sparse_indices = sparse_indices.reshape(len(sparse_indices))
+
sparse_values_obj = Accessor.from_dict({
'count': accessor.sparse.count,
'bufferView': accessor.sparse.values.buffer_view,
@@ -130,31 +167,26 @@ class BinaryData():
'componentType': accessor.component_type,
'type': accessor.type,
})
- sparse_indices = BinaryData.get_data_from_accessor_obj(gltf, sparse_indices_obj)
- sparse_values = BinaryData.get_data_from_accessor_obj(gltf, sparse_values_obj)
+ sparse_values = BinaryData.decode_accessor_obj(gltf, sparse_values_obj)
- # Apply sparse
- for i in range(accessor.sparse.count):
- data[sparse_indices[i][0]] = sparse_values[i]
+ if not array.flags.writeable:
+ array = array.copy()
+ array[sparse_indices] = sparse_values
# Normalization
if accessor.normalized:
- for idx, tuple in enumerate(data):
- new_tuple = ()
- for i in tuple:
- if accessor.component_type == 5120:
- new_tuple += (max(float(i / 127.0 ), -1.0),)
- elif accessor.component_type == 5121:
- new_tuple += (float(i / 255.0),)
- elif accessor.component_type == 5122:
- new_tuple += (max(float(i / 32767.0), -1.0),)
- elif accessor.component_type == 5123:
- new_tuple += (i / 65535.0,)
- else:
- new_tuple += (float(i),)
- data[idx] = new_tuple
-
- return data
+ if accessor.component_type == 5120: # int8
+ array = np.maximum(-1.0, array / 127.0)
+ elif accessor.component_type == 5121: # uint8
+ array = array / 255.0
+ elif accessor.component_type == 5122: # int16
+ array = np.maximum(-1.0, array / 32767.0)
+ elif accessor.component_type == 5123: # uint16
+ array = array / 65535.0
+ else:
+ array = array.astype(np.float64)
+
+ return array
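
A standalone illustration of the interleaved-stride branch earlier in this hunk, using a hypothetical buffer: three VEC3 float32 elements packed with a 16-byte stride (12 bytes of data plus 4 bytes of padding each), decoded without copying via as_strided.

    import numpy as np

    raw = b''.join(
        np.array(vec, dtype='<f4').tobytes() + b'\x00' * 4
        for vec in [(1, 2, 3), (4, 5, 6), (7, 8, 9)]
    )
    count, component_nb, stride, bytes_per_elem = 3, 3, 16, 4
    num_elems = (count - 1) * (stride // bytes_per_elem) + component_nb
    flat = np.frombuffer(raw, dtype=np.dtype(np.float32).newbyteorder('<'), count=num_elems)
    array = np.lib.stride_tricks.as_strided(
        flat, shape=(count, component_nb), strides=(stride, bytes_per_elem)
    )
    assert array.tolist() == [[1, 2, 3], [4, 5, 6], [7, 8, 9]]
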
@staticmethod
def get_image_data(gltf, img_idx):
diff --git a/io_scene_gltf2/io/imp/gltf2_io_gltf.py b/io_scene_gltf2/io/imp/gltf2_io_gltf.py
index e63f1f55..49eee2d5 100755
--- a/io_scene_gltf2/io/imp/gltf2_io_gltf.py
+++ b/io_scene_gltf2/io/imp/gltf2_io_gltf.py
@@ -32,6 +32,7 @@ class glTFImporter():
self.glb_buffer = None
self.buffers = {}
self.accessor_cache = {}
+ self.decode_accessor_cache = {}
if 'loglevel' not in self.import_settings.keys():
self.import_settings['loglevel'] = logging.ERROR
@@ -47,6 +48,7 @@ class glTFImporter():
'KHR_materials_unlit',
'KHR_texture_transform',
'KHR_materials_clearcoat',
+ 'KHR_mesh_quantization',
]
# TODO : merge with io_constants
diff --git a/magic_uv/__init__.py b/magic_uv/__init__.py
index 8630038a..b92714fe 100644
--- a/magic_uv/__init__.py
+++ b/magic_uv/__init__.py
@@ -20,21 +20,23 @@
__author__ = "Nutti <nutti.metro@gmail.com>"
__status__ = "production"
-__version__ = "6.2"
-__date__ = "31 Jul 2019"
+__version__ = "6.3"
+__date__ = "10 Aug 2020"
bl_info = {
"name": "Magic UV",
"author": "Nutti, Mifth, Jace Priester, kgeogeo, mem, imdjs"
"Keith (Wahooney) Boshoff, McBuff, MaxRobinot, "
- "Alexander Milovsky",
- "version": (6, 2, 0),
+ "Alexander Milovsky, Dusan Stevanovic",
+ "version": (6, 3, 0),
"blender": (2, 80, 0),
"location": "See Add-ons Preferences",
"description": "UV Toolset. See Add-ons Preferences for details",
"warning": "",
"support": "COMMUNITY",
+ "wiki_url": "https://docs.blender.org/manual/en/dev/addons/"
+ "uv/magic_uv.html",
"doc_url": "{BLENDER_MANUAL_URL}/addons/uv/magic_uv.html",
"tracker_url": "https://github.com/nutti/Magic-UV",
"category": "UV",
diff --git a/magic_uv/common.py b/magic_uv/common.py
index df3597be..11696667 100644
--- a/magic_uv/common.py
+++ b/magic_uv/common.py
@@ -20,8 +20,8 @@
__author__ = "Nutti <nutti.metro@gmail.com>"
__status__ = "production"
-__version__ = "6.2"
-__date__ = "31 Jul 2019"
+__version__ = "6.3"
+__date__ = "10 Aug 2020"
from collections import defaultdict
from pprint import pprint
@@ -244,15 +244,16 @@ def __parse_island(bm, face_idx, faces_left, island,
Parse island
"""
- if face_idx in faces_left:
- faces_left.remove(face_idx)
- island.append({'face': bm.faces[face_idx]})
- for v in face_to_verts[face_idx]:
- connected_faces = vert_to_faces[v]
- if connected_faces:
+ faces_to_parse = [face_idx]
+ while faces_to_parse:
+ fidx = faces_to_parse.pop(0)
+ if fidx in faces_left:
+ faces_left.remove(fidx)
+ island.append({'face': bm.faces[fidx]})
+ for v in face_to_verts[fidx]:
+ connected_faces = vert_to_faces[v]
for cf in connected_faces:
- __parse_island(bm, cf, faces_left, island, face_to_verts,
- vert_to_faces)
+ faces_to_parse.append(cf)
def __get_island(bm, face_to_verts, vert_to_faces):
@@ -351,18 +352,60 @@ def calc_polygon_3d_area(points):
return 0.5 * area
-def measure_mesh_area(obj):
+def get_faces_list(bm, method, only_selected):
+ faces_list = []
+ if method == 'MESH':
+ if only_selected:
+ faces_list.append([f for f in bm.faces if f.select])
+ else:
+ faces_list.append([f for f in bm.faces])
+ elif method == 'UV ISLAND':
+ if not bm.loops.layers.uv:
+ return None
+ uv_layer = bm.loops.layers.uv.verify()
+ if only_selected:
+ faces = [f for f in bm.faces if f.select]
+ islands = get_island_info_from_faces(bm, faces, uv_layer)
+ for isl in islands:
+ faces_list.append([f["face"] for f in isl["faces"]])
+ else:
+ faces = [f for f in bm.faces]
+ islands = get_island_info_from_faces(bm, faces, uv_layer)
+ for isl in islands:
+ faces_list.append([f["face"] for f in isl["faces"]])
+ elif method == 'FACE':
+ if only_selected:
+ for f in bm.faces:
+ if f.select:
+ faces_list.append([f])
+ else:
+ for f in bm.faces:
+ faces_list.append([f])
+ else:
+ raise ValueError("Invalid method: {}".format(method))
+
+ return faces_list
+
+
+def measure_mesh_area(obj, calc_method, only_selected):
bm = bmesh.from_edit_mesh(obj.data)
if check_version(2, 73, 0) >= 0:
bm.verts.ensure_lookup_table()
bm.edges.ensure_lookup_table()
bm.faces.ensure_lookup_table()
- sel_faces = [f for f in bm.faces if f.select]
+ faces_list = get_faces_list(bm, calc_method, only_selected)
- # measure
+ areas = []
+ for faces in faces_list:
+ areas.append(measure_mesh_area_from_faces(faces))
+
+ return areas
+
+
+def measure_mesh_area_from_faces(faces):
mesh_area = 0.0
- for f in sel_faces:
+ for f in faces:
verts = [l.vert.co for l in f.loops]
f_mesh_area = calc_polygon_3d_area(verts)
mesh_area = mesh_area + f_mesh_area
@@ -405,7 +448,7 @@ def find_image(obj, face=None, tex_layer=None):
if len(images) >= 2:
raise RuntimeError("Find more than 2 images")
- if len(images) == 0:
+ if not images:
return None
return images[0]
@@ -428,40 +471,26 @@ def find_images(obj, face=None, tex_layer=None):
return images
-def measure_uv_area(obj, method='FIRST', tex_size=None):
- bm = bmesh.from_edit_mesh(obj.data)
- if check_version(2, 73, 0) >= 0:
- bm.verts.ensure_lookup_table()
- bm.edges.ensure_lookup_table()
- bm.faces.ensure_lookup_table()
-
- if not bm.loops.layers.uv:
- return None
- uv_layer = bm.loops.layers.uv.verify()
-
- tex_layer = find_texture_layer(bm)
-
- sel_faces = [f for f in bm.faces if f.select]
-
- # measure
+def measure_uv_area_from_faces(obj, faces, uv_layer, tex_layer,
+ tex_selection_method, tex_size):
uv_area = 0.0
- for f in sel_faces:
+ for f in faces:
uvs = [l[uv_layer].uv for l in f.loops]
f_uv_area = calc_polygon_2d_area(uvs)
# user specified
- if method == 'USER_SPECIFIED' and tex_size is not None:
+ if tex_selection_method == 'USER_SPECIFIED' and tex_size is not None:
img_size = tex_size
# first texture if there are more than 2 textures assigned
# to the object
- elif method == 'FIRST':
+ elif tex_selection_method == 'FIRST':
img = find_image(obj, f, tex_layer)
# can not find from node, so we can not get texture size
if not img:
return None
img_size = img.size
# average texture size
- elif method == 'AVERAGE':
+ elif tex_selection_method == 'AVERAGE':
imgs = find_images(obj, f, tex_layer)
if not imgs:
return None
@@ -473,7 +502,7 @@ def measure_uv_area(obj, method='FIRST', tex_size=None):
img_size = [img_size_total[0] / len(imgs),
img_size_total[1] / len(imgs)]
# max texture size
- elif method == 'MAX':
+ elif tex_selection_method == 'MAX':
imgs = find_images(obj, f, tex_layer)
if not imgs:
return None
@@ -484,7 +513,7 @@ def measure_uv_area(obj, method='FIRST', tex_size=None):
max(img_size_max[1], img.size[1])]
img_size = img_size_max
# min texture size
- elif method == 'MIN':
+ elif tex_selection_method == 'MIN':
imgs = find_images(obj, f, tex_layer)
if not imgs:
return None
@@ -495,13 +524,40 @@ def measure_uv_area(obj, method='FIRST', tex_size=None):
min(img_size_min[1], img.size[1])]
img_size = img_size_min
else:
- raise RuntimeError("Unexpected method: {}".format(method))
+ raise RuntimeError("Unexpected method: {}"
+ .format(tex_selection_method))
- uv_area = uv_area + f_uv_area * img_size[0] * img_size[1]
+ uv_area += f_uv_area * img_size[0] * img_size[1]
return uv_area
+def measure_uv_area(obj, calc_method, tex_selection_method, tex_size,
+ only_selected):
+ bm = bmesh.from_edit_mesh(obj.data)
+ if check_version(2, 73, 0) >= 0:
+ bm.verts.ensure_lookup_table()
+ bm.edges.ensure_lookup_table()
+ bm.faces.ensure_lookup_table()
+
+ if not bm.loops.layers.uv:
+ return None
+ uv_layer = bm.loops.layers.uv.verify()
+ tex_layer = find_texture_layer(bm)
+ faces_list = get_faces_list(bm, calc_method, only_selected)
+
+ # measure
+ uv_areas = []
+ for faces in faces_list:
+ uv_area = measure_uv_area_from_faces(
+ obj, faces, uv_layer, tex_layer, tex_selection_method, tex_size)
+ if uv_area is None:
+ return None
+ uv_areas.append(uv_area)
+
+ return uv_areas
+
+
def diff_point_to_segment(a, b, p):
ab = b - a
normal_ab = ab.normalized()
@@ -520,43 +576,42 @@ def diff_point_to_segment(a, b, p):
# get selected loop pair whose loops are connected each other
def __get_loop_pairs(l, uv_layer):
-
- def __get_loop_pairs_internal(l_, pairs_, uv_layer_, parsed_):
- parsed_.append(l_)
- for ll in l_.vert.link_loops:
+ pairs = []
+ parsed = []
+ loops_ready = [l]
+ while loops_ready:
+ l = loops_ready.pop(0)
+ parsed.append(l)
+ for ll in l.vert.link_loops:
# forward direction
lln = ll.link_loop_next
# if there is same pair, skip it
found = False
- for p in pairs_:
+ for p in pairs:
if (ll in p) and (lln in p):
found = True
break
# two loops must be selected
- if ll[uv_layer_].select and lln[uv_layer_].select:
+ if ll[uv_layer].select and lln[uv_layer].select:
if not found:
- pairs_.append([ll, lln])
- if lln not in parsed_:
- __get_loop_pairs_internal(lln, pairs_, uv_layer_, parsed_)
+ pairs.append([ll, lln])
+ if (lln not in parsed) and (lln not in loops_ready):
+ loops_ready.append(lln)
# backward direction
llp = ll.link_loop_prev
# if there is same pair, skip it
found = False
- for p in pairs_:
+ for p in pairs:
if (ll in p) and (llp in p):
found = True
break
# two loops must be selected
- if ll[uv_layer_].select and llp[uv_layer_].select:
+ if ll[uv_layer].select and llp[uv_layer].select:
if not found:
- pairs_.append([ll, llp])
- if llp not in parsed_:
- __get_loop_pairs_internal(llp, pairs_, uv_layer_, parsed_)
-
- pairs = []
- parsed = []
- __get_loop_pairs_internal(l, pairs, uv_layer, parsed)
+ pairs.append([ll, llp])
+ if (llp not in parsed) and (llp not in loops_ready):
+ loops_ready.append(llp)
return pairs
@@ -876,12 +931,12 @@ class RingBuffer:
# clip: reference polygon
# subject: tested polygon
-def __do_weiler_atherton_cliping(clip, subject, uv_layer, mode):
+def __do_weiler_atherton_cliping(clip_uvs, subject_uvs, mode):
- clip_uvs = RingBuffer([l[uv_layer].uv.copy() for l in clip.loops])
+ clip_uvs = RingBuffer(clip_uvs)
if __is_polygon_flipped(clip_uvs):
clip_uvs.reverse()
- subject_uvs = RingBuffer([l[uv_layer].uv.copy() for l in subject.loops])
+ subject_uvs = RingBuffer(subject_uvs)
if __is_polygon_flipped(subject_uvs):
subject_uvs.reverse()
@@ -1111,22 +1166,29 @@ def __is_points_in_polygon(points, subject_points):
return True
-def get_overlapped_uv_info(bm, faces, uv_layer, mode):
+def get_overlapped_uv_info(bm_list, faces_list, uv_layer_list, mode):
# at first, check island overlapped
- isl = get_island_info_from_faces(bm, faces, uv_layer)
+ isl = []
+ for bm, uv_layer, faces in zip(bm_list, uv_layer_list, faces_list):
+ info = get_island_info_from_faces(bm, faces, uv_layer)
+ isl.extend([(i, uv_layer) for i in info])
+
overlapped_isl_pairs = []
- for i, i1 in enumerate(isl):
- for i2 in isl[i + 1:]:
+ overlapped_uv_layer_pairs = []
+ for i, (i1, uv_layer_1) in enumerate(isl):
+ for i2, uv_layer_2 in isl[i + 1:]:
if (i1["max"].x < i2["min"].x) or (i2["max"].x < i1["min"].x) or \
(i1["max"].y < i2["min"].y) or (i2["max"].y < i1["min"].y):
continue
overlapped_isl_pairs.append([i1, i2])
+ overlapped_uv_layer_pairs.append([uv_layer_1, uv_layer_2])
# next, check polygon overlapped
overlapped_uvs = []
- for oip in overlapped_isl_pairs:
+ for oip, uvlp in zip(overlapped_isl_pairs, overlapped_uv_layer_pairs):
for clip in oip[0]["faces"]:
f_clip = clip["face"]
+ clip_uvs = [l[uvlp[0]].uv.copy() for l in f_clip.loops]
for subject in oip[1]["faces"]:
f_subject = subject["face"]
@@ -1137,29 +1199,33 @@ def get_overlapped_uv_info(bm, faces, uv_layer, mode):
(subject["max_uv"].y < clip["min_uv"].y):
continue
+ subject_uvs = [l[uvlp[1]].uv.copy() for l in f_subject.loops]
                    # slow operation, apply Weiler-Atherton clipping algorithm
- result, polygons = __do_weiler_atherton_cliping(f_clip,
- f_subject,
- uv_layer, mode)
+ result, polygons = __do_weiler_atherton_cliping(clip_uvs,
+ subject_uvs,
+ mode)
if result:
- subject_uvs = [l[uv_layer].uv.copy()
- for l in f_subject.loops]
overlapped_uvs.append({"clip_face": f_clip,
"subject_face": f_subject,
+ "clip_uv_layer": uvlp[0],
+ "subject_uv_layer": uvlp[1],
"subject_uvs": subject_uvs,
"polygons": polygons})
return overlapped_uvs
-def get_flipped_uv_info(faces, uv_layer):
+def get_flipped_uv_info(faces_list, uv_layer_list):
flipped_uvs = []
- for f in faces:
- polygon = RingBuffer([l[uv_layer].uv.copy() for l in f.loops])
- if __is_polygon_flipped(polygon):
- uvs = [l[uv_layer].uv.copy() for l in f.loops]
- flipped_uvs.append({"face": f, "uvs": uvs,
- "polygons": [polygon.as_list()]})
+ for faces, uv_layer in zip(faces_list, uv_layer_list):
+ for f in faces:
+ polygon = RingBuffer([l[uv_layer].uv.copy() for l in f.loops])
+ if __is_polygon_flipped(polygon):
+ uvs = [l[uv_layer].uv.copy() for l in f.loops]
+ flipped_uvs.append({"face": f,
+ "uv_layer": uv_layer,
+ "uvs": uvs,
+ "polygons": [polygon.as_list()]})
return flipped_uvs
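The measurement refactor above separates face collection (get_faces_list) from area accumulation, so the same code can report one area for the whole mesh, one per UV island, or one per face. Reduced to its essence, with hypothetical helper names:

def measure_groups(face_groups, area_of_face):
    """Return one accumulated area per group of faces."""
    return [sum(area_of_face(f) for f in group) for group in face_groups]

# measure_groups([["a", "b"], ["c"]], {"a": 0.5, "b": 0.25, "c": 2.0}.get)
# -> [0.75, 2.0]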
diff --git a/magic_uv/lib/__init__.py b/magic_uv/lib/__init__.py
index 3258b6eb..5e06552d 100644
--- a/magic_uv/lib/__init__.py
+++ b/magic_uv/lib/__init__.py
@@ -20,8 +20,8 @@
__author__ = "Nutti <nutti.metro@gmail.com>"
__status__ = "production"
-__version__ = "6.2"
-__date__ = "31 Jul 2019"
+__version__ = "6.3"
+__date__ = "10 Aug 2020"
if "bpy" in locals():
import importlib
diff --git a/magic_uv/op/__init__.py b/magic_uv/op/__init__.py
index cd743b48..b7316192 100644
--- a/magic_uv/op/__init__.py
+++ b/magic_uv/op/__init__.py
@@ -20,13 +20,14 @@
__author__ = "Nutti <nutti.metro@gmail.com>"
__status__ = "production"
-__version__ = "6.2"
-__date__ = "31 Jul 2019"
+__version__ = "6.3"
+__date__ = "10 Aug 2020"
if "bpy" in locals():
import importlib
importlib.reload(align_uv)
importlib.reload(align_uv_cursor)
+ importlib.reload(clip_uv)
importlib.reload(copy_paste_uv)
importlib.reload(copy_paste_uv_object)
importlib.reload(copy_paste_uv_uvedit)
@@ -50,6 +51,7 @@ if "bpy" in locals():
else:
from . import align_uv
from . import align_uv_cursor
+ from . import clip_uv
from . import copy_paste_uv
from . import copy_paste_uv_object
from . import copy_paste_uv_uvedit
diff --git a/magic_uv/op/align_uv.py b/magic_uv/op/align_uv.py
index 31f7cbe8..77afcc25 100644
--- a/magic_uv/op/align_uv.py
+++ b/magic_uv/op/align_uv.py
@@ -20,8 +20,8 @@
__author__ = "imdjs, Nutti <nutti.metro@gmail.com>"
__status__ = "production"
-__version__ = "6.2"
-__date__ = "31 Jul 2019"
+__version__ = "6.3"
+__date__ = "10 Aug 2020"
import math
from math import atan2, tan, sin, cos
@@ -164,8 +164,7 @@ def _get_hdiff_uv_vinfl(uv_layer, loop_seqs, vidx, hidx, pidx, infl):
# calculate target UV
for i in range(len(accum_uvlens[:-1])):
# get line segment which UV will be placed
- if ((accum_uvlens[i] <= target_length) and
- (accum_uvlens[i + 1] > target_length)):
+ if accum_uvlens[i] <= target_length < accum_uvlens[i + 1]:
tgt_seg_len = target_length - accum_uvlens[i]
seg_len = accum_uvlens[i + 1] - accum_uvlens[i]
uv1 = orig_uvs[i]
@@ -245,8 +244,7 @@ def _get_vdiff_uv_vinfl(uv_layer, loop_seqs, vidx, hidx, pidx, infl):
# calculate target UV
for i in range(len(accum_uvlens[:-1])):
# get line segment which UV will be placed
- if ((accum_uvlens[i] <= target_length) and
- (accum_uvlens[i + 1] > target_length)):
+ if accum_uvlens[i] <= target_length < accum_uvlens[i + 1]:
tgt_seg_len = target_length - accum_uvlens[i]
seg_len = accum_uvlens[i + 1] - accum_uvlens[i]
uv1 = orig_uvs[i]
diff --git a/magic_uv/op/align_uv_cursor.py b/magic_uv/op/align_uv_cursor.py
index b103de31..884f645a 100644
--- a/magic_uv/op/align_uv_cursor.py
+++ b/magic_uv/op/align_uv_cursor.py
@@ -20,8 +20,8 @@
__author__ = "Nutti <nutti.metro@gmail.com>"
__status__ = "production"
-__version__ = "6.2"
-__date__ = "31 Jul 2019"
+__version__ = "6.3"
+__date__ = "10 Aug 2020"
import bpy
from mathutils import Vector
diff --git a/magic_uv/op/clip_uv.py b/magic_uv/op/clip_uv.py
new file mode 100644
index 00000000..c6f006e2
--- /dev/null
+++ b/magic_uv/op/clip_uv.py
@@ -0,0 +1,227 @@
+# <pep8-80 compliant>
+
+# ##### BEGIN GPL LICENSE BLOCK #####
+#
+# This program is free software; you can redistribute it and/or
+# modify it under the terms of the GNU General Public License
+# as published by the Free Software Foundation; either version 2
+# of the License, or (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this program; if not, write to the Free Software Foundation,
+# Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
+#
+# ##### END GPL LICENSE BLOCK #####
+
+__author__ = "Dusan Stevanovic, Nutti <nutti.metro@gmail.com>"
+__status__ = "production"
+__version__ = "6.3"
+__date__ = "10 Aug 2020"
+
+
+import math
+
+import bpy
+import bmesh
+from mathutils import Vector
+from bpy.props import BoolProperty, FloatVectorProperty
+
+from .. import common
+from ..utils.bl_class_registry import BlClassRegistry
+from ..utils.property_class_registry import PropertyClassRegistry
+from ..utils import compatibility as compat
+
+
+def _is_valid_context(context):
+ # Only the 'IMAGE_EDITOR' and 'VIEW_3D' spaces are allowed to execute.
+ # If the 'VIEW_3D' space were not allowed, the option could not be found
+ # in the Tool Shelf after execution.
+ for space in context.area.spaces:
+ if (space.type == 'IMAGE_EDITOR') or (space.type == 'VIEW_3D'):
+ break
+ else:
+ return False
+
+ return True
+
+
+def round_clip_uv_range(v):
+ sign = 1 if v >= 0.0 else -1
+ return int((math.fabs(v) + 0.25) / 0.5) * 0.5 * sign
+
+
+def get_clip_uv_range_max(self):
+ return self.get('muv_clip_uv_range_max', (0.5, 0.5))
+
+
+def set_clip_uv_range_max(self, value):
+ u = round_clip_uv_range(value[0])
+ u = 0.5 if u <= 0.5 else u
+ v = round_clip_uv_range(value[1])
+ v = 0.5 if v <= 0.5 else v
+ self['muv_clip_uv_range_max'] = (u, v)
+
+
+def get_clip_uv_range_min(self):
+ return self.get('muv_clip_uv_range_min', (-0.5, -0.5))
+
+
+def set_clip_uv_range_min(self, value):
+ u = round_clip_uv_range(value[0])
+ u = -0.5 if u >= -0.5 else u
+ v = round_clip_uv_range(value[1])
+ v = -0.5 if v >= -0.5 else v
+ self['muv_clip_uv_range_min'] = (u, v)
+
+
+@PropertyClassRegistry()
+class _Properties:
+ idname = "clip_uv"
+
+ @classmethod
+ def init_props(cls, scene):
+ scene.muv_clip_uv_enabled = BoolProperty(
+ name="Clip UV Enabled",
+ description="Clip UV is enabled",
+ default=False
+ )
+
+ scene.muv_clip_uv_range_max = FloatVectorProperty(
+ name="Range Max",
+ description="Max UV coordinates of the range to be clipped",
+ size=2,
+ default=(0.5, 0.5),
+ min=0.5,
+ step=50,
+ get=get_clip_uv_range_max,
+ set=set_clip_uv_range_max,
+ )
+
+ scene.muv_clip_uv_range_min = FloatVectorProperty(
+ name="Range Min",
+ description="Min UV coordinates of the range to be clipped",
+ size=2,
+ default=(-0.5, -0.5),
+ max=-0.5,
+ step=50,
+ get=get_clip_uv_range_min,
+ set=set_clip_uv_range_min,
+ )
+
+ # TODO: add option to preserve UV island
+
+ @classmethod
+ def del_props(cls, scene):
+ del scene.muv_clip_uv_range_max
+ del scene.muv_clip_uv_range_min
+
+
+@BlClassRegistry()
+@compat.make_annotations
+class MUV_OT_ClipUV(bpy.types.Operator):
+
+ bl_idname = "uv.muv_clip_uv"
+ bl_label = "Clip UV"
+ bl_description = "Clip selected UV in the specified range"
+ bl_options = {'REGISTER', 'UNDO'}
+
+ clip_uv_range_max = FloatVectorProperty(
+ name="Range Max",
+ description="Max UV coordinates of the range to be clipped",
+ size=2,
+ default=(0.5, 0.5),
+ min=0.5,
+ step=50,
+ )
+
+ clip_uv_range_min = FloatVectorProperty(
+ name="Range Min",
+ description="Min UV coordinates of the range to be clipped",
+ size=2,
+ default=(-0.5, -0.5),
+ max=-0.5,
+ step=50,
+ )
+
+ @classmethod
+ def poll(cls, context):
+ # we can not get area/space/region from console
+ if common.is_console_mode():
+ return True
+ return _is_valid_context(context)
+
+ def execute(self, context):
+ obj = context.active_object
+ bm = common.create_bmesh(obj)
+
+ if not bm.loops.layers.uv:
+ self.report({'WARNING'}, "Object must have more than one UV map")
+ return {'CANCELLED'}
+
+ uv_layer = bm.loops.layers.uv.verify()
+
+ for face in bm.faces:
+ if not face.select:
+ continue
+
+ selected_loops = [l for l in face.loops
+ if l[uv_layer].select or
+ context.scene.tool_settings.use_uv_select_sync]
+ if not selected_loops:
+ continue
+
+ # bounding box (min/max) of the selected UV coordinates on the face
+ max_uv = Vector((-10000000.0, -10000000.0))
+ min_uv = Vector((10000000.0, 10000000.0))
+ for l in selected_loops:
+ uv = l[uv_layer].uv
+ max_uv.x = max(max_uv.x, uv.x)
+ max_uv.y = max(max_uv.y, uv.y)
+ min_uv.x = min(min_uv.x, uv.x)
+ min_uv.y = min(min_uv.y, uv.y)
+
+ # clip
+ move_uv = Vector((0.0, 0.0))
+ clip_size = Vector(self.clip_uv_range_max) - \
+ Vector(self.clip_uv_range_min)
+ if max_uv.x > self.clip_uv_range_max[0]:
+ target_x = math.fmod(max_uv.x - self.clip_uv_range_min[0],
+ clip_size.x)
+ if target_x < 0.0:
+ target_x += clip_size.x
+ target_x += self.clip_uv_range_min[0]
+ move_uv.x = target_x - max_uv.x
+ if min_uv.x < self.clip_uv_range_min[0]:
+ target_x = math.fmod(min_uv.x - self.clip_uv_range_min[0],
+ clip_size.x)
+ if target_x < 0.0:
+ target_x += clip_size.x
+ target_x += self.clip_uv_range_min[0]
+ move_uv.x = target_x - min_uv.x
+ if max_uv.y > self.clip_uv_range_max[1]:
+ target_y = math.fmod(max_uv.y - self.clip_uv_range_min[1],
+ clip_size.y)
+ if target_y < 0.0:
+ target_y += clip_size.y
+ target_y += self.clip_uv_range_min[1]
+ move_uv.y = target_y - max_uv.y
+ if min_uv.y < self.clip_uv_range_min[1]:
+ target_y = math.fmod(min_uv.y - self.clip_uv_range_min[1],
+ clip_size.y)
+ if target_y < 0.0:
+ target_y += clip_size.y
+ target_y += self.clip_uv_range_min[1]
+ move_uv.y = target_y - min_uv.y
+
+ # update UV
+ for l in selected_loops:
+ l[uv_layer].uv = l[uv_layer].uv + move_uv
+
+ bmesh.update_edit_mesh(obj.data)
+
+ return {'FINISHED'}
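The core of the new Clip UV operator is wrapping an out-of-range coordinate back into [range_min, range_max) with math.fmod and then shifting the whole face by the resulting offset. That wrapping step in isolation (wrap_into_range is an illustrative name):

import math

def wrap_into_range(value, range_min, range_max):
    size = range_max - range_min
    wrapped = math.fmod(value - range_min, size)
    if wrapped < 0.0:
        wrapped += size
    return wrapped + range_min

# wrap_into_range(1.75, -0.5, 0.5) -> -0.25
# wrap_into_range(-0.6, -0.5, 0.5) -> 0.4 (approximately)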
diff --git a/magic_uv/op/copy_paste_uv.py b/magic_uv/op/copy_paste_uv.py
index 5126e241..ba754425 100644
--- a/magic_uv/op/copy_paste_uv.py
+++ b/magic_uv/op/copy_paste_uv.py
@@ -20,8 +20,8 @@
__author__ = "Nutti <nutti.metro@gmail.com>, Jace Priester"
__status__ = "production"
-__version__ = "6.2"
-__date__ = "31 Jul 2019"
+__version__ = "6.3"
+__date__ = "10 Aug 2020"
import bmesh
import bpy.utils
@@ -75,7 +75,7 @@ def get_copy_uv_layers(ops_obj, bm, uv_map):
else:
uv_layers.append(bm.loops.layers.uv[uv_map])
ops_obj.report(
- {'INFO'}, "Copy UV coordinate (UV map:{})".format(uv_map))
+ {'INFO'}, "Copy UV coordinate (UV map: {})".format(uv_map))
return uv_layers
@@ -97,7 +97,8 @@ def get_paste_uv_layers(ops_obj, obj, bm, src_info, uv_map):
return None
uv_layers.append(bm.loops.layers.uv[new_uv_map.name])
ops_obj.report(
- {'INFO'}, "Paste UV coordinate (UV map:{})".format(new_uv_map))
+ {'INFO'},
+ "Paste UV coordinate (UV map: {})".format(new_uv_map.name))
elif uv_map == "__all":
for src_layer in src_info.keys():
if src_layer not in bm.loops.layers.uv.keys():
@@ -111,7 +112,7 @@ def get_paste_uv_layers(ops_obj, obj, bm, src_info, uv_map):
else:
uv_layers.append(bm.loops.layers.uv[uv_map])
ops_obj.report(
- {'INFO'}, "Paste UV coordinate (UV map:{})".format(uv_map))
+ {'INFO'}, "Paste UV coordinate (UV map: {})".format(uv_map))
return uv_layers
diff --git a/magic_uv/op/copy_paste_uv_object.py b/magic_uv/op/copy_paste_uv_object.py
index 3297f2b8..1b812b82 100644
--- a/magic_uv/op/copy_paste_uv_object.py
+++ b/magic_uv/op/copy_paste_uv_object.py
@@ -20,8 +20,8 @@
__author__ = "Nutti <nutti.metro@gmail.com>"
__status__ = "production"
-__version__ = "6.2"
-__date__ = "31 Jul 2019"
+__version__ = "6.3"
+__date__ = "10 Aug 2020"
import bmesh
import bpy
diff --git a/magic_uv/op/copy_paste_uv_uvedit.py b/magic_uv/op/copy_paste_uv_uvedit.py
index 7704d1c9..f12851dd 100644
--- a/magic_uv/op/copy_paste_uv_uvedit.py
+++ b/magic_uv/op/copy_paste_uv_uvedit.py
@@ -20,8 +20,8 @@
__author__ = "imdjs, Nutti <nutti.metro@gmail.com>"
__status__ = "production"
-__version__ = "6.2"
-__date__ = "31 Jul 2019"
+__version__ = "6.3"
+__date__ = "10 Aug 2020"
import math
from math import atan2, sin, cos
diff --git a/magic_uv/op/flip_rotate_uv.py b/magic_uv/op/flip_rotate_uv.py
index da8af4c3..d0ac6a83 100644
--- a/magic_uv/op/flip_rotate_uv.py
+++ b/magic_uv/op/flip_rotate_uv.py
@@ -20,8 +20,8 @@
__author__ = "Nutti <nutti.metro@gmail.com>"
__status__ = "production"
-__version__ = "6.2"
-__date__ = "31 Jul 2019"
+__version__ = "6.3"
+__date__ = "10 Aug 2020"
import bpy
import bmesh
diff --git a/magic_uv/op/mirror_uv.py b/magic_uv/op/mirror_uv.py
index d28cf826..dcbaad5e 100644
--- a/magic_uv/op/mirror_uv.py
+++ b/magic_uv/op/mirror_uv.py
@@ -20,8 +20,8 @@
__author__ = "Keith (Wahooney) Boshoff, Nutti <nutti.metro@gmail.com>"
__status__ = "production"
-__version__ = "6.2"
-__date__ = "31 Jul 2019"
+__version__ = "6.3"
+__date__ = "10 Aug 2020"
import bpy
from bpy.props import (
diff --git a/magic_uv/op/move_uv.py b/magic_uv/op/move_uv.py
index 881ab378..19160a46 100644
--- a/magic_uv/op/move_uv.py
+++ b/magic_uv/op/move_uv.py
@@ -20,8 +20,8 @@
__author__ = "kgeogeo, mem, Nutti <nutti.metro@gmail.com>"
__status__ = "production"
-__version__ = "6.2"
-__date__ = "31 Jul 2019"
+__version__ = "6.3"
+__date__ = "10 Aug 2020"
import bpy
from bpy.props import BoolProperty
@@ -54,20 +54,6 @@ def _is_valid_context(context):
return True
-def _find_uv(context):
- bm = bmesh.from_edit_mesh(context.object.data)
- topology_dict = []
- uvs = []
- active_uv = bm.loops.layers.uv.active
- for fidx, f in enumerate(bm.faces):
- for vidx, v in enumerate(f.verts):
- if v.select:
- uvs.append(f.loops[vidx][active_uv].uv.copy())
- topology_dict.append([fidx, vidx])
-
- return topology_dict, uvs
-
-
@PropertyClassRegistry()
class _Properties:
idname = "move_uv"
@@ -106,6 +92,9 @@ class MUV_OT_MoveUV(bpy.types.Operator):
self.__ini_uvs = []
self.__operating = False
+ # Creating a BMesh is expensive, so cache the related objects.
+ self.__cache = {}
+
@classmethod
def poll(cls, context):
# we can not get area/space/region from console
@@ -119,7 +108,18 @@ class MUV_OT_MoveUV(bpy.types.Operator):
def is_running(cls, _):
return cls.__running
- def modal(self, context, event):
+ def _find_uv(self, bm, active_uv):
+ topology_dict = []
+ uvs = []
+ for fidx, f in enumerate(bm.faces):
+ for vidx, v in enumerate(f.verts):
+ if v.select:
+ uvs.append(f.loops[vidx][active_uv].uv.copy())
+ topology_dict.append([fidx, vidx])
+
+ return topology_dict, uvs
+
+ def modal(self, _, event):
if self.__first_time is True:
self.__prev_mouse = Vector((
event.mouse_region_x, event.mouse_region_y))
@@ -146,12 +146,11 @@ class MUV_OT_MoveUV(bpy.types.Operator):
return {'RUNNING_MODAL'}
# update UV
- obj = context.object
- bm = bmesh.from_edit_mesh(obj.data)
- active_uv = bm.loops.layers.uv.active
- for fidx, vidx in self.__topology_dict:
- l = bm.faces[fidx].loops[vidx]
- l[active_uv].uv = l[active_uv].uv + dv
+ obj = self.__cache["active_object"]
+ bm = self.__cache["bmesh"]
+ active_uv = self.__cache["active_uv"]
+ for uv in self.__cache["target_uv"]:
+ uv += dv
bmesh.update_edit_mesh(obj.data)
# check mouse preference
@@ -163,10 +162,12 @@ class MUV_OT_MoveUV(bpy.types.Operator):
for (fidx, vidx), uv in zip(self.__topology_dict, self.__ini_uvs):
bm.faces[fidx].loops[vidx][active_uv].uv = uv
MUV_OT_MoveUV.__running = False
+ self.__cache = {}
return {'FINISHED'}
# confirmed
if event.type == confirm_btn and event.value == 'PRESS':
MUV_OT_MoveUV.__running = False
+ self.__cache = {}
return {'FINISHED'}
return {'RUNNING_MODAL'}
@@ -177,7 +178,21 @@ class MUV_OT_MoveUV(bpy.types.Operator):
self.__first_time = True
context.window_manager.modal_handler_add(self)
- self.__topology_dict, self.__ini_uvs = _find_uv(context)
+
+ obj = context.active_object
+ bm = bmesh.from_edit_mesh(obj.data)
+ active_uv = bm.loops.layers.uv.active
+ self.__topology_dict, self.__ini_uvs = self._find_uv(bm, active_uv)
+
+ # Optimization: cache the objects that are expensive to recompute
+ # on every modal event.
+ self.__cache["active_object"] = obj
+ self.__cache["bmesh"] = bm
+ self.__cache["active_uv"] = active_uv
+ self.__cache["target_uv"] = []
+ for fidx, vidx in self.__topology_dict:
+ l = bm.faces[fidx].loops[vidx]
+ self.__cache["target_uv"].append(l[active_uv].uv)
if context.area:
context.area.tag_redraw()
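The Move UV change above builds the BMesh once in invoke() and keeps references to the affected loop UVs, instead of recreating the BMesh on every modal event. A simplified sketch of that caching pattern, assuming an active MESH object in Edit Mode (helper names are illustrative):

import bmesh

def build_uv_cache(obj):
    bm = bmesh.from_edit_mesh(obj.data)   # built once, reused for every event
    active_uv = bm.loops.layers.uv.active
    target_uvs = [f.loops[vidx][active_uv].uv
                  for f in bm.faces
                  for vidx, v in enumerate(f.verts) if v.select]
    return bm, target_uvs

def move_cached_uvs(obj, target_uvs, delta):
    for uv in target_uvs:
        uv += delta                        # in-place update of the cached UVs
    bmesh.update_edit_mesh(obj.data)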
diff --git a/magic_uv/op/pack_uv.py b/magic_uv/op/pack_uv.py
index 3589231a..0d7ed966 100644
--- a/magic_uv/op/pack_uv.py
+++ b/magic_uv/op/pack_uv.py
@@ -20,8 +20,8 @@
__author__ = "Nutti <nutti.metro@gmail.com>"
__status__ = "production"
-__version__ = "6.2"
-__date__ = "31 Jul 2019"
+__version__ = "6.3"
+__date__ = "10 Aug 2020"
from math import fabs
diff --git a/magic_uv/op/preserve_uv_aspect.py b/magic_uv/op/preserve_uv_aspect.py
index c9ba7204..5b3e50cf 100644
--- a/magic_uv/op/preserve_uv_aspect.py
+++ b/magic_uv/op/preserve_uv_aspect.py
@@ -20,8 +20,8 @@
__author__ = "Nutti <nutti.metro@gmail.com>"
__status__ = "production"
-__version__ = "6.2"
-__date__ = "31 Jul 2019"
+__version__ = "6.3"
+__date__ = "10 Aug 2020"
import bpy
from bpy.props import StringProperty, EnumProperty, BoolProperty
diff --git a/magic_uv/op/select_uv.py b/magic_uv/op/select_uv.py
index 223f9e2f..d80b43a8 100644
--- a/magic_uv/op/select_uv.py
+++ b/magic_uv/op/select_uv.py
@@ -20,8 +20,8 @@
__author__ = "Nutti <nutti.metro@gmail.com>"
__status__ = "production"
-__version__ = "6.2"
-__date__ = "31 Jul 2019"
+__version__ = "6.3"
+__date__ = "10 Aug 2020"
import bpy
from bpy.props import BoolProperty
@@ -30,6 +30,7 @@ import bmesh
from .. import common
from ..utils.bl_class_registry import BlClassRegistry
from ..utils.property_class_registry import PropertyClassRegistry
+from ..utils import compatibility as compat
def _is_valid_context(context):
@@ -91,28 +92,42 @@ class MUV_OT_SelectUV_SelectOverlapped(bpy.types.Operator):
return _is_valid_context(context)
def execute(self, context):
- obj = context.active_object
- bm = bmesh.from_edit_mesh(obj.data)
- if common.check_version(2, 73, 0) >= 0:
- bm.faces.ensure_lookup_table()
- uv_layer = bm.loops.layers.uv.verify()
+ objs = [o for o in bpy.data.objects if compat.get_object_select(o)]
+
+ bm_list = []
+ uv_layer_list = []
+ faces_list = []
+ for o in bpy.data.objects:
+ if not compat.get_object_select(o):
+ continue
+ if o.type != 'MESH':
+ continue
+
+ bm = bmesh.from_edit_mesh(o.data)
+ if common.check_version(2, 73, 0) >= 0:
+ bm.faces.ensure_lookup_table()
+ uv_layer = bm.loops.layers.uv.verify()
- if context.tool_settings.use_uv_select_sync:
- sel_faces = [f for f in bm.faces]
- else:
- sel_faces = [f for f in bm.faces if f.select]
+ if context.tool_settings.use_uv_select_sync:
+ sel_faces = [f for f in bm.faces]
+ else:
+ sel_faces = [f for f in bm.faces if f.select]
+ bm_list.append(bm)
+ uv_layer_list.append(uv_layer)
+ faces_list.append(sel_faces)
- overlapped_info = common.get_overlapped_uv_info(bm, sel_faces,
- uv_layer, 'FACE')
+ overlapped_info = common.get_overlapped_uv_info(bm_list, faces_list,
+ uv_layer_list, 'FACE')
for info in overlapped_info:
if context.tool_settings.use_uv_select_sync:
info["subject_face"].select = True
else:
for l in info["subject_face"].loops:
- l[uv_layer].select = True
+ l[info["subject_uv_layer"]].select = True
- bmesh.update_edit_mesh(obj.data)
+ for o in objs:
+ bmesh.update_edit_mesh(o.data)
return {'FINISHED'}
@@ -136,26 +151,40 @@ class MUV_OT_SelectUV_SelectFlipped(bpy.types.Operator):
return _is_valid_context(context)
def execute(self, context):
- obj = context.active_object
- bm = bmesh.from_edit_mesh(obj.data)
- if common.check_version(2, 73, 0) >= 0:
- bm.faces.ensure_lookup_table()
- uv_layer = bm.loops.layers.uv.verify()
+ objs = [o for o in bpy.data.objects if compat.get_object_select(o)]
+
+ bm_list = []
+ uv_layer_list = []
+ faces_list = []
+ for o in bpy.data.objects:
+ if not compat.get_object_select(o):
+ continue
+ if o.type != 'MESH':
+ continue
+
+ bm = bmesh.from_edit_mesh(o.data)
+ if common.check_version(2, 73, 0) >= 0:
+ bm.faces.ensure_lookup_table()
+ uv_layer = bm.loops.layers.uv.verify()
- if context.tool_settings.use_uv_select_sync:
- sel_faces = [f for f in bm.faces]
- else:
- sel_faces = [f for f in bm.faces if f.select]
+ if context.tool_settings.use_uv_select_sync:
+ sel_faces = [f for f in bm.faces]
+ else:
+ sel_faces = [f for f in bm.faces if f.select]
+ bm_list.append(bm)
+ uv_layer_list.append(uv_layer)
+ faces_list.append(sel_faces)
- flipped_info = common.get_flipped_uv_info(sel_faces, uv_layer)
+ flipped_info = common.get_flipped_uv_info(faces_list, uv_layer_list)
for info in flipped_info:
if context.tool_settings.use_uv_select_sync:
info["face"].select = True
else:
for l in info["face"].loops:
- l[uv_layer].select = True
+ l[info["uv_layer"]].select = True
- bmesh.update_edit_mesh(obj.data)
+ for o in objs:
+ bmesh.update_edit_mesh(o.data)
return {'FINISHED'}
diff --git a/magic_uv/op/smooth_uv.py b/magic_uv/op/smooth_uv.py
index 17068308..94e41367 100644
--- a/magic_uv/op/smooth_uv.py
+++ b/magic_uv/op/smooth_uv.py
@@ -20,8 +20,8 @@
__author__ = "imdjs, Nutti <nutti.metro@gmail.com>"
__status__ = "production"
-__version__ = "6.2"
-__date__ = "31 Jul 2019"
+__version__ = "6.3"
+__date__ = "10 Aug 2020"
import bpy
from bpy.props import BoolProperty, FloatProperty
@@ -167,8 +167,7 @@ class MUV_OT_SmoothUV(bpy.types.Operator):
# get target UV
for i in range(len(accm_uvlens[:-1])):
# get line segment which UV will be placed
- if ((accm_uvlens[i] <= target_length) and
- (accm_uvlens[i + 1] > target_length)):
+ if accm_uvlens[i] <= target_length < accm_uvlens[i + 1]:
tgt_seg_len = target_length - accm_uvlens[i]
seg_len = accm_uvlens[i + 1] - accm_uvlens[i]
uv1 = orig_uvs[i]
@@ -240,8 +239,7 @@ class MUV_OT_SmoothUV(bpy.types.Operator):
# get target UV
for i in range(len(accm_uv[:-1])):
# get line segment to be placed
- if ((accm_uv[i] <= target_length) and
- (accm_uv[i + 1] > target_length)):
+ if accm_uv[i] <= target_length < accm_uv[i + 1]:
tgt_seg_len = target_length - accm_uv[i]
seg_len = accm_uv[i + 1] - accm_uv[i]
uv1 = uvs[i]
diff --git a/magic_uv/op/texture_lock.py b/magic_uv/op/texture_lock.py
index 43d78549..ddcaf315 100644
--- a/magic_uv/op/texture_lock.py
+++ b/magic_uv/op/texture_lock.py
@@ -20,8 +20,8 @@
__author__ = "Nutti <nutti.metro@gmail.com>"
__status__ = "production"
-__version__ = "6.2"
-__date__ = "31 Jul 2019"
+__version__ = "6.3"
+__date__ = "10 Aug 2020"
import math
from math import atan2, cos, sqrt, sin, fabs
@@ -435,7 +435,7 @@ class MUV_OT_TextureLock_Intr(bpy.types.Operator):
bm.faces.ensure_lookup_table()
prev = set(self.__intr_verts)
- now = set([v.index for v in bm.verts if v.select])
+ now = {v.index for v in bm.verts if v.select}
return prev != now
diff --git a/magic_uv/op/texture_projection.py b/magic_uv/op/texture_projection.py
index 6ef6b1ce..b754dd88 100644
--- a/magic_uv/op/texture_projection.py
+++ b/magic_uv/op/texture_projection.py
@@ -20,8 +20,8 @@
__author__ = "Nutti <nutti.metro@gmail.com>"
__status__ = "production"
-__version__ = "6.2"
-__date__ = "31 Jul 2019"
+__version__ = "6.3"
+__date__ = "10 Aug 2020"
from collections import namedtuple
diff --git a/magic_uv/op/texture_wrap.py b/magic_uv/op/texture_wrap.py
index 9936a510..92512438 100644
--- a/magic_uv/op/texture_wrap.py
+++ b/magic_uv/op/texture_wrap.py
@@ -20,8 +20,8 @@
__author__ = "Nutti <nutti.metro@gmail.com>"
__status__ = "production"
-__version__ = "6.2"
-__date__ = "31 Jul 2019"
+__version__ = "6.3"
+__date__ = "10 Aug 2020"
import bpy
from bpy.props import (
diff --git a/magic_uv/op/transfer_uv.py b/magic_uv/op/transfer_uv.py
index b63376c9..ce9639a7 100644
--- a/magic_uv/op/transfer_uv.py
+++ b/magic_uv/op/transfer_uv.py
@@ -20,8 +20,8 @@
__author__ = "Nutti <nutti.metro@gmail.com>, Mifth, MaxRobinot"
__status__ = "production"
-__version__ = "6.2"
-__date__ = "31 Jul 2019"
+__version__ = "6.3"
+__date__ = "10 Aug 2020"
from collections import OrderedDict
diff --git a/magic_uv/op/unwrap_constraint.py b/magic_uv/op/unwrap_constraint.py
index bd78dafc..3c23575a 100644
--- a/magic_uv/op/unwrap_constraint.py
+++ b/magic_uv/op/unwrap_constraint.py
@@ -18,8 +18,8 @@
__author__ = "Nutti <nutti.metro@gmail.com>"
__status__ = "production"
-__version__ = "6.2"
-__date__ = "31 Jul 2019"
+__version__ = "6.3"
+__date__ = "10 Aug 2020"
import bpy
from bpy.props import (
diff --git a/magic_uv/op/uv_bounding_box.py b/magic_uv/op/uv_bounding_box.py
index 589abcc4..d4edac9c 100644
--- a/magic_uv/op/uv_bounding_box.py
+++ b/magic_uv/op/uv_bounding_box.py
@@ -20,8 +20,8 @@
__author__ = "Nutti <nutti.metro@gmail.com>"
__status__ = "production"
-__version__ = "6.2"
-__date__ = "31 Jul 2019"
+__version__ = "6.3"
+__date__ = "10 Aug 2020"
from enum import IntEnum
import math
@@ -438,10 +438,8 @@ class StateNone(StateBase):
mouse_view.x, mouse_view.y)
for i, p in enumerate(ctrl_points):
px, py = context.region.view2d.view_to_region(p.x, p.y)
- in_cp_x = (px + cp_react_size > x and
- px - cp_react_size < x)
- in_cp_y = (py + cp_react_size > y and
- py - cp_react_size < y)
+ in_cp_x = px - cp_react_size < x < px + cp_react_size
+ in_cp_y = py - cp_react_size < y < py + cp_react_size
if in_cp_x and in_cp_y:
if is_uscaling:
arr = [1, 3, 6, 8]
diff --git a/magic_uv/op/uv_inspection.py b/magic_uv/op/uv_inspection.py
index c5f92004..8aae181e 100644
--- a/magic_uv/op/uv_inspection.py
+++ b/magic_uv/op/uv_inspection.py
@@ -20,8 +20,11 @@
__author__ = "Nutti <nutti.metro@gmail.com>"
__status__ = "production"
-__version__ = "6.2"
-__date__ = "31 Jul 2019"
+__version__ = "6.3"
+__date__ = "10 Aug 2020"
+
+import random
+from math import fabs
import bpy
from bpy.props import BoolProperty, EnumProperty
@@ -65,19 +68,31 @@ def _update_uvinsp_info(context):
sc = context.scene
props = sc.muv_props.uv_inspection
- obj = context.active_object
- bm = bmesh.from_edit_mesh(obj.data)
- if common.check_version(2, 73, 0) >= 0:
- bm.faces.ensure_lookup_table()
- uv_layer = bm.loops.layers.uv.verify()
+ bm_list = []
+ uv_layer_list = []
+ faces_list = []
+ for o in bpy.data.objects:
+ if not compat.get_object_select(o):
+ continue
+ if o.type != 'MESH':
+ continue
+
+ bm = bmesh.from_edit_mesh(o.data)
+ if common.check_version(2, 73, 0) >= 0:
+ bm.faces.ensure_lookup_table()
+ uv_layer = bm.loops.layers.uv.verify()
+
+ if context.tool_settings.use_uv_select_sync:
+ sel_faces = [f for f in bm.faces]
+ else:
+ sel_faces = [f for f in bm.faces if f.select]
+ bm_list.append(bm)
+ uv_layer_list.append(uv_layer)
+ faces_list.append(sel_faces)
- if context.tool_settings.use_uv_select_sync:
- sel_faces = [f for f in bm.faces]
- else:
- sel_faces = [f for f in bm.faces if f.select]
props.overlapped_info = common.get_overlapped_uv_info(
- bm, sel_faces, uv_layer, sc.muv_uv_inspection_show_mode)
- props.flipped_info = common.get_flipped_uv_info(sel_faces, uv_layer)
+ bm_list, faces_list, uv_layer_list, sc.muv_uv_inspection_show_mode)
+ props.flipped_info = common.get_flipped_uv_info(faces_list, uv_layer_list)
@PropertyClassRegistry()
@@ -205,14 +220,15 @@ class MUV_OT_UVInspection_Render(bpy.types.Operator):
bgl.glColor4f(color[0], color[1], color[2], color[3])
for uv in poly:
x, y = context.region.view2d.view_to_region(
- uv.x, uv.y)
+ uv.x, uv.y, clip=False)
bgl.glVertex2f(x, y)
bgl.glEnd()
elif sc.muv_uv_inspection_show_mode == 'FACE':
bgl.glBegin(bgl.GL_TRIANGLE_FAN)
bgl.glColor4f(color[0], color[1], color[2], color[3])
for uv in info["subject_uvs"]:
- x, y = context.region.view2d.view_to_region(uv.x, uv.y)
+ x, y = context.region.view2d.view_to_region(
+ uv.x, uv.y, clip=False)
bgl.glVertex2f(x, y)
bgl.glEnd()
@@ -226,14 +242,15 @@ class MUV_OT_UVInspection_Render(bpy.types.Operator):
bgl.glColor4f(color[0], color[1], color[2], color[3])
for uv in poly:
x, y = context.region.view2d.view_to_region(
- uv.x, uv.y)
+ uv.x, uv.y, clip=False)
bgl.glVertex2f(x, y)
bgl.glEnd()
elif sc.muv_uv_inspection_show_mode == 'FACE':
bgl.glBegin(bgl.GL_TRIANGLE_FAN)
bgl.glColor4f(color[0], color[1], color[2], color[3])
for uv in info["uvs"]:
- x, y = context.region.view2d.view_to_region(uv.x, uv.y)
+ x, y = context.region.view2d.view_to_region(
+ uv.x, uv.y, clip=False)
bgl.glVertex2f(x, y)
bgl.glEnd()
@@ -279,3 +296,201 @@ class MUV_OT_UVInspection_Update(bpy.types.Operator):
context.area.tag_redraw()
return {'FINISHED'}
+
+
+@BlClassRegistry()
+class MUV_OT_UVInspection_PaintUVIsland(bpy.types.Operator):
+ """
+ Operation class: Paint UV island with random color.
+ """
+
+ bl_idname = "uv.muv_uv_inspection_paint_uv_island"
+ bl_label = "Paint UV Island"
+ bl_description = "Paint UV island with random color"
+ bl_options = {'REGISTER', 'UNDO'}
+
+ @classmethod
+ def poll(cls, context):
+ # we can not get area/space/region from console
+ if common.is_console_mode():
+ return True
+ return _is_valid_context(context)
+
+ def _get_or_new_image(self, name, width, height):
+ if name in bpy.data.images.keys():
+ return bpy.data.images[name]
+ return bpy.data.images.new(name, width, height)
+
+ def _get_or_new_material(self, name):
+ if name in bpy.data.materials.keys():
+ return bpy.data.materials[name]
+ return bpy.data.materials.new(name)
+
+ def _get_or_new_texture(self, name):
+ if name in bpy.data.textures.keys():
+ return bpy.data.textures[name]
+ return bpy.data.textures.new(name, 'IMAGE')
+
+ def _get_override_context(self, context):
+ for window in context.window_manager.windows:
+ screen = window.screen
+ for area in screen.areas:
+ if area.type == 'VIEW_3D':
+ for region in area.regions:
+ if region.type == 'WINDOW':
+ return {'window': window, 'screen': screen,
+ 'area': area, 'region': region}
+ return None
+
+ def _create_unique_color(self, exist_colors, allowable=0.1):
+ retry = 0
+ while retry < 20:
+ r = random.random()
+ g = random.random()
+ b = random.random()
+ new_color = [r, g, b]
+ for color in exist_colors:
+ if ((fabs(new_color[0] - color[0]) < allowable) and
+ (fabs(new_color[1] - color[1]) < allowable) and
+ (fabs(new_color[2] - color[2]) < allowable)):
+ break
+ else:
+ return new_color
+ return None
+
+ def execute(self, context):
+ obj = context.active_object
+ mode_orig = context.object.mode
+ override_context = self._get_override_context(context)
+ if override_context is None:
+ self.report({'WARNING'}, "More than one 'VIEW_3D' area must exist")
+ return {'CANCELLED'}
+
+ # Set up the material of the drawing target.
+ target_image = self._get_or_new_image(
+ "MagicUV_PaintUVIsland", 4096, 4096)
+ target_mtrl = self._get_or_new_material("MagicUV_PaintUVMaterial")
+ if compat.check_version(2, 80, 0) >= 0:
+ target_mtrl.use_nodes = True
+ output_node = target_mtrl.node_tree.nodes["Material Output"]
+ nodes_to_remove = [n for n in target_mtrl.node_tree.nodes
+ if n != output_node]
+ for n in nodes_to_remove:
+ target_mtrl.node_tree.nodes.remove(n)
+ texture_node = \
+ target_mtrl.node_tree.nodes.new("ShaderNodeTexImage")
+ texture_node.image = target_image
+ target_mtrl.node_tree.links.new(output_node.inputs["Surface"],
+ texture_node.outputs["Color"])
+ obj.data.use_paint_mask = True
+
+ # Apply material to object (all faces).
+ found = False
+ for mtrl_idx, mtrl_slot in enumerate(obj.material_slots):
+ if mtrl_slot.material == target_mtrl:
+ found = True
+ break
+ if not found:
+ bpy.ops.object.material_slot_add()
+ mtrl_idx = len(obj.material_slots) - 1
+ obj.material_slots[mtrl_idx].material = target_mtrl
+ bpy.ops.object.mode_set(mode='EDIT')
+ bm = bmesh.from_edit_mesh(obj.data)
+ bm.faces.ensure_lookup_table()
+ for f in bm.faces:
+ f.select = True
+ bmesh.update_edit_mesh(obj.data)
+ obj.active_material_index = mtrl_idx
+ obj.active_material = target_mtrl
+ bpy.ops.object.material_slot_assign()
+ else:
+ target_tex_slot = target_mtrl.texture_slots.add()
+ target_tex = self._get_or_new_texture("MagicUV_PaintUVTexture")
+ target_tex_slot.texture = target_tex
+ obj.data.use_paint_mask = True
+
+ # Apply material to object (all faces).
+ found = False
+ for mtrl_idx, mtrl_slot in enumerate(obj.material_slots):
+ if mtrl_slot.material == target_mtrl:
+ found = True
+ break
+ if not found:
+ bpy.ops.object.material_slot_add()
+ mtrl_idx = len(obj.material_slots) - 1
+ obj.material_slots[mtrl_idx].material = target_mtrl
+ bpy.ops.object.mode_set(mode='EDIT')
+ bm = bmesh.from_edit_mesh(obj.data)
+ bm.faces.ensure_lookup_table()
+ for f in bm.faces:
+ f.select = True
+ bmesh.update_edit_mesh(obj.data)
+ obj.active_material_index = mtrl_idx
+ obj.active_material = target_mtrl
+ bpy.ops.object.material_slot_assign()
+
+ # Update active image in Image Editor.
+ _, _, space = common.get_space(
+ 'IMAGE_EDITOR', 'WINDOW', 'IMAGE_EDITOR')
+ if space is None:
+ return {'CANCELLED'}
+ space.image = target_image
+
+ # Analyze islands to build a map from faces to paint colors.
+ islands = common.get_island_info_from_bmesh(bm)
+ color_to_faces = []
+ for isl in islands:
+ color = self._create_unique_color([c[0] for c in color_to_faces])
+ if color is None:
+ self.report({'WARNING'},
+ "Failed to create color. Please try again")
+ return {'CANCELLED'}
+ indices = [f["face"].index for f in isl["faces"]]
+ color_to_faces.append((color, indices))
+
+ for cf in color_to_faces:
+ # Update selection information.
+ bpy.ops.object.mode_set(mode='EDIT')
+ bm = bmesh.from_edit_mesh(obj.data)
+ bm.faces.ensure_lookup_table()
+ for f in bm.faces:
+ f.select = False
+ for fidx in cf[1]:
+ bm.faces[fidx].select = True
+ bmesh.update_edit_mesh(obj.data)
+ bpy.ops.object.mode_set(mode='OBJECT')
+
+ # Update brush color.
+ bpy.data.brushes["Fill"].color = cf[0]
+
+ # Paint.
+ bpy.ops.object.mode_set(mode='TEXTURE_PAINT')
+ if compat.check_version(2, 80, 0) >= 0:
+ bpy.ops.paint.brush_select(override_context, image_tool='FILL')
+ else:
+ paint_settings = \
+ bpy.data.scenes['Scene'].tool_settings.image_paint
+ paint_mode_orig = paint_settings.mode
+ paint_canvas_orig = paint_settings.canvas
+ paint_settings.mode = 'IMAGE'
+ paint_settings.canvas = target_image
+ bpy.ops.paint.brush_select(override_context,
+ texture_paint_tool='FILL')
+ bpy.ops.paint.image_paint(override_context, stroke=[{
+ "name": "",
+ "location": (0, 0, 0),
+ "mouse": (0, 0),
+ "size": 0,
+ "pressure": 0,
+ "pen_flip": False,
+ "time": 0,
+ "is_start": False
+ }])
+
+ if compat.check_version(2, 80, 0) < 0:
+ paint_settings.mode = paint_mode_orig
+ paint_settings.canvas = paint_canvas_orig
+
+ bpy.ops.object.mode_set(mode=mode_orig)
+
+ return {'FINISHED'}
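_create_unique_color above keeps drawing random RGB triples until one differs from every existing colour by at least `allowable` in some channel, giving up after a bounded number of retries. The same rejection loop, compressed (create_unique_color is an illustrative name):

import random

def create_unique_color(existing, allowable=0.1, retries=20):
    for _ in range(retries):
        candidate = [random.random() for _ in range(3)]
        # accept only if no existing colour is within `allowable` on all channels
        if all(any(abs(c - e) >= allowable for c, e in zip(candidate, color))
               for color in existing):
            return candidate
    return None

# create_unique_color([[0.0, 0.0, 0.0]]) almost always returns a non-black colour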
diff --git a/magic_uv/op/uv_sculpt.py b/magic_uv/op/uv_sculpt.py
index ff3a9db3..f40ab253 100644
--- a/magic_uv/op/uv_sculpt.py
+++ b/magic_uv/op/uv_sculpt.py
@@ -20,8 +20,8 @@
__author__ = "Nutti <nutti.metro@gmail.com>"
__status__ = "production"
-__version__ = "6.2"
-__date__ = "31 Jul 2019"
+__version__ = "6.3"
+__date__ = "10 Aug 2020"
from math import pi, cos, tan, sin
@@ -168,6 +168,20 @@ class _Properties:
del scene.muv_uv_sculpt_relax_method
+def location_3d_to_region_2d_extra(region, rv3d, coord):
+ coord_2d = view3d_utils.location_3d_to_region_2d(region, rv3d, coord)
+ if coord_2d is None:
+ prj = rv3d.perspective_matrix @ Vector(
+ (coord[0], coord[1], coord[2], 1.0))
+ width_half = region.width / 2.0
+ height_half = region.height / 2.0
+ coord_2d = Vector((
+ width_half + width_half * (prj.x / prj.w),
+ height_half + height_half * (prj.y / prj.w)
+ ))
+ return coord_2d
+
+
@BlClassRegistry()
class MUV_OT_UVSculpt(bpy.types.Operator):
"""
@@ -263,7 +277,7 @@ class MUV_OT_UVSculpt(bpy.types.Operator):
if not f.select:
continue
for i, l in enumerate(f.loops):
- loc_2d = view3d_utils.location_3d_to_region_2d(
+ loc_2d = location_3d_to_region_2d_extra(
region, space.region_3d,
compat.matmul(world_mat, l.vert.co))
diff = loc_2d - self.__initial_mco
@@ -301,7 +315,7 @@ class MUV_OT_UVSculpt(bpy.types.Operator):
if not f.select:
continue
for i, l in enumerate(f.loops):
- loc_2d = view3d_utils.location_3d_to_region_2d(
+ loc_2d = location_3d_to_region_2d_extra(
region, space.region_3d,
compat.matmul(world_mat, l.vert.co))
diff = loc_2d - self.__initial_mco
@@ -393,7 +407,7 @@ class MUV_OT_UVSculpt(bpy.types.Operator):
if not f.select:
continue
for i, l in enumerate(f.loops):
- loc_2d = view3d_utils.location_3d_to_region_2d(
+ loc_2d = location_3d_to_region_2d_extra(
region, space.region_3d,
compat.matmul(world_mat, l.vert.co))
diff = loc_2d - self.__initial_mco
diff --git a/magic_uv/op/uvw.py b/magic_uv/op/uvw.py
index 4b4a4f04..fca72d2c 100644
--- a/magic_uv/op/uvw.py
+++ b/magic_uv/op/uvw.py
@@ -20,8 +20,8 @@
__author__ = "Alexander Milovsky, Nutti <nutti.metro@gmail.com>"
__status__ = "production"
-__version__ = "6.2"
-__date__ = "31 Jul 2019"
+__version__ = "6.3"
+__date__ = "10 Aug 2020"
from math import sin, cos, pi
@@ -228,20 +228,26 @@ class MUV_OT_UVW_BoxMap(bpy.types.Operator):
return True
return _is_valid_context(context)
- def execute(self, context):
- obj = context.active_object
- bm = bmesh.from_edit_mesh(obj.data)
- if common.check_version(2, 73, 0) >= 0:
- bm.faces.ensure_lookup_table()
+ def execute(self, _):
+ if compat.check_version(2, 80, 0) < 0:
+ objs = [bpy.context.active_object]
+ else:
+ objs = [o for o in bpy.data.objects
+ if compat.get_object_select(o) and o.type == 'MESH']
+
+ for o in objs:
+ bm = bmesh.from_edit_mesh(o.data)
+ if common.check_version(2, 73, 0) >= 0:
+ bm.faces.ensure_lookup_table()
- # get UV layer
- uv_layer = _get_uv_layer(self, bm, self.assign_uvmap)
- if not uv_layer:
- return {'CANCELLED'}
+ # get UV layer
+ uv_layer = _get_uv_layer(self, bm, self.assign_uvmap)
+ if not uv_layer:
+ return {'CANCELLED'}
- _apply_box_map(bm, uv_layer, self.size, self.offset, self.rotation,
- self.tex_aspect)
- bmesh.update_edit_mesh(obj.data)
+ _apply_box_map(bm, uv_layer, self.size, self.offset, self.rotation,
+ self.tex_aspect)
+ bmesh.update_edit_mesh(o.data)
return {'FINISHED'}
@@ -285,20 +291,26 @@ class MUV_OT_UVW_BestPlanerMap(bpy.types.Operator):
return True
return _is_valid_context(context)
- def execute(self, context):
- obj = context.active_object
- bm = bmesh.from_edit_mesh(obj.data)
- if common.check_version(2, 73, 0) >= 0:
- bm.faces.ensure_lookup_table()
+ def execute(self, _):
+ if compat.check_version(2, 80, 0) < 0:
+ objs = [bpy.context.active_object]
+ else:
+ objs = [o for o in bpy.data.objects
+ if compat.get_object_select(o) and o.type == 'MESH']
+
+ for o in objs:
+ bm = bmesh.from_edit_mesh(o.data)
+ if common.check_version(2, 73, 0) >= 0:
+ bm.faces.ensure_lookup_table()
- # get UV layer
- uv_layer = _get_uv_layer(self, bm, self.assign_uvmap)
- if not uv_layer:
- return {'CANCELLED'}
+ # get UV layer
+ uv_layer = _get_uv_layer(self, bm, self.assign_uvmap)
+ if not uv_layer:
+ return {'CANCELLED'}
- _apply_planer_map(bm, uv_layer, self.size, self.offset, self.rotation,
- self.tex_aspect)
+ _apply_planer_map(bm, uv_layer, self.size, self.offset,
+ self.rotation, self.tex_aspect)
- bmesh.update_edit_mesh(obj.data)
+ bmesh.update_edit_mesh(o.data)
return {'FINISHED'}
diff --git a/magic_uv/op/world_scale_uv.py b/magic_uv/op/world_scale_uv.py
index 0107fc6f..9ed86eb0 100644
--- a/magic_uv/op/world_scale_uv.py
+++ b/magic_uv/op/world_scale_uv.py
@@ -20,8 +20,8 @@
__author__ = "McBuff, Nutti <nutti.metro@gmail.com>"
__status__ = "production"
-__version__ = "6.2"
-__date__ = "31 Jul 2019"
+__version__ = "6.3"
+__date__ = "10 Aug 2020"
from math import sqrt
@@ -31,7 +31,6 @@ from bpy.props import (
FloatProperty,
IntVectorProperty,
BoolProperty,
- StringProperty,
)
import bmesh
from mathutils import Vector
@@ -63,9 +62,34 @@ def _is_valid_context(context):
return True
-def _measure_wsuv_info(obj, method='FIRST', tex_size=None):
- mesh_area = common.measure_mesh_area(obj)
- uv_area = common.measure_uv_area(obj, method, tex_size)
+def _measure_wsuv_info(obj, calc_method='MESH',
+ tex_selection_method='FIRST', tex_size=None,
+ only_selected=True):
+ mesh_areas = common.measure_mesh_area(obj, calc_method, only_selected)
+ uv_areas = common.measure_uv_area(obj, calc_method, tex_selection_method,
+ tex_size, only_selected)
+
+ if not uv_areas:
+ return None, mesh_areas, None
+
+ if len(mesh_areas) != len(uv_areas):
+ raise ValueError("mesh_area and uv_area must be same length")
+
+ densities = []
+ for mesh_area, uv_area in zip(mesh_areas, uv_areas):
+ if mesh_area == 0.0:
+ densities.append(0.0)
+ else:
+ densities.append(sqrt(uv_area) / sqrt(mesh_area))
+
+ return uv_areas, mesh_areas, densities
+
+
+def _measure_wsuv_info_from_faces(obj, faces, uv_layer, tex_layer,
+ tex_selection_method='FIRST', tex_size=None):
+ mesh_area = common.measure_mesh_area_from_faces(faces)
+ uv_area = common.measure_uv_area_from_faces(
+ obj, faces, uv_layer, tex_layer, tex_selection_method, tex_size)
if not uv_area:
return None, mesh_area, None
@@ -78,22 +102,12 @@ def _measure_wsuv_info(obj, method='FIRST', tex_size=None):
return uv_area, mesh_area, density
-def _apply(obj, origin, factor):
- bm = bmesh.from_edit_mesh(obj.data)
- if common.check_version(2, 73, 0) >= 0:
- bm.verts.ensure_lookup_table()
- bm.edges.ensure_lookup_table()
- bm.faces.ensure_lookup_table()
-
- sel_faces = [f for f in bm.faces if f.select]
-
- uv_layer = bm.loops.layers.uv.verify()
-
+def _apply(faces, uv_layer, origin, factor):
# calculate origin
if origin == 'CENTER':
origin = Vector((0.0, 0.0))
num = 0
- for f in sel_faces:
+ for f in faces:
for l in f.loops:
uv = l[uv_layer].uv
origin = origin + uv
@@ -101,7 +115,7 @@ def _apply(obj, origin, factor):
origin = origin / num
elif origin == 'LEFT_TOP':
origin = Vector((100000.0, -100000.0))
- for f in sel_faces:
+ for f in faces:
for l in f.loops:
uv = l[uv_layer].uv
origin.x = min(origin.x, uv.x)
@@ -109,7 +123,7 @@ def _apply(obj, origin, factor):
elif origin == 'LEFT_CENTER':
origin = Vector((100000.0, 0.0))
num = 0
- for f in sel_faces:
+ for f in faces:
for l in f.loops:
uv = l[uv_layer].uv
origin.x = min(origin.x, uv.x)
@@ -118,7 +132,7 @@ def _apply(obj, origin, factor):
origin.y = origin.y / num
elif origin == 'LEFT_BOTTOM':
origin = Vector((100000.0, 100000.0))
- for f in sel_faces:
+ for f in faces:
for l in f.loops:
uv = l[uv_layer].uv
origin.x = min(origin.x, uv.x)
@@ -126,7 +140,7 @@ def _apply(obj, origin, factor):
elif origin == 'CENTER_TOP':
origin = Vector((0.0, -100000.0))
num = 0
- for f in sel_faces:
+ for f in faces:
for l in f.loops:
uv = l[uv_layer].uv
origin.x = origin.x + uv.x
@@ -136,7 +150,7 @@ def _apply(obj, origin, factor):
elif origin == 'CENTER_BOTTOM':
origin = Vector((0.0, 100000.0))
num = 0
- for f in sel_faces:
+ for f in faces:
for l in f.loops:
uv = l[uv_layer].uv
origin.x = origin.x + uv.x
@@ -145,7 +159,7 @@ def _apply(obj, origin, factor):
origin.x = origin.x / num
elif origin == 'RIGHT_TOP':
origin = Vector((-100000.0, -100000.0))
- for f in sel_faces:
+ for f in faces:
for l in f.loops:
uv = l[uv_layer].uv
origin.x = max(origin.x, uv.x)
@@ -153,7 +167,7 @@ def _apply(obj, origin, factor):
elif origin == 'RIGHT_CENTER':
origin = Vector((-100000.0, 0.0))
num = 0
- for f in sel_faces:
+ for f in faces:
for l in f.loops:
uv = l[uv_layer].uv
origin.x = max(origin.x, uv.x)
@@ -162,21 +176,19 @@ def _apply(obj, origin, factor):
origin.y = origin.y / num
elif origin == 'RIGHT_BOTTOM':
origin = Vector((-100000.0, 100000.0))
- for f in sel_faces:
+ for f in faces:
for l in f.loops:
uv = l[uv_layer].uv
origin.x = max(origin.x, uv.x)
origin.y = min(origin.y, uv.y)
# update UV coordinate
- for f in sel_faces:
+ for f in faces:
for l in f.loops:
uv = l[uv_layer].uv
diff = uv - origin
l[uv_layer].uv = origin + diff * factor
- bmesh.update_edit_mesh(obj.data)
-
def _get_target_textures(_, __):
images = common.find_images(bpy.context.active_object)
@@ -207,7 +219,8 @@ class _Properties:
)
scene.muv_world_scale_uv_src_uv_area = FloatProperty(
name="UV Area",
- description="Source UV Area",
+ description="Source UV Area (Average if calculation method is UV "
+ "Island or Face)",
default=0.0,
min=0.0
)
@@ -277,6 +290,26 @@ class _Properties:
description="Texture to be applied",
items=_get_target_textures
)
+ scene.muv_world_scale_uv_tgt_area_calc_method = EnumProperty(
+ name="Area Calculation Method",
+ description="How to calculate target area",
+ items=[
+ ('MESH', "Mesh", "Calculate the area using all faces in the mesh"),
+ ('UV ISLAND', "UV Island", "Calculate the area for each UV island"),
+ ('FACE', "Face", "Calculate the area for each face")
+ ],
+ default='MESH'
+ )
+ scene.muv_world_scale_uv_measure_only_selected = BoolProperty(
+ name="Only Selected",
+ description="Measure with only selected faces",
+ default=True,
+ )
+ scene.muv_world_scale_uv_apply_only_selected = BoolProperty(
+ name="Only Selected",
+ description="Apply to only selected faces",
+ default=True,
+ )
@classmethod
def del_props(cls, scene):
@@ -290,6 +323,9 @@ class _Properties:
del scene.muv_world_scale_uv_origin
del scene.muv_world_scale_uv_measure_tgt_texture
del scene.muv_world_scale_uv_apply_tgt_texture
+ del scene.muv_world_scale_uv_tgt_area_calc_method
+ del scene.muv_world_scale_uv_measure_only_selected
+ del scene.muv_world_scale_uv_apply_only_selected
@BlClassRegistry()
@@ -304,10 +340,15 @@ class MUV_OT_WorldScaleUV_Measure(bpy.types.Operator):
bl_description = "Measure face size for scale calculation"
bl_options = {'REGISTER', 'UNDO'}
- tgt_texture = StringProperty(
+ tgt_texture = EnumProperty(
name="Texture",
- description="Texture to be measured",
- default="[Average]"
+ description="Texture to be applied",
+ items=_get_target_textures
+ )
+ only_selected = BoolProperty(
+ name="Only Selected",
+ description="Measure with only selected faces",
+ default=True,
)
@classmethod
@@ -317,32 +358,44 @@ class MUV_OT_WorldScaleUV_Measure(bpy.types.Operator):
return True
return _is_valid_context(context)
+ @staticmethod
+ def setup_argument(ops, scene):
+ ops.tgt_texture = scene.muv_world_scale_uv_measure_tgt_texture
+ ops.only_selected = scene.muv_world_scale_uv_measure_only_selected
+
def execute(self, context):
sc = context.scene
obj = context.active_object
if self.tgt_texture == "[Average]":
- uv_area, mesh_area, density = _measure_wsuv_info(obj, 'AVERAGE')
+ uv_areas, mesh_areas, densities = _measure_wsuv_info(
+ obj, calc_method='MESH', tex_selection_method='AVERAGE',
+ only_selected=self.only_selected)
elif self.tgt_texture == "[Max]":
- uv_area, mesh_area, density = _measure_wsuv_info(obj, 'MAX')
+ uv_areas, mesh_areas, densities = _measure_wsuv_info(
+ obj, calc_method='MESH', tex_selection_method='MAX',
+ only_selected=self.only_selected)
elif self.tgt_texture == "[Min]":
- uv_area, mesh_area, density = _measure_wsuv_info(obj, 'MIN')
+ uv_areas, mesh_areas, densities = _measure_wsuv_info(
+ obj, calc_method='MESH', tex_selection_method='MIN',
+ only_selected=self.only_selected)
else:
texture = bpy.data.images[self.tgt_texture]
- uv_area, mesh_area, density = _measure_wsuv_info(
- obj, 'USER_SPECIFIED', texture.size)
- if not uv_area:
+ uv_areas, mesh_areas, densities = _measure_wsuv_info(
+ obj, calc_method='MESH', tex_selection_method='USER_SPECIFIED',
+ only_selected=self.only_selected, tex_size=texture.size)
+ if not uv_areas:
self.report({'WARNING'},
"Object must have more than one UV map and texture")
return {'CANCELLED'}
- sc.muv_world_scale_uv_src_uv_area = uv_area
- sc.muv_world_scale_uv_src_mesh_area = mesh_area
- sc.muv_world_scale_uv_src_density = density
+ sc.muv_world_scale_uv_src_uv_area = uv_areas[0]
+ sc.muv_world_scale_uv_src_mesh_area = mesh_areas[0]
+ sc.muv_world_scale_uv_src_density = densities[0]
self.report({'INFO'},
"UV Area: {0}, Mesh Area: {1}, Texel Density: {2}"
- .format(uv_area, mesh_area, density))
+ .format(uv_areas[0], mesh_areas[0], densities[0]))
return {'FINISHED'}
@@ -395,6 +448,21 @@ class MUV_OT_WorldScaleUV_ApplyManual(bpy.types.Operator):
default=True,
options={'HIDDEN', 'SKIP_SAVE'}
)
+ tgt_area_calc_method = EnumProperty(
+ name="Area Calculation Method",
+ description="How to calculate target area",
+ items=[
+ ('MESH', "Mesh", "Calculate the area using all faces in the mesh"),
+ ('UV ISLAND', "UV Island", "Calculate the area for each UV island"),
+ ('FACE', "Face", "Calculate the area for each face")
+ ],
+ default='MESH'
+ )
+ only_selected = BoolProperty(
+ name="Only Selected",
+ description="Apply to only selected faces",
+ default=True,
+ )
@classmethod
def poll(cls, context):
@@ -403,6 +471,16 @@ class MUV_OT_WorldScaleUV_ApplyManual(bpy.types.Operator):
return True
return _is_valid_context(context)
+ @staticmethod
+ def setup_argument(ops, scene):
+ ops.tgt_density = scene.muv_world_scale_uv_tgt_density
+ ops.tgt_texture_size = scene.muv_world_scale_uv_tgt_texture_size
+ ops.origin = scene.muv_world_scale_uv_origin
+ ops.show_dialog = False
+ ops.tgt_area_calc_method = \
+ scene.muv_world_scale_uv_tgt_area_calc_method
+ ops.only_selected = scene.muv_world_scale_uv_apply_only_selected
+
def __apply_manual(self, context):
obj = context.active_object
bm = bmesh.from_edit_mesh(obj.data)
@@ -411,27 +489,47 @@ class MUV_OT_WorldScaleUV_ApplyManual(bpy.types.Operator):
bm.edges.ensure_lookup_table()
bm.faces.ensure_lookup_table()
- tex_size = self.tgt_texture_size
- uv_area, _, density = _measure_wsuv_info(obj, 'USER_SPECIFIED',
- tex_size)
- if not uv_area:
+ if not bm.loops.layers.uv:
self.report({'WARNING'}, "Object must have more than one UV map")
return {'CANCELLED'}
+ uv_layer = bm.loops.layers.uv.verify()
+ tex_layer = common.find_texture_layer(bm)
+ faces_list = common.get_faces_list(
+ bm, self.tgt_area_calc_method, self.only_selected)
+
+ tex_size = self.tgt_texture_size
+
+ factors = []
+ for faces in faces_list:
+ uv_area, _, density = _measure_wsuv_info_from_faces(
+ obj, faces, uv_layer, tex_layer,
+ tex_selection_method='USER_SPECIFIED', tex_size=tex_size)
- tgt_density = self.tgt_density
- factor = tgt_density / density
+ if not uv_area:
+ self.report({'WARNING'},
+ "Object must have more than one UV map")
+ return {'CANCELLED'}
- _apply(context.active_object, self.origin, factor)
- self.report({'INFO'}, "Scaling factor: {0}".format(factor))
+ tgt_density = self.tgt_density
+ factor = tgt_density / density
+
+ _apply(faces, uv_layer, self.origin, factor)
+ factors.append(factor)
+
+ bmesh.update_edit_mesh(obj.data)
+ self.report({'INFO'}, "Scaling factor: {0}".format(factors))
return {'FINISHED'}
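The manual apply step now measures each face group separately and scales its UVs by factor = tgt_density / density; with a length-based texel density (an assumption, see the sketch above), scaling the UVs linearly by that factor scales the density by the same amount. The reshaped _apply() takes the faces and UV layer explicitly; a rough sketch of such a scale-about-origin helper, covering only the 'CENTER' origin, could look like this:

    # Rough sketch only; the add-on's _apply() also supports the other origin
    # modes selected by the 'origin' property.
    from mathutils import Vector

    def scale_uvs_about_center(faces, uv_layer, factor):
        uvs = [l[uv_layer].uv.copy() for f in faces for l in f.loops]
        if not uvs:
            return
        u_min = Vector((min(uv.x for uv in uvs), min(uv.y for uv in uvs)))
        u_max = Vector((max(uv.x for uv in uvs), max(uv.y for uv in uvs)))
        pivot = (u_min + u_max) / 2.0
        for f in faces:
            for l in f.loops:
                l[uv_layer].uv = pivot + (l[uv_layer].uv - pivot) * factor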
def draw(self, _):
layout = self.layout
- layout.prop(self, "tgt_density")
+ layout.label(text="Target:")
+ layout.prop(self, "only_selected")
layout.prop(self, "tgt_texture_size")
+ layout.prop(self, "tgt_density")
layout.prop(self, "origin")
+ layout.prop(self, "tgt_area_calc_method")
layout.separator()
@@ -500,10 +598,25 @@ class MUV_OT_WorldScaleUV_ApplyScalingDensity(bpy.types.Operator):
default=True,
options={'HIDDEN', 'SKIP_SAVE'}
)
- tgt_texture = StringProperty(
+ tgt_texture = EnumProperty(
name="Texture",
description="Texture to be applied",
- default="[Average]"
+ items=_get_target_textures
+ )
+ tgt_area_calc_method = EnumProperty(
+ name="Area Calculation Method",
+ description="How to calculate target area",
+ items=[
+ ('MESH', "Mesh", "Calculate area from all faces in the mesh"),
+ ('UV ISLAND', "UV Island", "Calculate area for each UV island"),
+ ('FACE', "Face", "Calculate area for each face")
+ ],
+ default='MESH'
+ )
+ only_selected = BoolProperty(
+ name="Only Selected",
+ description="Apply to only selected faces",
+ default=True,
)
@classmethod
@@ -513,6 +626,19 @@ class MUV_OT_WorldScaleUV_ApplyScalingDensity(bpy.types.Operator):
return True
return _is_valid_context(context)
+ @staticmethod
+ def setup_argument(ops, scene):
+ ops.tgt_scaling_factor = \
+ scene.muv_world_scale_uv_tgt_scaling_factor
+ ops.origin = scene.muv_world_scale_uv_origin
+ ops.src_density = scene.muv_world_scale_uv_src_density
+ ops.same_density = False
+ ops.show_dialog = False
+ ops.tgt_texture = scene.muv_world_scale_uv_apply_tgt_texture
+ ops.tgt_area_calc_method = \
+ scene.muv_world_scale_uv_tgt_area_calc_method
+ ops.only_selected = scene.muv_world_scale_uv_apply_only_selected
+
def __apply_scaling_density(self, context):
obj = context.active_object
bm = bmesh.from_edit_mesh(obj.data)
@@ -521,26 +647,49 @@ class MUV_OT_WorldScaleUV_ApplyScalingDensity(bpy.types.Operator):
bm.edges.ensure_lookup_table()
bm.faces.ensure_lookup_table()
- if self.tgt_texture == "[Average]":
- uv_area, _, density = _measure_wsuv_info(obj, 'AVERAGE')
- elif self.tgt_texture == "[Max]":
- uv_area, _, density = _measure_wsuv_info(obj, 'MAX')
- elif self.tgt_texture == "[Min]":
- uv_area, _, density = _measure_wsuv_info(obj, 'MIN')
- else:
- tgt_texture = bpy.data.images[self.tgt_texture]
- uv_area, _, density = _measure_wsuv_info(obj, 'USER_SPECIFIED',
- tgt_texture.size)
- if not uv_area:
- self.report({'WARNING'},
- "Object must have more than one UV map and texture")
+ if not bm.loops.layers.uv:
+ self.report({'WARNING'}, "Object must have more than one UV map")
return {'CANCELLED'}
+ uv_layer = bm.loops.layers.uv.verify()
+ tex_layer = common.find_texture_layer(bm)
+ faces_list = common.get_faces_list(
+ bm, self.tgt_area_calc_method, self.only_selected)
+
+ factors = []
+ for faces in faces_list:
+ if self.tgt_texture == "[Average]":
+ uv_area, _, density = _measure_wsuv_info_from_faces(
+ obj, faces, uv_layer, tex_layer,
+ tex_selection_method='AVERAGE')
+ elif self.tgt_texture == "[Max]":
+ uv_area, _, density = _measure_wsuv_info_from_faces(
+ obj, faces, uv_layer, tex_layer,
+ tex_selection_method='MAX')
+ elif self.tgt_texture == "[Min]":
+ uv_area, _, density = _measure_wsuv_info_from_faces(
+ obj, faces, uv_layer, tex_layer,
+ tex_selection_method='MIN')
+ else:
+ tgt_texture = bpy.data.images[self.tgt_texture]
+ uv_area, _, density = _measure_wsuv_info_from_faces(
+ obj, faces, uv_layer, tex_layer,
+ tex_selection_method='USER_SPECIFIED',
+ tex_size=tgt_texture.size)
+
+ if not uv_area:
+ self.report({'WARNING'},
+ "Object must have more than one UV map and "
+ "texture")
+ return {'CANCELLED'}
- tgt_density = self.src_density * self.tgt_scaling_factor
- factor = tgt_density / density
+ tgt_density = self.src_density * self.tgt_scaling_factor
+ factor = tgt_density / density
- _apply(context.active_object, self.origin, factor)
- self.report({'INFO'}, "Scaling factor: {0}".format(factor))
+ _apply(faces, uv_layer, self.origin, factor)
+ factors.append(factor)
+
+ bmesh.update_edit_mesh(obj.data)
+ self.report({'INFO'}, "Scaling factor: {0}".format(factors))
return {'FINISHED'}
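Switching tgt_texture from a StringProperty to an EnumProperty turns the free-text field into a drop-down. The _get_target_textures callback referenced in the items argument is not shown in this patch; a plausible sketch (an assumption, not the add-on's actual code) would list the three pseudo-entries plus every loaded image:

    import bpy

    def _get_target_textures(_, __):
        # hypothetical callback; the real one may cache or filter differently
        items = [
            ("[Average]", "[Average]", "Average size of all textures"),
            ("[Max]", "[Max]", "Largest texture"),
            ("[Min]", "[Min]", "Smallest texture"),
        ]
        items.extend((img.name, img.name, "") for img in bpy.data.images)
        # Blender requires strings returned from a dynamic items callback to
        # stay referenced, so a real implementation usually keeps them cached.
        return items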
@@ -554,9 +703,13 @@ class MUV_OT_WorldScaleUV_ApplyScalingDensity(bpy.types.Operator):
layout.separator()
+ layout.label(text="Target:")
if not self.same_density:
layout.prop(self, "tgt_scaling_factor")
+ layout.prop(self, "only_selected")
+ layout.prop(self, "tgt_texture")
layout.prop(self, "origin")
+ layout.prop(self, "tgt_area_calc_method")
layout.separator()
@@ -640,10 +793,25 @@ class MUV_OT_WorldScaleUV_ApplyProportionalToMesh(bpy.types.Operator):
default=True,
options={'HIDDEN', 'SKIP_SAVE'}
)
- tgt_texture = StringProperty(
+ tgt_texture = EnumProperty(
name="Texture",
description="Texture to be applied",
- default="[Average]"
+ items=_get_target_textures
+ )
+ tgt_area_calc_method = EnumProperty(
+ name="Area Calculation Method",
+ description="How to calculate target area",
+ items=[
+ ('MESH', "Mesh", "Calculate area from all faces in the mesh"),
+ ('UV ISLAND', "UV Island", "Calculate area for each UV island"),
+ ('FACE', "Face", "Calculate area for each face")
+ ],
+ default='MESH'
+ )
+ only_selected = BoolProperty(
+ name="Only Selected",
+ description="Apply to only selected faces",
+ default=True,
)
@classmethod
@@ -653,6 +821,18 @@ class MUV_OT_WorldScaleUV_ApplyProportionalToMesh(bpy.types.Operator):
return True
return _is_valid_context(context)
+ @staticmethod
+ def setup_argument(ops, scene):
+ ops.origin = scene.muv_world_scale_uv_origin
+ ops.src_density = scene.muv_world_scale_uv_src_density
+ ops.src_uv_area = scene.muv_world_scale_uv_src_uv_area
+ ops.src_mesh_area = scene.muv_world_scale_uv_src_mesh_area
+ ops.show_dialog = False
+ ops.tgt_texture = scene.muv_world_scale_uv_apply_tgt_texture
+ ops.tgt_area_calc_method = \
+ scene.muv_world_scale_uv_tgt_area_calc_method
+ ops.only_selected = scene.muv_world_scale_uv_apply_only_selected
+
def __apply_proportional_to_mesh(self, context):
obj = context.active_object
bm = bmesh.from_edit_mesh(obj.data)
@@ -661,28 +841,49 @@ class MUV_OT_WorldScaleUV_ApplyProportionalToMesh(bpy.types.Operator):
bm.edges.ensure_lookup_table()
bm.faces.ensure_lookup_table()
- if self.tgt_texture == "[Average]":
- uv_area, mesh_area, density = _measure_wsuv_info(obj, 'AVERAGE')
- elif self.tgt_texture == "[Max]":
- uv_area, mesh_area, density = _measure_wsuv_info(obj, 'MAX')
- elif self.tgt_texture == "[Min]":
- uv_area, mesh_area, density = _measure_wsuv_info(obj, 'MIN')
- else:
- tgt_texture = bpy.data.images[self.tgt_texture]
- uv_area, mesh_area, density = _measure_wsuv_info(
- obj, 'USER_SPECIFIED', tgt_texture.size)
- if not uv_area:
- self.report({'WARNING'},
- "Object must have more than one UV map and texture")
+ if not bm.loops.layers.uv:
+ self.report({'WARNING'}, "Object must have more than one UV map")
return {'CANCELLED'}
-
- tgt_density = self.src_density * sqrt(mesh_area) / sqrt(
- self.src_mesh_area)
-
- factor = tgt_density / density
-
- _apply(context.active_object, self.origin, factor)
- self.report({'INFO'}, "Scaling factor: {0}".format(factor))
+ uv_layer = bm.loops.layers.uv.verify()
+ tex_layer = common.find_texture_layer(bm)
+ faces_list = common.get_faces_list(
+ bm, self.tgt_area_calc_method, self.only_selected)
+
+ factors = []
+ for faces in faces_list:
+ if self.tgt_texture == "[Average]":
+ uv_area, mesh_area, density = _measure_wsuv_info_from_faces(
+ obj, faces, uv_layer, tex_layer,
+ tex_selection_method='AVERAGE')
+ elif self.tgt_texture == "[Max]":
+ uv_area, mesh_area, density = _measure_wsuv_info_from_faces(
+ obj, faces, uv_layer, tex_layer,
+ tex_selection_method='MAX')
+ elif self.tgt_texture == "[Min]":
+ uv_area, mesh_area, density = _measure_wsuv_info_from_faces(
+ obj, faces, uv_layer, tex_layer,
+ tex_selection_method='MIN')
+ else:
+ tgt_texture = bpy.data.images[self.tgt_texture]
+ uv_area, mesh_area, density = _measure_wsuv_info_from_faces(
+ obj, faces, uv_layer, tex_layer,
+ tex_selection_method='USER_SPECIFIED',
+ tex_size=tgt_texture.size)
+ if not uv_area:
+ self.report({'WARNING'},
+ "Object must have more than one UV map and "
+ "texture")
+ return {'CANCELLED'}
+
+ tgt_density = self.src_density * sqrt(mesh_area) / sqrt(
+ self.src_mesh_area)
+ factor = tgt_density / density
+
+ _apply(faces, uv_layer, self.origin, factor)
+ factors.append(factor)
+
+ bmesh.update_edit_mesh(obj.data)
+ self.report({'INFO'}, "Scaling factor: {0}".format(factors))
return {'FINISHED'}
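A quick worked example of the proportional-to-mesh rule used above, with made-up numbers: the target density grows with the square root of the mesh-area ratio, so quadrupling the surface area doubles the density.

    from math import sqrt

    src_density = 100.0    # measured source density (example value)
    src_mesh_area = 4.0    # measured source mesh area (example value)
    mesh_area = 16.0       # mesh area of the faces being retargeted

    tgt_density = src_density * sqrt(mesh_area) / sqrt(src_mesh_area)
    assert tgt_density == 200.0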
@@ -697,7 +898,12 @@ class MUV_OT_WorldScaleUV_ApplyProportionalToMesh(bpy.types.Operator):
col.enabled = False
layout.separator()
+
+ layout.label(text="Target:")
+ layout.prop(self, "only_selected")
layout.prop(self, "origin")
+ layout.prop(self, "tgt_area_calc_method")
+ layout.prop(self, "tgt_texture")
layout.separator()
diff --git a/magic_uv/preferences.py b/magic_uv/preferences.py
index 6d66b308..926ec728 100644
--- a/magic_uv/preferences.py
+++ b/magic_uv/preferences.py
@@ -20,8 +20,8 @@
__author__ = "Nutti <nutti.metro@gmail.com>"
__status__ = "production"
-__version__ = "6.2"
-__date__ = "31 Jul 2019"
+__version__ = "6.3"
+__date__ = "10 Aug 2020"
import bpy
from bpy.props import (
@@ -33,6 +33,7 @@ from bpy.props import (
from bpy.types import AddonPreferences
from . import common
+from .op.clip_uv import MUV_OT_ClipUV
from .op.flip_rotate_uv import MUV_OT_FlipRotateUV
from .op.mirror_uv import MUV_OT_MirrorUV
from .op.move_uv import MUV_OT_MoveUV
@@ -122,13 +123,17 @@ def image_uvs_menu_fn(self, context):
sc = context.scene
layout.separator()
- # Copy/Paste UV (on UV/Image Editor)
layout.label(text="Copy/Paste UV", icon=compat.icon('IMAGE'))
+ # Copy/Paste UV (on UV/Image Editor)
layout.menu(MUV_MT_CopyPasteUV_UVEdit.bl_idname, text="Copy/Paste UV")
layout.separator()
- # Pack UV
layout.label(text="UV Manipulation", icon=compat.icon('IMAGE'))
+ # Clip UV
+ ops = layout.operator(MUV_OT_ClipUV.bl_idname, text="Clip UV")
+ ops.clip_uv_range_max = sc.muv_clip_uv_range_max
+ ops.clip_uv_range_min = sc.muv_clip_uv_range_min
+ # Pack UV
ops = layout.operator(MUV_OT_PackUV.bl_idname, text="Pack UV")
ops.allowable_center_deviation = sc.muv_pack_uv_allowable_center_deviation
ops.allowable_size_deviation = sc.muv_pack_uv_allowable_size_deviation
@@ -143,8 +148,8 @@ def image_uvs_menu_fn(self, context):
layout.menu(MUV_MT_AlignUV.bl_idname, text="Align UV")
layout.separator()
- # Align UV Cursor
layout.label(text="Editor Enhancement", icon=compat.icon('IMAGE'))
+ # Align UV Cursor
layout.menu(MUV_MT_AlignUVCursor.bl_idname, text="Align UV Cursor")
# UV Bounding Box
layout.prop(sc, "muv_uv_bounding_box_show", text="UV Bounding Box")
diff --git a/magic_uv/properites.py b/magic_uv/properites.py
index e553816b..b269cbed 100644
--- a/magic_uv/properites.py
+++ b/magic_uv/properites.py
@@ -20,8 +20,8 @@
__author__ = "Nutti <nutti.metro@gmail.com>"
__status__ = "production"
-__version__ = "6.2"
-__date__ = "31 Jul 2019"
+__version__ = "6.3"
+__date__ = "10 Aug 2020"
from .utils.property_class_registry import PropertyClassRegistry
diff --git a/magic_uv/ui/IMAGE_MT_uvs.py b/magic_uv/ui/IMAGE_MT_uvs.py
index 74e796cc..00d95d9e 100644
--- a/magic_uv/ui/IMAGE_MT_uvs.py
+++ b/magic_uv/ui/IMAGE_MT_uvs.py
@@ -20,8 +20,8 @@
__author__ = "Nutti <nutti.metro@gmail.com>"
__status__ = "production"
-__version__ = "6.2"
-__date__ = "31 Jul 2019"
+__version__ = "6.3"
+__date__ = "10 Aug 2020"
import bpy
@@ -39,7 +39,10 @@ from ..op.select_uv import (
MUV_OT_SelectUV_SelectOverlapped,
MUV_OT_SelectUV_SelectFlipped,
)
-from ..op.uv_inspection import MUV_OT_UVInspection_Update
+from ..op.uv_inspection import (
+ MUV_OT_UVInspection_Update,
+ MUV_OT_UVInspection_PaintUVIsland,
+)
from ..utils.bl_class_registry import BlClassRegistry
@@ -184,5 +187,8 @@ class MUV_MT_UVInspection(bpy.types.Menu):
layout = self.layout
sc = context.scene
- layout.prop(sc, "muv_uv_inspection_show", text="UV Inspection")
+ layout.prop(sc, "muv_uv_inspection_show",
+ text="Show Overlapped/Flipped")
layout.operator(MUV_OT_UVInspection_Update.bl_idname, text="Update")
+ layout.separator()
+ layout.operator(MUV_OT_UVInspection_PaintUVIsland.bl_idname)
diff --git a/magic_uv/ui/VIEW3D_MT_object.py b/magic_uv/ui/VIEW3D_MT_object.py
index b4fca522..f34c74f9 100644
--- a/magic_uv/ui/VIEW3D_MT_object.py
+++ b/magic_uv/ui/VIEW3D_MT_object.py
@@ -20,8 +20,8 @@
__author__ = "Nutti <nutti.metro@gmail.com>"
__status__ = "production"
-__version__ = "6.2"
-__date__ = "31 Jul 2019"
+__version__ = "6.3"
+__date__ = "10 Aug 2020"
import bpy
diff --git a/magic_uv/ui/VIEW3D_MT_uv_map.py b/magic_uv/ui/VIEW3D_MT_uv_map.py
index 853d1855..7ab50ace 100644
--- a/magic_uv/ui/VIEW3D_MT_uv_map.py
+++ b/magic_uv/ui/VIEW3D_MT_uv_map.py
@@ -20,8 +20,8 @@
__author__ = "Nutti <nutti.metro@gmail.com>"
__status__ = "production"
-__version__ = "6.2"
-__date__ = "31 Jul 2019"
+__version__ = "6.3"
+__date__ = "10 Aug 2020"
import bpy.utils
@@ -147,24 +147,25 @@ class MUV_MT_WorldScaleUV(bpy.types.Menu):
layout = self.layout
sc = context.scene
- layout.operator(MUV_OT_WorldScaleUV_Measure.bl_idname,
- text="Measure")
+ layout.operator(MUV_OT_WorldScaleUV_Measure.bl_idname, text="Measure")
- layout.operator(MUV_OT_WorldScaleUV_ApplyManual.bl_idname,
- text="Apply (Manual)")
+ ops = layout.operator(MUV_OT_WorldScaleUV_ApplyManual.bl_idname,
+ text="Apply (Manual)")
+ ops.show_dialog = True
ops = layout.operator(
MUV_OT_WorldScaleUV_ApplyScalingDensity.bl_idname,
text="Apply (Same Desity)")
ops.src_density = sc.muv_world_scale_uv_src_density
ops.same_density = True
+ ops.show_dialog = True
ops = layout.operator(
MUV_OT_WorldScaleUV_ApplyScalingDensity.bl_idname,
text="Apply (Scaling Desity)")
ops.src_density = sc.muv_world_scale_uv_src_density
ops.same_density = False
- ops.tgt_scaling_factor = sc.muv_world_scale_uv_tgt_scaling_factor
+ ops.show_dialog = True
ops = layout.operator(
MUV_OT_WorldScaleUV_ApplyProportionalToMesh.bl_idname,
@@ -172,7 +173,7 @@ class MUV_MT_WorldScaleUV(bpy.types.Menu):
ops.src_density = sc.muv_world_scale_uv_src_density
ops.src_uv_area = sc.muv_world_scale_uv_src_uv_area
ops.src_mesh_area = sc.muv_world_scale_uv_src_mesh_area
- ops.origin = sc.muv_world_scale_uv_origin
+ ops.show_dialog = True
@BlClassRegistry()
diff --git a/magic_uv/ui/__init__.py b/magic_uv/ui/__init__.py
index 50049251..bb16a847 100644
--- a/magic_uv/ui/__init__.py
+++ b/magic_uv/ui/__init__.py
@@ -20,8 +20,8 @@
__author__ = "Nutti <nutti.metro@gmail.com>"
__status__ = "production"
-__version__ = "6.2"
-__date__ = "31 Jul 2019"
+__version__ = "6.3"
+__date__ = "10 Aug 2020"
if "bpy" in locals():
import importlib
diff --git a/magic_uv/ui/uvedit_copy_paste_uv.py b/magic_uv/ui/uvedit_copy_paste_uv.py
index 987a24a0..211737c8 100644
--- a/magic_uv/ui/uvedit_copy_paste_uv.py
+++ b/magic_uv/ui/uvedit_copy_paste_uv.py
@@ -20,8 +20,8 @@
__author__ = "Nutti <nutti.metro@gmail.com>"
__status__ = "production"
-__version__ = "6.2"
-__date__ = "31 Jul 2019"
+__version__ = "6.3"
+__date__ = "10 Aug 2020"
import bpy
diff --git a/magic_uv/ui/uvedit_editor_enhancement.py b/magic_uv/ui/uvedit_editor_enhancement.py
index 6639650c..f98e5193 100644
--- a/magic_uv/ui/uvedit_editor_enhancement.py
+++ b/magic_uv/ui/uvedit_editor_enhancement.py
@@ -20,8 +20,8 @@
__author__ = "Nutti <nutti.metro@gmail.com>"
__status__ = "production"
-__version__ = "6.2"
-__date__ = "31 Jul 2019"
+__version__ = "6.3"
+__date__ = "10 Aug 2020"
import bpy
@@ -32,6 +32,7 @@ from ..op.uv_bounding_box import (
from ..op.uv_inspection import (
MUV_OT_UVInspection_Render,
MUV_OT_UVInspection_Update,
+ MUV_OT_UVInspection_PaintUVIsland,
)
from ..utils.bl_class_registry import BlClassRegistry
from ..utils import compatibility as compat
@@ -143,3 +144,5 @@ class MUV_PT_UVEdit_EditorEnhancement(bpy.types.Panel):
row.prop(sc, "muv_uv_inspection_show_flipped")
row = box.row()
row.prop(sc, "muv_uv_inspection_show_mode")
+ box.separator()
+ box.operator(MUV_OT_UVInspection_PaintUVIsland.bl_idname)
diff --git a/magic_uv/ui/uvedit_uv_manipulation.py b/magic_uv/ui/uvedit_uv_manipulation.py
index 5589b73e..79a1731a 100644
--- a/magic_uv/ui/uvedit_uv_manipulation.py
+++ b/magic_uv/ui/uvedit_uv_manipulation.py
@@ -20,8 +20,8 @@
__author__ = "Nutti <nutti.metro@gmail.com>"
__status__ = "production"
-__version__ = "6.2"
-__date__ = "31 Jul 2019"
+__version__ = "6.3"
+__date__ = "10 Aug 2020"
import bpy
@@ -38,6 +38,7 @@ from ..op.select_uv import (
MUV_OT_SelectUV_SelectFlipped,
)
from ..op.pack_uv import MUV_OT_PackUV
+from ..op.clip_uv import MUV_OT_ClipUV
from ..utils.bl_class_registry import BlClassRegistry
from ..utils import compatibility as compat
@@ -129,3 +130,16 @@ class MUV_PT_UVEdit_UVManipulation(bpy.types.Panel):
box.prop(sc, "muv_pack_uv_allowable_center_deviation", text="")
box.label(text="Allowable Size Deviation:")
box.prop(sc, "muv_pack_uv_allowable_size_deviation", text="")
+
+ box = layout.box()
+ box.prop(sc, "muv_clip_uv_enabled", text="Clip UV")
+ if sc.muv_clip_uv_enabled:
+ ops = box.operator(MUV_OT_ClipUV.bl_idname, text="Clip UV")
+ ops.clip_uv_range_max = sc.muv_clip_uv_range_max
+ ops.clip_uv_range_min = sc.muv_clip_uv_range_min
+ box.label(text="Range:")
+ row = box.row()
+ col = row.column()
+ col.prop(sc, "muv_clip_uv_range_max", text="Max")
+ col = row.column()
+ col.prop(sc, "muv_clip_uv_range_min", text="Min")
diff --git a/magic_uv/ui/view3d_copy_paste_uv_editmode.py b/magic_uv/ui/view3d_copy_paste_uv_editmode.py
index 041f279d..0c7273a3 100644
--- a/magic_uv/ui/view3d_copy_paste_uv_editmode.py
+++ b/magic_uv/ui/view3d_copy_paste_uv_editmode.py
@@ -20,8 +20,8 @@
__author__ = "Nutti <nutti.metro@gmail.com>"
__status__ = "production"
-__version__ = "6.2"
-__date__ = "31 Jul 2019"
+__version__ = "6.3"
+__date__ = "10 Aug 2020"
import bpy
diff --git a/magic_uv/ui/view3d_copy_paste_uv_objectmode.py b/magic_uv/ui/view3d_copy_paste_uv_objectmode.py
index 21d2bc4c..b2a33e9a 100644
--- a/magic_uv/ui/view3d_copy_paste_uv_objectmode.py
+++ b/magic_uv/ui/view3d_copy_paste_uv_objectmode.py
@@ -20,8 +20,8 @@
__author__ = "Nutti <nutti.metro@gmail.com>"
__status__ = "production"
-__version__ = "6.2"
-__date__ = "31 Jul 2019"
+__version__ = "6.3"
+__date__ = "10 Aug 2020"
import bpy
diff --git a/magic_uv/ui/view3d_uv_manipulation.py b/magic_uv/ui/view3d_uv_manipulation.py
index 3a694008..1d10eb65 100644
--- a/magic_uv/ui/view3d_uv_manipulation.py
+++ b/magic_uv/ui/view3d_uv_manipulation.py
@@ -20,8 +20,8 @@
__author__ = "Nutti <nutti.metro@gmail.com>"
__status__ = "production"
-__version__ = "6.2"
-__date__ = "31 Jul 2019"
+__version__ = "6.3"
+__date__ = "10 Aug 2020"
import bpy
@@ -113,17 +113,13 @@ class MUV_PT_View3D_UVManipulation(bpy.types.Panel):
box.prop(sc, "muv_world_scale_uv_mode", text="")
if sc.muv_world_scale_uv_mode == 'MANUAL':
- sp = compat.layout_split(box, 0.4)
- col = sp.column(align=True)
- col.label(text="Target:")
- sp = compat.layout_split(sp, 1.0)
- col = sp.column(align=True)
- ops = col.operator(MUV_OT_WorldScaleUV_ApplyManual.bl_idname,
+ box.label(text="Target:")
+ row = box.row(align=True)
+ ops = row.operator(MUV_OT_WorldScaleUV_ApplyManual.bl_idname,
text="Apply")
- ops.tgt_density = sc.muv_world_scale_uv_tgt_density
- ops.tgt_texture_size = sc.muv_world_scale_uv_tgt_texture_size
- ops.origin = sc.muv_world_scale_uv_origin
+ MUV_OT_WorldScaleUV_ApplyManual.setup_argument(ops, sc)
ops.show_dialog = False
+ row.prop(sc, "muv_world_scale_uv_apply_only_selected")
sp = compat.layout_split(box, 0.5)
col = sp.column()
col.prop(sc, "muv_world_scale_uv_tgt_texture_size",
@@ -133,16 +129,15 @@ class MUV_PT_View3D_UVManipulation(bpy.types.Panel):
col.label(text="Density:")
col.prop(sc, "muv_world_scale_uv_tgt_density")
box.prop(sc, "muv_world_scale_uv_origin", text="Origin")
+ box.prop(sc, "muv_world_scale_uv_tgt_area_calc_method")
elif sc.muv_world_scale_uv_mode == 'SAME_DENSITY':
- sp = compat.layout_split(box, 0.4)
- col = sp.column(align=True)
- col.label(text="Source:")
- sp = compat.layout_split(sp, 1.0)
- col = sp.column(align=True)
- ops = col.operator(MUV_OT_WorldScaleUV_Measure.bl_idname,
+ box.label(text="Source:")
+ row = box.row(align=True)
+ ops = row.operator(MUV_OT_WorldScaleUV_Measure.bl_idname,
text="Measure")
- ops.tgt_texture = sc.muv_world_scale_uv_measure_tgt_texture
+ MUV_OT_WorldScaleUV_Measure.setup_argument(ops, sc)
+ row.prop(sc, "muv_world_scale_uv_measure_only_selected")
col = box.column(align=True)
col.prop(sc, "muv_world_scale_uv_measure_tgt_texture")
sp = compat.layout_split(box, 0.7)
@@ -154,30 +149,27 @@ class MUV_PT_View3D_UVManipulation(bpy.types.Panel):
col.label(text="px2/cm2")
box.separator()
- sp = compat.layout_split(box, 0.4)
- col = sp.column(align=True)
- col.label(text="Target:")
- sp = compat.layout_split(sp, 1.0)
- col = sp.column(align=True)
- ops = col.operator(
+
+ box.label(text="Target:")
+ row = box.row(align=True)
+ ops = row.operator(
MUV_OT_WorldScaleUV_ApplyScalingDensity.bl_idname,
text="Apply")
- ops.src_density = sc.muv_world_scale_uv_src_density
- ops.origin = sc.muv_world_scale_uv_origin
+ MUV_OT_WorldScaleUV_ApplyScalingDensity.setup_argument(ops, sc)
ops.same_density = True
ops.show_dialog = False
- ops.tgt_texture = sc.muv_world_scale_uv_apply_tgt_texture
+ row.prop(sc, "muv_world_scale_uv_apply_only_selected")
+ box.prop(sc, "muv_world_scale_uv_apply_tgt_texture")
box.prop(sc, "muv_world_scale_uv_origin", text="Origin")
+ box.prop(sc, "muv_world_scale_uv_tgt_area_calc_method")
elif sc.muv_world_scale_uv_mode == 'SCALING_DENSITY':
- sp = compat.layout_split(box, 0.4)
- col = sp.column(align=True)
- col.label(text="Source:")
- sp = compat.layout_split(sp, 1.0)
- col = sp.column(align=True)
- ops = col.operator(MUV_OT_WorldScaleUV_Measure.bl_idname,
+ box.label(text="Source:")
+ row = box.row(align=True)
+ ops = row.operator(MUV_OT_WorldScaleUV_Measure.bl_idname,
text="Measure")
- ops.tgt_texture = sc.muv_world_scale_uv_measure_tgt_texture
+ MUV_OT_WorldScaleUV_Measure.setup_argument(ops, sc)
+ row.prop(sc, "muv_world_scale_uv_measure_only_selected")
col = box.column(align=True)
col.prop(sc, "muv_world_scale_uv_measure_tgt_texture")
sp = compat.layout_split(box, 0.7)
@@ -189,34 +181,29 @@ class MUV_PT_View3D_UVManipulation(bpy.types.Panel):
col.label(text="px2/cm2")
box.separator()
- sp = compat.layout_split(box, 0.4)
- col = sp.column(align=True)
- col.label(text="Target:")
- sp = compat.layout_split(sp, 1.0)
- col = sp.column(align=True)
- ops = col.operator(
+
+ box.label(text="Target:")
+ row = box.row(align=True)
+ ops = row.operator(
MUV_OT_WorldScaleUV_ApplyScalingDensity.bl_idname,
text="Apply")
- ops.src_density = sc.muv_world_scale_uv_src_density
- ops.origin = sc.muv_world_scale_uv_origin
+ MUV_OT_WorldScaleUV_ApplyScalingDensity.setup_argument(ops, sc)
ops.same_density = False
ops.show_dialog = False
- ops.tgt_scaling_factor = \
- sc.muv_world_scale_uv_tgt_scaling_factor
- ops.tgt_texture = sc.muv_world_scale_uv_apply_tgt_texture
+ row.prop(sc, "muv_world_scale_uv_apply_only_selected")
+ box.prop(sc, "muv_world_scale_uv_apply_tgt_texture")
box.prop(sc, "muv_world_scale_uv_tgt_scaling_factor",
text="Scaling Factor")
box.prop(sc, "muv_world_scale_uv_origin", text="Origin")
+ box.prop(sc, "muv_world_scale_uv_tgt_area_calc_method")
elif sc.muv_world_scale_uv_mode == 'PROPORTIONAL_TO_MESH':
- sp = compat.layout_split(box, 0.4)
- col = sp.column(align=True)
- col.label(text="Source:")
- sp = compat.layout_split(sp, 1.0)
- col = sp.column(align=True)
- ops = col.operator(MUV_OT_WorldScaleUV_Measure.bl_idname,
+ box.label(text="Source:")
+ row = box.row(align=True)
+ ops = row.operator(MUV_OT_WorldScaleUV_Measure.bl_idname,
text="Measure")
- ops.tgt_texture = sc.muv_world_scale_uv_measure_tgt_texture
+ MUV_OT_WorldScaleUV_Measure.setup_argument(ops, sc)
+ row.prop(sc, "muv_world_scale_uv_measure_only_selected")
col = box.column(align=True)
col.prop(sc, "muv_world_scale_uv_measure_tgt_texture")
sp = compat.layout_split(box, 0.7)
@@ -234,24 +221,19 @@ class MUV_PT_View3D_UVManipulation(bpy.types.Panel):
col.enabled = False
box.separator()
- sp = compat.layout_split(box, 0.4)
- col = sp.column(align=True)
- col.label(text="Target:")
- sp = compat.layout_split(sp, 1.0)
- col = sp.column(align=True)
- ops = col.operator(
+
+ box.label(text="Target:")
+ row = box.row(align=True)
+ ops = row.operator(
MUV_OT_WorldScaleUV_ApplyProportionalToMesh.bl_idname,
text="Apply")
- ops.src_density = sc.muv_world_scale_uv_src_density
- ops.src_uv_area = sc.muv_world_scale_uv_src_uv_area
- ops.src_mesh_area = sc.muv_world_scale_uv_src_mesh_area
- ops.origin = sc.muv_world_scale_uv_origin
+ MUV_OT_WorldScaleUV_ApplyProportionalToMesh.setup_argument(
+ ops, sc)
ops.show_dialog = False
- ops.tgt_texture = sc.muv_world_scale_uv_apply_tgt_texture
+ row.prop(sc, "muv_world_scale_uv_apply_only_selected")
+ box.prop(sc, "muv_world_scale_uv_apply_tgt_texture")
box.prop(sc, "muv_world_scale_uv_origin", text="Origin")
-
- col = box.column(align=True)
- col.prop(sc, "muv_world_scale_uv_apply_tgt_texture")
+ box.prop(sc, "muv_world_scale_uv_tgt_area_calc_method")
box = layout.box()
box.prop(sc, "muv_preserve_uv_aspect_enabled",
diff --git a/magic_uv/ui/view3d_uv_mapping.py b/magic_uv/ui/view3d_uv_mapping.py
index 0e31620b..4344adb7 100644
--- a/magic_uv/ui/view3d_uv_mapping.py
+++ b/magic_uv/ui/view3d_uv_mapping.py
@@ -20,8 +20,8 @@
__author__ = "Nutti <nutti.metro@gmail.com>"
__status__ = "production"
-__version__ = "6.2"
-__date__ = "31 Jul 2019"
+__version__ = "6.3"
+__date__ = "10 Aug 2020"
import bpy
diff --git a/magic_uv/updater.py b/magic_uv/updater.py
index d522c009..8d610b16 100644
--- a/magic_uv/updater.py
+++ b/magic_uv/updater.py
@@ -20,8 +20,8 @@
__author__ = "Nutti <nutti.metro@gmail.com>"
__status__ = "production"
-__version__ = "6.2"
-__date__ = "31 Jul 2019"
+__version__ = "6.3"
+__date__ = "10 Aug 2020"
import os
diff --git a/magic_uv/utils/__init__.py b/magic_uv/utils/__init__.py
index 0e6ef744..c96b9225 100644
--- a/magic_uv/utils/__init__.py
+++ b/magic_uv/utils/__init__.py
@@ -20,8 +20,8 @@
__author__ = "Nutti <nutti.metro@gmail.com>"
__status__ = "production"
-__version__ = "6.2"
-__date__ = "31 Jul 2019"
+__version__ = "6.3"
+__date__ = "10 Aug 2020"
if "bpy" in locals():
import importlib
diff --git a/magic_uv/utils/addon_updater.py b/magic_uv/utils/addon_updater.py
index 2f3d0c0f..5df59fd4 100644
--- a/magic_uv/utils/addon_updater.py
+++ b/magic_uv/utils/addon_updater.py
@@ -20,8 +20,8 @@
__author__ = "Nutti <nutti.metro@gmail.com>"
__status__ = "production"
-__version__ = "6.2"
-__date__ = "31 Jul 2019"
+__version__ = "6.3"
+__date__ = "10 Aug 2020"
from threading import Lock
import urllib
@@ -60,7 +60,7 @@ def _request(url, json_decode=True):
return json.JSONDecoder().decode(data.decode())
except Exception as e:
raise RuntimeError("API response has invalid JSON format ({})"
- .format(str(e.reason)))
+ .format(str(e)))
return data.decode()
@@ -153,7 +153,7 @@ def _compare_version(ver1, ver2):
if v1[idx] > v2[idx]:
return 1 # v1 > v2
- elif v1[idx] < v2[idx]:
+ if v1[idx] < v2[idx]:
return -1 # v1 < v2
return comp(v1, v2, idx + 1)
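The elif after a return was redundant, hence the plain if. For reference, a standalone sketch of the recursive comparison this helper performs (how versions of different lengths compare is an assumption here):

    def compare_version(v1, v2):
        def comp(a, b, idx):
            if len(a) == idx and len(b) == idx:
                return 0     # equal
            if len(a) == idx:
                return -1    # a is a prefix of b, treat as smaller
            if len(b) == idx:
                return 1
            if a[idx] > b[idx]:
                return 1
            if a[idx] < b[idx]:
                return -1
            return comp(a, b, idx + 1)
        return comp(tuple(v1), tuple(v2), 0)

    assert compare_version((6, 3), (6, 2)) == 1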
diff --git a/magic_uv/utils/bl_class_registry.py b/magic_uv/utils/bl_class_registry.py
index 826f1483..f9f05faf 100644
--- a/magic_uv/utils/bl_class_registry.py
+++ b/magic_uv/utils/bl_class_registry.py
@@ -20,8 +20,8 @@
__author__ = "Nutti <nutti.metro@gmail.com>"
__status__ = "production"
-__version__ = "6.2"
-__date__ = "31 Jul 2019"
+__version__ = "6.3"
+__date__ = "10 Aug 2020"
import bpy
diff --git a/magic_uv/utils/compatibility.py b/magic_uv/utils/compatibility.py
index 6b7da000..b4c7c4ea 100644
--- a/magic_uv/utils/compatibility.py
+++ b/magic_uv/utils/compatibility.py
@@ -20,8 +20,8 @@
__author__ = "Nutti <nutti.metro@gmail.com>"
__status__ = "production"
-__version__ = "6.2"
-__date__ = "31 Jul 2019"
+__version__ = "6.3"
+__date__ = "10 Aug 2020"
import bpy
import bgl
diff --git a/magic_uv/utils/property_class_registry.py b/magic_uv/utils/property_class_registry.py
index dff4712f..9caa735c 100644
--- a/magic_uv/utils/property_class_registry.py
+++ b/magic_uv/utils/property_class_registry.py
@@ -20,8 +20,8 @@
__author__ = "Nutti <nutti.metro@gmail.com>"
__status__ = "production"
-__version__ = "6.2"
-__date__ = "31 Jul 2019"
+__version__ = "6.3"
+__date__ = "10 Aug 2020"
from .. import common
diff --git a/measureit/measureit_geometry.py b/measureit/measureit_geometry.py
index 1f314f7f..998bc038 100644
--- a/measureit/measureit_geometry.py
+++ b/measureit/measureit_geometry.py
@@ -413,7 +413,7 @@ def draw_segments(context, myobj, op, region, rv3d):
if ms.gltype == 11: # arc
# print length or arc and angle
if ms.glarc_len is True:
- tx_dist = ms.glarc_txlen + format_distance(fmt, units, arc_length)
+ tx_dist = ms.glarc_txlen + format_distance(fmt, units, arc_length * scale)
else:
tx_dist = " "
@@ -453,7 +453,7 @@ def draw_segments(context, myobj, op, region, rv3d):
if scene.measureit_gl_show_d is True and ms.gldist is True and \
ms.glarc_rad is True:
tx_dist = ms.glarc_txradio + format_distance(fmt, units,
- dist * scene.measureit_scale_factor)
+ dist * scene.measureit_scale_factor * scale)
else:
tx_dist = " "
if ms.gltype == 2:
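Both hunks multiply the displayed value by scale so that arc lengths and radii are converted the same way straight segments already are. Assuming scale here is the scene unit scale (this hunk alone does not show its definition), the conversion amounts to:

    import bpy

    def to_scene_units(raw_blender_units):
        # scale_length maps raw Blender units to the configured unit system
        return raw_blender_units * bpy.context.scene.unit_settings.scale_length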
diff --git a/mesh_auto_mirror.py b/mesh_auto_mirror.py
index 1d89d4e7..47611668 100644
--- a/mesh_auto_mirror.py
+++ b/mesh_auto_mirror.py
@@ -194,8 +194,7 @@ class AutoMirror(bpy.types.Operator):
bpy.context.object.modifiers[-1].show_on_cage = automirror.show_on_cage
if automirror.apply_mirror:
bpy.ops.object.mode_set(mode = 'OBJECT')
- bpy.ops.object.modifier_apply(apply_as = 'DATA',
- modifier = bpy.context.object.modifiers[-1].name)
+ bpy.ops.object.modifier_apply(modifier = bpy.context.object.modifiers[-1].name)
if automirror.toggle_edit:
bpy.ops.object.mode_set(mode = 'EDIT')
else:
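This change (and the matching ones in the files below) tracks the Blender 2.90 Python API, where bpy.ops.object.modifier_apply no longer accepts an apply_as argument. A hedged compatibility sketch for code that still has to run on older builds:

    import bpy

    def apply_last_modifier(obj):
        # assumes obj is the active object, as the operator requires
        name = obj.modifiers[-1].name
        if bpy.app.version >= (2, 90, 0):
            bpy.ops.object.modifier_apply(modifier=name)
        else:
            bpy.ops.object.modifier_apply(apply_as='DATA', modifier=name)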
diff --git a/mesh_bsurfaces.py b/mesh_bsurfaces.py
index c0c7a4f9..5e3a601c 100644
--- a/mesh_bsurfaces.py
+++ b/mesh_bsurfaces.py
@@ -20,7 +20,7 @@
bl_info = {
"name": "Bsurfaces GPL Edition",
"author": "Eclectiel, Vladimir Spivak (cwolf3d)",
- "version": (1, 7, 8),
+ "version": (1, 7, 9),
"blender": (2, 80, 0),
"location": "View3D EditMode > Sidebar > Edit Tab",
"description": "Modeling and retopology tool",
@@ -237,6 +237,9 @@ class MESH_OT_SURFSK_add_surface(Operator):
bl_description = "Generates surfaces from grease pencil strokes, bezier curves or loose edges"
bl_options = {'REGISTER', 'UNDO'}
+ is_crosshatch: BoolProperty(
+ default=False
+ )
is_fill_faces: BoolProperty(
default=False
)
@@ -1435,6 +1438,9 @@ class MESH_OT_SURFSK_add_surface(Operator):
me.from_pydata(all_verts_coords, all_edges, [])
ob = object_utils.object_data_add(context, me)
+ ob.location = (0.0, 0.0, 0.0)
+ ob.rotation_euler = (0.0, 0.0, 0.0)
+ ob.scale = (1.0, 1.0, 1.0)
bpy.ops.object.select_all('INVOKE_REGION_WIN', action='DESELECT')
ob.select_set(True)
@@ -1607,6 +1613,9 @@ class MESH_OT_SURFSK_add_surface(Operator):
me_surf = bpy.data.meshes.new(surf_me_name)
me_surf.from_pydata(all_surface_verts_co, [], all_surface_faces)
ob_surface = object_utils.object_data_add(context, me_surf)
+ ob_surface.location = (0.0, 0.0, 0.0)
+ ob_surface.rotation_euler = (0.0, 0.0, 0.0)
+ ob_surface.scale = (1.0, 1.0, 1.0)
# Delete final points temporal object
bpy.ops.object.delete({"selected_objects": [final_points_ob]})
@@ -1660,7 +1669,7 @@ class MESH_OT_SURFSK_add_surface(Operator):
shrinkwrap_modifier.wrap_method = "NEAREST_VERTEX"
shrinkwrap_modifier.target = self.main_object
- bpy.ops.object.modifier_apply('INVOKE_REGION_WIN', apply_as='DATA', modifier=shrinkwrap_modifier.name)
+ bpy.ops.object.modifier_apply('INVOKE_REGION_WIN', modifier=shrinkwrap_modifier.name)
# Make list with verts of original mesh as index and coords as value
main_object_verts_coords = []
@@ -4010,7 +4019,7 @@ class CURVE_OT_SURFSK_reorder_splines(Operator):
bpy.ops.object.modifier_add('INVOKE_REGION_WIN', type='SHRINKWRAP')
curves_duplicate_2.modifiers["Shrinkwrap"].wrap_method = "NEAREST_VERTEX"
curves_duplicate_2.modifiers["Shrinkwrap"].target = GP_strokes_mesh
- bpy.ops.object.modifier_apply('INVOKE_REGION_WIN', apply_as='DATA', modifier='Shrinkwrap')
+ bpy.ops.object.modifier_apply('INVOKE_REGION_WIN', modifier='Shrinkwrap')
# Get the distance of each vert from its original position to its position with Shrinkwrap
nearest_points_coords = {}
diff --git a/mesh_tissue/dual_mesh.py b/mesh_tissue/dual_mesh.py
index db24f896..404d5ef5 100644
--- a/mesh_tissue/dual_mesh.py
+++ b/mesh_tissue/dual_mesh.py
@@ -240,9 +240,7 @@ class dual_mesh(Operator):
if ob.modifiers[0].name == "dual_mesh_subsurf":
break
- bpy.ops.object.modifier_apply(
- apply_as='DATA', modifier='dual_mesh_subsurf'
- )
+ bpy.ops.object.modifier_apply(modifier='dual_mesh_subsurf')
bpy.ops.object.mode_set(mode='EDIT')
bpy.ops.mesh.select_all(action='DESELECT')
diff --git a/object_carver/carver_operator.py b/object_carver/carver_operator.py
index 95fa4af0..880f6491 100644
--- a/object_carver/carver_operator.py
+++ b/object_carver/carver_operator.py
@@ -1196,7 +1196,7 @@ class CARVER_OT_operator(bpy.types.Operator):
for mb in ActiveObj.modifiers:
if (mb.type == 'BOOLEAN') and (mb.name == BMname):
try:
- bpy.ops.object.modifier_apply(apply_as='DATA', modifier=BMname)
+ bpy.ops.object.modifier_apply(modifier=BMname)
except:
bpy.ops.object.modifier_remove(modifier=BMname)
exc_type, exc_value, exc_traceback = sys.exc_info()
@@ -1208,7 +1208,7 @@ class CARVER_OT_operator(bpy.types.Operator):
for mb in self.CurrentObj.modifiers:
if (mb.type == 'SOLIDIFY') and (mb.name == "CT_SOLIDIFY"):
try:
- bpy.ops.object.modifier_apply(apply_as='DATA', modifier="CT_SOLIDIFY")
+ bpy.ops.object.modifier_apply(modifier="CT_SOLIDIFY")
except:
exc_type, exc_value, exc_traceback = sys.exc_info()
self.report({'ERROR'}, str(exc_value))
@@ -1243,7 +1243,7 @@ class CARVER_OT_operator(bpy.types.Operator):
for mb in ActiveObj.modifiers:
if (mb.type == 'BOOLEAN') and (mb.name == BMname):
try:
- bpy.ops.object.modifier_apply(apply_as='DATA', modifier=BMname)
+ bpy.ops.object.modifier_apply(modifier=BMname)
except:
bpy.ops.object.modifier_remove(modifier=BMname)
exc_type, exc_value, exc_traceback = sys.exc_info()
diff --git a/object_carver/carver_utils.py b/object_carver/carver_utils.py
index 495aa1ce..1bd7455f 100644
--- a/object_carver/carver_utils.py
+++ b/object_carver/carver_utils.py
@@ -695,7 +695,7 @@ def boolean_operation(bool_type="DIFFERENCE"):
ActiveObj = bpy.context.active_object
sel_index = 0 if bpy.context.selected_objects[0] != bpy.context.active_object else 1
- # bpy.ops.object.modifier_apply(apply_as='DATA', modifier="CT_SOLIDIFY")
+ # bpy.ops.object.modifier_apply(modifier="CT_SOLIDIFY")
bool_name = "CT_" + bpy.context.selected_objects[sel_index].name
BoolMod = ActiveObj.modifiers.new(bool_name, "BOOLEAN")
BoolMod.object = bpy.context.selected_objects[sel_index]
@@ -736,14 +736,14 @@ def Rebool(context, self):
if self.ObjectBrush or self.ProfileBrush:
rebool_obj.show_in_front = False
try:
- bpy.ops.object.modifier_apply(apply_as='DATA', modifier="CT_SOLIDIFY")
+ bpy.ops.object.modifier_apply(modifier="CT_SOLIDIFY")
except:
exc_type, exc_value, exc_traceback = sys.exc_info()
self.report({'ERROR'}, str(exc_value))
if self.dont_apply_boolean is False:
try:
- bpy.ops.object.modifier_apply(apply_as='DATA', modifier="CT_INTERSECT")
+ bpy.ops.object.modifier_apply(modifier="CT_INTERSECT")
except:
exc_type, exc_value, exc_traceback = sys.exc_info()
self.report({'ERROR'}, str(exc_value))
@@ -758,7 +758,7 @@ def Rebool(context, self):
target_obj.select_set(True)
if self.dont_apply_boolean is False:
try:
- bpy.ops.object.modifier_apply(apply_as='DATA', modifier="CT_DIFFERENCE")
+ bpy.ops.object.modifier_apply(modifier="CT_DIFFERENCE")
except:
exc_type, exc_value, exc_traceback = sys.exc_info()
self.report({'ERROR'}, str(exc_value))
diff --git a/object_collection_manager/__init__.py b/object_collection_manager/__init__.py
index 928a62ec..e5f6a9ec 100644
--- a/object_collection_manager/__init__.py
+++ b/object_collection_manager/__init__.py
@@ -22,7 +22,7 @@ bl_info = {
"name": "Collection Manager",
"description": "Manage collections and their objects",
"author": "Ryan Inch",
- "version": (2, 9, 2),
+ "version": (2, 14, 1),
"blender": (2, 80, 0),
"location": "View3D - Object Mode (Shortcut - M)",
"warning": '', # used for warning icon and text in addons panel
@@ -77,6 +77,8 @@ class CollectionManagerProperties(PropertyGroup):
show_hide_viewport: BoolProperty(default=True, name="[VV] Hide in Viewport")
show_disable_viewport: BoolProperty(default=False, name="[DV] Disable in Viewports")
show_render: BoolProperty(default=False, name="[RR] Disable in Renders")
+ show_holdout: BoolProperty(default=False, name="[HH] Holdout")
+ show_indirect_only: BoolProperty(default=False, name="[IO] Indirect Only")
align_local_ops: BoolProperty(default=False, name="Align Local Options",
description="Align local options in a column to the right")
@@ -108,14 +110,22 @@ classes = (
operators.CMUnDisableViewportAllOperator,
operators.CMDisableRenderOperator,
operators.CMUnDisableRenderAllOperator,
+ operators.CMHoldoutOperator,
+ operators.CMUnHoldoutAllOperator,
+ operators.CMIndirectOnlyOperator,
+ operators.CMUnIndirectOnlyAllOperator,
operators.CMNewCollectionOperator,
operators.CMRemoveCollectionOperator,
+ operators.CMRemoveEmptyCollectionsOperator,
+ operators.CMSelectCollectionObjectsOperator,
operators.CMSetCollectionOperator,
operators.CMPhantomModeOperator,
+ operators.CMApplyPhantomModeOperator,
preferences.CMPreferences,
ui.CM_UL_items,
ui.CollectionManager,
ui.CMDisplayOptionsPanel,
+ ui.SpecialsMenu,
CollectionManagerProperties,
)
@@ -133,6 +143,18 @@ def undo_redo_post_handler(dummy):
internals.move_selection.clear()
internals.move_active = None
+
+def menu_addition(self, context):
+ layout = self.layout
+
+ layout.operator('view3d.collection_manager')
+
+ if bpy.context.preferences.addons[__package__].preferences.enable_qcd:
+ layout.operator('view3d.qcd_move_widget')
+
+ layout.separator()
+
+
def register():
for cls in classes:
bpy.utils.register_class(cls)
@@ -145,6 +167,9 @@ def register():
kmi = km.keymap_items.new('view3d.collection_manager', 'M', 'PRESS')
addon_keymaps.append((km, kmi))
+ # Add Collection Manager & QCD Move Widget to the Object->Collections menu
+ bpy.types.VIEW3D_MT_object_collection.prepend(menu_addition)
+
bpy.app.handlers.depsgraph_update_post.append(depsgraph_update_post_handler)
bpy.app.handlers.undo_post.append(undo_redo_post_handler)
bpy.app.handlers.redo_post.append(undo_redo_post_handler)
@@ -159,6 +184,9 @@ def unregister():
for cls in classes:
bpy.utils.unregister_class(cls)
+ # Remove Collection Manager & QCD Move Widget from the Object->Collections menu
+ bpy.types.VIEW3D_MT_object_collection.remove(menu_addition)
+
bpy.app.handlers.depsgraph_update_post.remove(depsgraph_update_post_handler)
bpy.app.handlers.undo_post.remove(undo_redo_post_handler)
bpy.app.handlers.redo_post.remove(undo_redo_post_handler)
diff --git a/object_collection_manager/internals.py b/object_collection_manager/internals.py
index 8e0c5b90..163e9804 100644
--- a/object_collection_manager/internals.py
+++ b/object_collection_manager/internals.py
@@ -53,12 +53,16 @@ rto_history = {
"disable": {},
"disable_all": {},
"render": {},
- "render_all": {}
+ "render_all": {},
+ "holdout": {},
+ "holdout_all": {},
+ "indirect": {},
+ "indirect_all": {},
}
expand_history = {
"target": "",
- "history": []
+ "history": [],
}
phantom_history = {
@@ -70,12 +74,16 @@ phantom_history = {
"hide_history": {},
"disable_history": {},
"render_history": {},
+ "holdout_history": {},
+ "indirect_history": {},
"exclude_all_history": [],
"select_all_history": [],
"hide_all_history": [],
"disable_all_history": [],
- "render_all_history": []
+ "render_all_history": [],
+ "holdout_all_history": [],
+ "indirect_all_history": [],
}
copy_buffer = {
@@ -210,46 +218,63 @@ class QCDSlots():
if self.length() > 20:
break
- def renumerate(self, *, depth_first=False, beginning=False):
+ def renumerate(self, *, beginning=False, depth_first=False, constrain=False):
if beginning:
self.clear_slots()
self.overrides.clear()
starting_laycol_name = self.get_name("1")
- if starting_laycol_name:
- laycol = layer_collections[starting_laycol_name]["parent"]["ptr"]
- else:
+ if not starting_laycol_name:
laycol = bpy.context.view_layer.layer_collection
starting_laycol_name = laycol.children[0].name
self.clear_slots()
self.overrides.clear()
- laycol_iter_list = []
- for laycol in laycol.children:
- if laycol.name == starting_laycol_name or laycol_iter_list:
- laycol_iter_list.append(laycol)
+ if depth_first:
+ parent = layer_collections[starting_laycol_name]["parent"]
+ x = 1
+
+ for laycol in layer_collections.values():
+ if self.length() == 0 and starting_laycol_name != laycol["name"]:
+ continue
+
+ if constrain:
+ if self.length():
+ if laycol["parent"]["name"] == parent["name"]:
+ break
- while laycol_iter_list:
- layer_collection = laycol_iter_list.pop(0)
+ self.add_slot(f"{x}", laycol["name"])
- for x in range(20):
- if self.contains(name=layer_collection.name):
+ x += 1
+
+ if self.length() > 20:
break
- if not self.contains(idx=f"{x+1}"):
- self.add_slot(f"{x+1}", layer_collection.name)
+ else:
+ laycol = layer_collections[starting_laycol_name]["parent"]["ptr"]
+ laycol_iter_list = []
+ for laycol in laycol.children:
+ if laycol.name == starting_laycol_name:
+ laycol_iter_list.append(laycol)
- if depth_first:
- laycol_iter_list[0:0] = list(layer_collection.children)
+ elif not constrain and laycol_iter_list:
+ laycol_iter_list.append(laycol)
+
+ x = 1
+ while laycol_iter_list:
+ layer_collection = laycol_iter_list.pop(0)
+
+ self.add_slot(f"{x}", layer_collection.name)
- else:
laycol_iter_list.extend(list(layer_collection.children))
- if self.length() > 20:
- break
+ x += 1
+
+ if self.length() > 20:
+ break
for laycol in layer_collections.values():
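The rewritten renumerate() numbers QCD slots either depth-first (following the cached layer_collections order) or width-first, optionally constrained to a single branch. A deliberately simplified breadth-first sketch, ignoring the constrain and override handling:

    def number_slots_breadth_first(root_layer_collection, add_slot):
        # add_slot(idx, name) stands in for QCDSlots.add_slot in the add-on
        queue = list(root_layer_collection.children)
        slot = 1
        while queue and slot <= 20:
            laycol = queue.pop(0)
            add_slot(str(slot), laycol.name)
            queue.extend(laycol.children)
            slot += 1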
@@ -300,7 +325,9 @@ def update_col_name(self, context):
"select",
"hide",
"disable",
- "render"
+ "render",
+ "holdout",
+ "indirect",
]
orig_targets = {
@@ -567,6 +594,8 @@ def generate_state():
"hide": [],
"disable": [],
"render": [],
+ "holdout": [],
+ "indirect": [],
}
for name, laycol in layer_collections.items():
@@ -576,17 +605,27 @@ def generate_state():
state["hide"].append(laycol["ptr"].hide_viewport)
state["disable"].append(laycol["ptr"].collection.hide_viewport)
state["render"].append(laycol["ptr"].collection.hide_render)
+ state["holdout"].append(laycol["ptr"].holdout)
+ state["indirect"].append(laycol["ptr"].indirect_only)
return state
-def get_move_selection():
+def get_move_selection(*, names_only=False):
global move_selection
if not move_selection:
- move_selection = [obj.name for obj in bpy.context.selected_objects]
+ move_selection = {obj.name for obj in bpy.context.selected_objects}
+
+ if names_only:
+ return move_selection
- return [bpy.data.objects[name] for name in move_selection]
+ else:
+ if len(move_selection) <= 5:
+ return {bpy.data.objects[name] for name in move_selection}
+
+ else:
+ return {obj for obj in bpy.data.objects if obj.name in move_selection}
def get_move_active():
@@ -596,7 +635,7 @@ def get_move_active():
if not move_active:
move_active = getattr(bpy.context.view_layer.objects.active, "name", None)
- if move_active not in [obj.name for obj in get_move_selection()]:
+ if move_active not in get_move_selection(names_only=True):
move_active = None
return bpy.data.objects[move_active] if move_active else None
diff --git a/object_collection_manager/operator_utils.py b/object_collection_manager/operator_utils.py
index f99d870b..20c7dee7 100644
--- a/object_collection_manager/operator_utils.py
+++ b/object_collection_manager/operator_utils.py
@@ -17,12 +17,18 @@
# ##### END GPL LICENSE BLOCK #####
# Copyright 2011, Ryan Inch
+import bpy
from .internals import (
layer_collections,
+ qcd_slots,
+ expanded,
+ expand_history,
rto_history,
copy_buffer,
swap_buffer,
+ update_property_group,
+ get_move_selection,
)
rto_path = {
@@ -30,12 +36,67 @@ rto_path = {
"select": "collection.hide_select",
"hide": "hide_viewport",
"disable": "collection.hide_viewport",
- "render": "collection.hide_render"
+ "render": "collection.hide_render",
+ "holdout": "holdout",
+ "indirect": "indirect_only",
+ }
+
+set_off_on = {
+ "exclude": {
+ "off": True,
+ "on": False
+ },
+ "select": {
+ "off": True,
+ "on": False
+ },
+ "hide": {
+ "off": True,
+ "on": False
+ },
+ "disable": {
+ "off": True,
+ "on": False
+ },
+ "render": {
+ "off": True,
+ "on": False
+ },
+ "holdout": {
+ "off": False,
+ "on": True
+ },
+ "indirect": {
+ "off": False,
+ "on": True
+ }
+ }
+
+get_off_on = {
+ False: {
+ "exclude": "on",
+ "select": "on",
+ "hide": "on",
+ "disable": "on",
+ "render": "on",
+ "holdout": "off",
+ "indirect": "off",
+ },
+
+ True: {
+ "exclude": "off",
+ "select": "off",
+ "hide": "off",
+ "disable": "off",
+ "render": "off",
+ "holdout": "on",
+ "indirect": "on",
+ }
}
def get_rto(layer_collection, rto):
- if rto in ["exclude", "hide"]:
+ if rto in ["exclude", "hide", "holdout", "indirect"]:
return getattr(layer_collection, rto_path[rto])
else:
@@ -44,7 +105,7 @@ def get_rto(layer_collection, rto):
def set_rto(layer_collection, rto, value):
- if rto in ["exclude", "hide"]:
+ if rto in ["exclude", "hide", "holdout", "indirect"]:
setattr(layer_collection, rto_path[rto], value)
else:
@@ -52,30 +113,34 @@ def set_rto(layer_collection, rto, value):
setattr(collection, rto_path[rto].split(".")[1], value)
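The set_off_on / get_off_on tables exist because the older RTOs (exclude, select, hide, disable, render) store True for "off", while the new holdout and indirect_only flags store True for "on". A small usage sketch built on the helpers defined above:

    def turn_on(layer_collection, rto):
        set_rto(layer_collection, rto, set_off_on[rto]["on"])

    def is_on(layer_collection, rto):
        return get_off_on[get_rto(layer_collection, rto)][rto] == "on"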
-def apply_to_children(laycol, apply_function):
- laycol_iter_list = [laycol.children]
+def apply_to_children(parent, apply_function):
+ # works for both Collections & LayerCollections
+ child_lists = [parent.children]
- while len(laycol_iter_list) > 0:
- new_laycol_iter_list = []
+ while child_lists:
+ new_child_lists = []
- for laycol_iter in laycol_iter_list:
- for layer_collection in laycol_iter:
- apply_function(layer_collection)
+ for child_list in child_lists:
+ for child in child_list:
+ apply_function(child)
- if len(layer_collection.children) > 0:
- new_laycol_iter_list.append(layer_collection.children)
+ if child.children:
+ new_child_lists.append(child.children)
- laycol_iter_list = new_laycol_iter_list
+ child_lists = new_child_lists
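A quick usage example for the generalised traversal helper above; it now accepts plain Collections as well as LayerCollections:

    import bpy

    # collect the name of every collection nested under the master collection
    names = []
    apply_to_children(bpy.context.scene.collection,
                      lambda child: names.append(child.name))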
def isolate_rto(cls, self, view_layer, rto, *, children=False):
+ off = set_off_on[rto]["off"]
+ on = set_off_on[rto]["on"]
+
laycol_ptr = layer_collections[self.name]["ptr"]
target = rto_history[rto][view_layer]["target"]
history = rto_history[rto][view_layer]["history"]
# get active collections
active_layer_collections = [x["ptr"] for x in layer_collections.values()
- if not get_rto(x["ptr"], rto)]
+ if get_rto(x["ptr"], rto) == on]
# check if previous state should be restored
if cls.isolated and self.name == target:
@@ -93,7 +158,7 @@ def isolate_rto(cls, self, view_layer, rto, *, children=False):
active_layer_collections[0].name == self.name):
# activate all collections
for item in layer_collections.values():
- set_rto(item["ptr"], rto, False)
+ set_rto(item["ptr"], rto, on)
# reset target and history
del rto_history[rto][view_layer]
@@ -123,15 +188,15 @@ def isolate_rto(cls, self, view_layer, rto, *, children=False):
# isolate collection
for item in layer_collections.values():
if item["name"] != laycol_ptr.name:
- set_rto(item["ptr"], rto, True)
+ set_rto(item["ptr"], rto, off)
- set_rto(laycol_ptr, rto, False)
+ set_rto(laycol_ptr, rto, on)
- if rto != "exclude":
+ if rto not in ["exclude", "holdout", "indirect"]:
# activate all parents
laycol = layer_collections[self.name]
while laycol["id"] != 0:
- set_rto(laycol["ptr"], rto, False)
+ set_rto(laycol["ptr"], rto, on)
laycol = laycol["parent"]
if children:
@@ -149,7 +214,7 @@ def isolate_rto(cls, self, view_layer, rto, *, children=False):
apply_to_children(laycol_ptr, restore_child_states)
- else:
+ elif rto == "exclude":
# deactivate all children
def deactivate_all_children(layer_collection):
set_rto(layer_collection, rto, True)
@@ -176,6 +241,9 @@ def toggle_children(self, view_layer, rto):
def activate_all_rtos(view_layer, rto):
+ off = set_off_on[rto]["off"]
+ on = set_off_on[rto]["on"]
+
history = rto_history[rto+"_all"][view_layer]
# if not activated, activate all
@@ -183,12 +251,12 @@ def activate_all_rtos(view_layer, rto):
keep_history = False
for item in reversed(list(layer_collections.values())):
- if get_rto(item["ptr"], rto) == True:
+ if get_rto(item["ptr"], rto) == off:
keep_history = True
history.append(get_rto(item["ptr"], rto))
- set_rto(item["ptr"], rto, False)
+ set_rto(item["ptr"], rto, on)
if not keep_history:
history.clear()
@@ -226,12 +294,22 @@ def copy_rtos(view_layer, rto):
# copy
copy_buffer["RTO"] = rto
for laycol in layer_collections.values():
- copy_buffer["values"].append(get_rto(laycol["ptr"], rto))
+ copy_buffer["values"].append(get_off_on[
+ get_rto(laycol["ptr"], rto)
+ ][
+ rto
+ ]
+ )
else:
# paste
for x, laycol in enumerate(layer_collections.values()):
- set_rto(laycol["ptr"], rto, copy_buffer["values"][x])
+ set_rto(laycol["ptr"],
+ rto,
+ set_off_on[rto][
+ copy_buffer["values"][x]
+ ]
+ )
# clear rto history
rto_history[rto].pop(view_layer, None)
@@ -247,18 +325,41 @@ def swap_rtos(view_layer, rto):
# get A
swap_buffer["A"]["RTO"] = rto
for laycol in layer_collections.values():
- swap_buffer["A"]["values"].append(get_rto(laycol["ptr"], rto))
+ swap_buffer["A"]["values"].append(get_off_on[
+ get_rto(laycol["ptr"], rto)
+ ][
+ rto
+ ]
+ )
else:
# get B
swap_buffer["B"]["RTO"] = rto
for laycol in layer_collections.values():
- swap_buffer["B"]["values"].append(get_rto(laycol["ptr"], rto))
+ swap_buffer["B"]["values"].append(get_off_on[
+ get_rto(laycol["ptr"], rto)
+ ][
+ rto
+ ]
+ )
# swap A with B
for x, laycol in enumerate(layer_collections.values()):
- set_rto(laycol["ptr"], swap_buffer["A"]["RTO"], swap_buffer["B"]["values"][x])
- set_rto(laycol["ptr"], swap_buffer["B"]["RTO"], swap_buffer["A"]["values"][x])
+ set_rto(laycol["ptr"], swap_buffer["A"]["RTO"],
+ set_off_on[
+ swap_buffer["A"]["RTO"]
+ ][
+ swap_buffer["B"]["values"][x]
+ ]
+ )
+
+ set_rto(laycol["ptr"], swap_buffer["B"]["RTO"],
+ set_off_on[
+ swap_buffer["B"]["RTO"]
+ ][
+ swap_buffer["A"]["values"][x]
+ ]
+ )
# clear rto history
@@ -289,3 +390,106 @@ def clear_swap(rto):
swap_buffer["A"]["values"].clear()
swap_buffer["B"]["RTO"] = ""
swap_buffer["B"]["values"].clear()
+
+
+def link_child_collections_to_parent(laycol, collection, parent_collection):
+ # store view layer RTOs for all children of the collection being deleted
+ child_states = {}
+ def get_child_states(layer_collection):
+ child_states[layer_collection.name] = (layer_collection.exclude,
+ layer_collection.hide_viewport,
+ layer_collection.holdout,
+ layer_collection.indirect_only)
+
+ apply_to_children(laycol["ptr"], get_child_states)
+
+ # link any subcollections of the collection being deleted to its parent
+ for subcollection in collection.children:
+ if not subcollection.name in parent_collection.children:
+ parent_collection.children.link(subcollection)
+
+ # apply the stored view layer RTOs to the newly linked collections and their
+ # children
+ def restore_child_states(layer_collection):
+ state = child_states.get(layer_collection.name)
+
+ if state:
+ layer_collection.exclude = state[0]
+ layer_collection.hide_viewport = state[1]
+ layer_collection.holdout = state[2]
+ layer_collection.indirect_only = state[3]
+
+ apply_to_children(laycol["parent"]["ptr"], restore_child_states)
+
+
+def remove_collection(laycol, collection, context):
+ # get selected row
+ cm = context.scene.collection_manager
+ selected_row_name = cm.cm_list_collection[cm.cm_list_index].name
+
+ # delete collection
+ bpy.data.collections.remove(collection)
+
+ # update references
+ expanded.discard(laycol["name"])
+
+ if expand_history["target"] == laycol["name"]:
+ expand_history["target"] = ""
+
+ if laycol["name"] in expand_history["history"]:
+ expand_history["history"].remove(laycol["name"])
+
+ if qcd_slots.contains(name=laycol["name"]):
+ qcd_slots.del_slot(name=laycol["name"])
+
+ if laycol["name"] in qcd_slots.overrides:
+ qcd_slots.overrides.remove(laycol["name"])
+
+ # reset history
+ for rto in rto_history.values():
+ rto.clear()
+
+ # update tree view
+ update_property_group(context)
+
+ # update selected row
+ laycol = layer_collections.get(selected_row_name, None)
+ if laycol:
+ cm.cm_list_index = laycol["row_index"]
+
+ elif len(cm.cm_list_collection) <= cm.cm_list_index:
+ cm.cm_list_index = len(cm.cm_list_collection) - 1
+
+ if cm.cm_list_index > -1:
+ name = cm.cm_list_collection[cm.cm_list_index].name
+ laycol = layer_collections[name]
+ while not laycol["visible"]:
+ laycol = laycol["parent"]
+
+ cm.cm_list_index = laycol["row_index"]
+
+
+def select_collection_objects(is_master_collection, collection_name, replace, nested):
+ if is_master_collection:
+ target_collection = bpy.context.view_layer.layer_collection.collection
+
+ else:
+ laycol = layer_collections[collection_name]
+ target_collection = laycol["ptr"].collection
+
+ if replace:
+ bpy.ops.object.select_all(action='DESELECT')
+
+ selection_state = get_move_selection().isdisjoint(target_collection.objects)
+
+ def select_objects(collection):
+ for obj in collection.objects:
+ try:
+ obj.select_set(selection_state)
+ except RuntimeError:
+ pass
+
+ select_objects(target_collection)
+
+ if nested:
+ apply_to_children(target_collection, select_objects)
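For illustration only (the collection name used here is made up), a call that adds the objects of one collection and all of its nested collections to the current selection would be:

    select_collection_objects(
        is_master_collection=False,
        collection_name="Props",   # hypothetical collection name
        replace=False,
        nested=True,
    )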
diff --git a/object_collection_manager/operators.py b/object_collection_manager/operators.py
index 54f9596b..1e265e52 100644
--- a/object_collection_manager/operators.py
+++ b/object_collection_manager/operators.py
@@ -63,6 +63,9 @@ from .operator_utils import (
swap_rtos,
clear_copy,
clear_swap,
+ link_child_collections_to_parent,
+ remove_collection,
+ select_collection_objects,
)
class SetActiveCollection(Operator):
@@ -71,11 +74,11 @@ class SetActiveCollection(Operator):
bl_idname = "view3d.set_active_collection"
bl_options = {'UNDO'}
- collection_index: IntProperty()
+ is_master_collection: BoolProperty()
collection_name: StringProperty()
def execute(self, context):
- if self.collection_index == -1:
+ if self.is_master_collection:
layer_collection = context.view_layer.layer_collection
else:
@@ -98,7 +101,6 @@ class ExpandAllOperator(Operator):
'''Expand/Collapse all collections'''
bl_label = "Expand All Items"
bl_idname = "view3d.expand_all_items"
- bl_options = {'REGISTER', 'UNDO'}
def execute(self, context):
global expand_history
@@ -129,7 +131,6 @@ class ExpandSublevelOperator(Operator):
" * Alt+LMB - Discard history"
)
bl_idname = "view3d.expand_sublevel"
- bl_options = {'REGISTER', 'UNDO'}
expand: BoolProperty()
name: StringProperty()
@@ -215,6 +216,58 @@ class ExpandSublevelOperator(Operator):
return {'FINISHED'}
+class CMSelectCollectionObjectsOperator(Operator):
+ bl_label = "Select All Objects in the Collection"
+ bl_description = (
+ " * LMB - Select all objects in collection.\n"
+ " * Shift+LMB - Add/Remove collection objects from selection.\n"
+ " * Ctrl+LMB - Isolate nested selection.\n"
+ " * Ctrl+Shift+LMB - Add/Remove nested from selection"
+ )
+ bl_idname = "view3d.select_collection_objects"
+ bl_options = {'REGISTER', 'UNDO'}
+
+ is_master_collection: BoolProperty()
+ collection_name: StringProperty()
+
+ def invoke(self, context, event):
+ modifiers = get_modifiers(event)
+
+ if modifiers == {"shift"}:
+ select_collection_objects(
+ is_master_collection=self.is_master_collection,
+ collection_name=self.collection_name,
+ replace=False,
+ nested=False
+ )
+
+ elif modifiers == {"ctrl"}:
+ select_collection_objects(
+ is_master_collection=self.is_master_collection,
+ collection_name=self.collection_name,
+ replace=True,
+ nested=True
+ )
+
+ elif modifiers == {"ctrl", "shift"}:
+ select_collection_objects(
+ is_master_collection=self.is_master_collection,
+ collection_name=self.collection_name,
+ replace=False,
+ nested=True
+ )
+
+ else:
+ select_collection_objects(
+ is_master_collection=self.is_master_collection,
+ collection_name=self.collection_name,
+ replace=True,
+ nested=False
+ )
+
+ return {'FINISHED'}
+
+
class CMSetCollectionOperator(Operator):
bl_label = "Set Object Collection"
bl_description = (
@@ -224,11 +277,11 @@ class CMSetCollectionOperator(Operator):
bl_idname = "view3d.set_collection"
bl_options = {'REGISTER', 'UNDO'}
- collection_index: IntProperty()
+ is_master_collection: BoolProperty()
collection_name: StringProperty()
def invoke(self, context, event):
- if self.collection_index == 0:
+ if self.is_master_collection:
target_collection = context.view_layer.layer_collection.collection
else:
@@ -248,7 +301,7 @@ class CMSetCollectionOperator(Operator):
# make sure there is an active object
if not active_object:
- active_object = selected_objects[0]
+ active_object = tuple(selected_objects)[0]
# check if in collection
if not active_object.name in target_collection.objects:
@@ -856,6 +909,211 @@ class CMUnDisableRenderAllOperator(Operator):
return {'FINISHED'}
+class CMHoldoutOperator(Operator):
+ bl_label = "[HH] Holdout"
+ bl_description = (
+ " * Shift+LMB - Isolate/Restore.\n"
+ " * Shift+Ctrl+LMB - Isolate nested/Restore.\n"
+ " * Ctrl+LMB - Toggle nested.\n"
+ " * Alt+LMB - Discard history"
+ )
+ bl_idname = "view3d.holdout_collection"
+ bl_options = {'REGISTER', 'UNDO'}
+
+ name: StringProperty()
+
+ # static class var
+ isolated = False
+
+ def invoke(self, context, event):
+ global rto_history
+ cls = CMHoldoutOperator
+
+ modifiers = get_modifiers(event)
+ view_layer = context.view_layer.name
+ laycol_ptr = layer_collections[self.name]["ptr"]
+
+ if not view_layer in rto_history["holdout"]:
+ rto_history["holdout"][view_layer] = {"target": "", "history": []}
+
+ if modifiers == {"alt"}:
+ del rto_history["holdout"][view_layer]
+ cls.isolated = False
+
+ elif modifiers == {"shift"}:
+ isolate_rto(cls, self, view_layer, "holdout")
+
+ elif modifiers == {"ctrl"}:
+ toggle_children(self, view_layer, "holdout")
+
+ cls.isolated = False
+
+ elif modifiers == {"ctrl", "shift"}:
+ isolate_rto(cls, self, view_layer, "holdout", children=True)
+
+ else:
+ # toggle holdout
+
+ # reset holdout history
+ del rto_history["holdout"][view_layer]
+
+ # toggle holdout of collection in viewport
+ laycol_ptr.holdout = not laycol_ptr.holdout
+
+ cls.isolated = False
+
+ # reset holdout all history
+ if view_layer in rto_history["holdout_all"]:
+ del rto_history["holdout_all"][view_layer]
+
+ return {'FINISHED'}
+
+
+class CMUnHoldoutAllOperator(Operator):
+ bl_label = "[HH Global] Holdout"
+ bl_description = (
+ " * LMB - Enable all/Restore.\n"
+ " * Shift+LMB - Invert.\n"
+ " * Ctrl+LMB - Copy/Paste RTOs.\n"
+ " * Ctrl+Alt+LMB - Swap RTOs.\n"
+ " * Alt+LMB - Discard history"
+ )
+ bl_idname = "view3d.un_holdout_all_collections"
+ bl_options = {'REGISTER', 'UNDO'}
+
+ def invoke(self, context, event):
+ global rto_history
+
+ view_layer = context.view_layer.name
+ modifiers = get_modifiers(event)
+
+ if not view_layer in rto_history["holdout_all"]:
+ rto_history["holdout_all"][view_layer] = []
+
+ if modifiers == {"alt"}:
+ # clear all states
+ del rto_history["holdout_all"][view_layer]
+ clear_copy("holdout")
+ clear_swap("holdout")
+
+ elif modifiers == {"ctrl"}:
+ copy_rtos(view_layer, "holdout")
+
+ elif modifiers == {"ctrl", "alt"}:
+ swap_rtos(view_layer, "holdout")
+
+ elif modifiers == {"shift"}:
+ invert_rtos(view_layer, "holdout")
+
+ else:
+ activate_all_rtos(view_layer, "holdout")
+
+ return {'FINISHED'}
+
+
+class CMIndirectOnlyOperator(Operator):
+ bl_label = "[IO] Indirect Only"
+ bl_description = (
+ " * Shift+LMB - Isolate/Restore.\n"
+ " * Shift+Ctrl+LMB - Isolate nested/Restore.\n"
+ " * Ctrl+LMB - Toggle nested.\n"
+ " * Alt+LMB - Discard history"
+ )
+ bl_idname = "view3d.indirect_only_collection"
+ bl_options = {'REGISTER', 'UNDO'}
+
+ name: StringProperty()
+
+ # static class var
+ isolated = False
+
+ def invoke(self, context, event):
+ global rto_history
+ cls = CMIndirectOnlyOperator
+
+ modifiers = get_modifiers(event)
+ view_layer = context.view_layer.name
+ laycol_ptr = layer_collections[self.name]["ptr"]
+
+ if not view_layer in rto_history["indirect"]:
+ rto_history["indirect"][view_layer] = {"target": "", "history": []}
+
+
+ if modifiers == {"alt"}:
+ del rto_history["indirect"][view_layer]
+ cls.isolated = False
+
+ elif modifiers == {"shift"}:
+ isolate_rto(cls, self, view_layer, "indirect")
+
+ elif modifiers == {"ctrl"}:
+ toggle_children(self, view_layer, "indirect")
+
+ cls.isolated = False
+
+ elif modifiers == {"ctrl", "shift"}:
+ isolate_rto(cls, self, view_layer, "indirect", children=True)
+
+ else:
+ # toggle indirect only
+
+ # reset indirect history
+ del rto_history["indirect"][view_layer]
+
+ # toggle indirect only of collection
+ laycol_ptr.indirect_only = not laycol_ptr.indirect_only
+
+ cls.isolated = False
+
+ # reset indirect all history
+ if view_layer in rto_history["indirect_all"]:
+ del rto_history["indirect_all"][view_layer]
+
+ return {'FINISHED'}
+
+
+class CMUnIndirectOnlyAllOperator(Operator):
+ bl_label = "[IO Global] Indirect Only"
+ bl_description = (
+ " * LMB - Enable all/Restore.\n"
+ " * Shift+LMB - Invert.\n"
+ " * Ctrl+LMB - Copy/Paste RTOs.\n"
+ " * Ctrl+Alt+LMB - Swap RTOs.\n"
+ " * Alt+LMB - Discard history"
+ )
+ bl_idname = "view3d.un_indirect_only_all_collections"
+ bl_options = {'REGISTER', 'UNDO'}
+
+ def invoke(self, context, event):
+ global rto_history
+
+ view_layer = context.view_layer.name
+ modifiers = get_modifiers(event)
+
+ if not view_layer in rto_history["indirect_all"]:
+ rto_history["indirect_all"][view_layer] = []
+
+ if modifiers == {"alt"}:
+ # clear all states
+ del rto_history["indirect_all"][view_layer]
+ clear_copy("indirect")
+ clear_swap("indirect")
+
+ elif modifiers == {"ctrl"}:
+ copy_rtos(view_layer, "indirect")
+
+ elif modifiers == {"ctrl", "alt"}:
+ swap_rtos(view_layer, "indirect")
+
+ elif modifiers == {"shift"}:
+ invert_rtos(view_layer, "indirect")
+
+ else:
+ activate_all_rtos(view_layer, "indirect")
+
+ return {'FINISHED'}
+
+
class CMRemoveCollectionOperator(Operator):
'''Remove Collection'''
bl_label = "Remove Collection"
@@ -869,12 +1127,9 @@ class CMRemoveCollectionOperator(Operator):
global expand_history
global qcd_slots
- cm = context.scene.collection_manager
-
laycol = layer_collections[self.collection_name]
collection = laycol["ptr"].collection
parent_collection = laycol["parent"]["ptr"].collection
- selected_row_name = cm.cm_list_collection[cm.cm_list_index].name
# shift all objects in this collection to the parent collection
@@ -885,77 +1140,69 @@ class CMRemoveCollectionOperator(Operator):
# shift all child collections to the parent collection preserving view layer RTOs
if collection.children:
- # store view layer RTOs for all children of the to be deleted collection
- child_states = {}
- def get_child_states(layer_collection):
- child_states[layer_collection.name] = (layer_collection.exclude,
- layer_collection.hide_viewport,
- layer_collection.holdout,
- layer_collection.indirect_only)
-
- apply_to_children(laycol["ptr"], get_child_states)
-
- # link any subcollections of the to be deleted collection to it's parent
- for subcollection in collection.children:
- parent_collection.children.link(subcollection)
-
- # apply the stored view layer RTOs to the newly linked collections and their
- # children
- def restore_child_states(layer_collection):
- state = child_states.get(layer_collection.name)
+ link_child_collections_to_parent(laycol, collection, parent_collection)
- if state:
- layer_collection.exclude = state[0]
- layer_collection.hide_viewport = state[1]
- layer_collection.holdout = state[2]
- layer_collection.indirect_only = state[3]
+ # remove collection, update references, and update tree view
+ remove_collection(laycol, collection, context)
- apply_to_children(laycol["parent"]["ptr"], restore_child_states)
-
-
- # remove collection, update expanded, and update tree view
- bpy.data.collections.remove(collection)
- expanded.discard(self.collection_name)
+ return {'FINISHED'}
- if expand_history["target"] == self.collection_name:
- expand_history["target"] = ""
- if self.collection_name in expand_history["history"]:
- expand_history["history"].remove(self.collection_name)
+class CMRemoveEmptyCollectionsOperator(Operator):
+ bl_label = "Remove Empty Collections"
+ bl_idname = "view3d.remove_empty_collections"
+ bl_options = {'UNDO'}
- update_property_group(context)
+ without_objects: BoolProperty()
+ @classmethod
+ def description(cls, context, properties):
+ if properties.without_objects:
+ tooltip = (
+ "Purge All Collections Without Objects.\n"
+ "Deletes all collections that don't contain objects, even if they have subcollections"
+ )
- # update selected row
- laycol = layer_collections.get(selected_row_name, None)
- if laycol:
- cm.cm_list_index = laycol["row_index"]
+ else:
+ tooltip = (
+ "Remove Empty Collections.\n"
+ "Delete collections that don't have any subcollections or objects"
+ )
- elif len(cm.cm_list_collection) == cm.cm_list_index:
- cm.cm_list_index -= 1
+ return tooltip
- if cm.cm_list_index > -1:
- name = cm.cm_list_collection[cm.cm_list_index].name
- laycol = layer_collections[name]
- while not laycol["visible"]:
- laycol = laycol["parent"]
+ def execute(self, context):
+ global rto_history
+ global expand_history
+ global qcd_slots
- cm.cm_list_index = laycol["row_index"]
+ if self.without_objects:
+ empty_collections = [laycol["name"]
+ for laycol in layer_collections.values()
+ if not laycol["ptr"].collection.objects]
+ else:
+ empty_collections = [laycol["name"]
+ for laycol in layer_collections.values()
+ if not laycol["children"] and
+ not laycol["ptr"].collection.objects]
+ for name in empty_collections:
+ laycol = layer_collections[name]
+ collection = laycol["ptr"].collection
+ parent_collection = laycol["parent"]["ptr"].collection
- # update qcd
- if qcd_slots.contains(name=self.collection_name):
- qcd_slots.del_slot(name=self.collection_name)
+ # link all child collections to the parent collection preserving view layer RTOs
+ if collection.children:
+ link_child_collections_to_parent(laycol, collection, parent_collection)
- if self.collection_name in qcd_slots.overrides:
- qcd_slots.overrides.remove(self.collection_name)
+ # remove collection, update references, and update tree view
+ remove_collection(laycol, collection, context)
- # reset history
- for rto in rto_history.values():
- rto.clear()
+ self.report({"INFO"}, f"Removed {len(empty_collections)} collections")
return {'FINISHED'}
+
rename = [False]
class CMNewCollectionOperator(Operator):
bl_label = "Add New Collection"
@@ -1070,6 +1317,8 @@ class CMPhantomModeOperator(Operator):
"hide": layer_collection.hide_viewport,
"disable": layer_collection.collection.hide_viewport,
"render": layer_collection.collection.hide_render,
+ "holdout": layer_collection.holdout,
+ "indirect": layer_collection.indirect_only,
}
apply_to_children(view_layer.layer_collection, save_visibility_state)
@@ -1090,6 +1339,8 @@ class CMPhantomModeOperator(Operator):
layer_collection.hide_viewport = phantom_laycol["hide"]
layer_collection.collection.hide_viewport = phantom_laycol["disable"]
layer_collection.collection.hide_render = phantom_laycol["render"]
+ layer_collection.holdout = phantom_laycol["holdout"]
+ layer_collection.indirect_only = phantom_laycol["indirect"]
apply_to_children(view_layer.layer_collection, restore_visibility_state)
@@ -1108,3 +1359,15 @@ class CMPhantomModeOperator(Operator):
return {'FINISHED'}
+
+
+class CMApplyPhantomModeOperator(Operator):
+ '''Apply changes and quit Phantom Mode'''
+ bl_label = "Apply Phantom Mode"
+ bl_idname = "view3d.apply_phantom_mode"
+
+ def execute(self, context):
+ cm = context.scene.collection_manager
+ cm.in_phantom_mode = False
+
+ return {'FINISHED'}
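CMRemoveEmptyCollectionsOperator above builds its tooltip at draw time through the description() classmethod, so a single operator can back two differently documented buttons (the without_objects variants wired up in the Specials menu further down). A minimal sketch of that pattern with a hypothetical operator, assuming nothing beyond the standard bpy API:

import bpy
from bpy.props import BoolProperty
from bpy.types import Operator

class SketchDynamicTooltip(Operator):
    bl_idname = "view3d.sketch_dynamic_tooltip"
    bl_label = "Sketch: Dynamic Tooltip"

    deep: BoolProperty()

    @classmethod
    def description(cls, context, properties):
        # Blender calls this per button, with the property values assigned
        # where the button was drawn, so the tooltip can vary per button.
        if properties.deep:
            return "Operate on this collection and all of its children"
        return "Operate on this collection only"

    def execute(self, context):
        return {'FINISHED'}

# Two buttons, one operator, two tooltips:
#     layout.operator("view3d.sketch_dynamic_tooltip").deep = False
#     layout.operator("view3d.sketch_dynamic_tooltip", text="Deep").deep = True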
diff --git a/object_collection_manager/qcd_move_widget.py b/object_collection_manager/qcd_move_widget.py
index 95e25058..1b2a6bee 100644
--- a/object_collection_manager/qcd_move_widget.py
+++ b/object_collection_manager/qcd_move_widget.py
@@ -27,10 +27,12 @@ import gpu
from gpu_extras.batch import batch_for_shader
from bpy.types import Operator
+
from .internals import (
layer_collections,
qcd_slots,
)
+
from . import qcd_operators
def spacer():
@@ -338,13 +340,7 @@ def mouse_in_area(mouse_pos, area, buf = 0):
return True
def account_for_view_bounds(area):
- # make sure it renders in the 3d view
- # left
- if area["vert"][0] < 0:
- x = 0
- y = area["vert"][1]
-
- area["vert"] = (x, y)
+ # make sure it renders in the 3d view - prioritize top left
# right
if area["vert"][0] + area["width"] > bpy.context.region.width:
@@ -353,10 +349,10 @@ def account_for_view_bounds(area):
area["vert"] = (x, y)
- # top
- if area["vert"][1] > bpy.context.region.height:
- x = area["vert"][0]
- y = bpy.context.region.height
+ # left
+ if area["vert"][0] < 0:
+ x = 0
+ y = area["vert"][1]
area["vert"] = (x, y)
@@ -367,12 +363,19 @@ def account_for_view_bounds(area):
area["vert"] = (x, y)
+ # top
+ if area["vert"][1] > bpy.context.region.height:
+ x = area["vert"][0]
+ y = bpy.context.region.height
+
+ area["vert"] = (x, y)
+
def update_area_dimensions(area, w=0, h=0):
area["width"] += w
area["height"] += h
class QCDMoveWidget(Operator):
- """QCD Move Widget"""
+ """Move objects to QCD Slots"""
bl_idname = "view3d.qcd_move_widget"
bl_label = "QCD Move Widget"
@@ -390,6 +393,7 @@ class QCDMoveWidget(Operator):
}
last_type = ''
+ initialized = False
moved = False
def modal(self, context, event):
@@ -424,12 +428,16 @@ class QCDMoveWidget(Operator):
self.mouse_pos = (event.mouse_region_x, event.mouse_region_y)
if not mouse_in_area(self.mouse_pos, self.areas["Main Window"], 50 * scale_factor()):
- bpy.types.SpaceView3D.draw_handler_remove(self._handle, 'WINDOW')
+ if self.initialized:
+ bpy.types.SpaceView3D.draw_handler_remove(self._handle, 'WINDOW')
- if self.moved:
- bpy.ops.ed.undo_push()
+ if self.moved:
+ bpy.ops.ed.undo_push()
- return {'FINISHED'}
+ return {'FINISHED'}
+
+ else:
+ self.initialized = True
elif event.value == 'PRESS' and event.type == 'LEFTMOUSE':
if not mouse_in_area(self.mouse_pos, self.areas["Main Window"], 10 * scale_factor()):
@@ -498,13 +506,14 @@ class QCDMoveWidget(Operator):
"height": 0,
"value": None
}
- account_for_view_bounds(main_window)
- # add main window background to areas
self.areas["Main Window"] = main_window
+ allocate_main_ui(self, context)
+ account_for_view_bounds(main_window)
context.window_manager.modal_handler_add(self)
return {'RUNNING_MODAL'}
+
else:
self.report({'WARNING'}, "View3D not found, cannot run operator")
return {'CANCELLED'}
@@ -646,6 +655,10 @@ def allocate_main_ui(self, context):
self.areas["Button Row 2 B"] = button_row_2_b
+ selected_objects = qcd_operators.get_move_selection()
+ active_object = qcd_operators.get_move_active()
+
+
# BUTTONS
def get_buttons(button_row, row_num):
cur_width_pos = button_row["vert"][0]
@@ -658,8 +671,6 @@ def allocate_main_ui(self, context):
if qcd_slot_name:
qcd_laycol = layer_collections[qcd_slot_name]["ptr"]
collection_objects = qcd_laycol.collection.objects
- selected_objects = qcd_operators.get_move_selection()
- active_object = qcd_operators.get_move_active()
# BUTTON
x = cur_width_pos
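The initialized flag added to QCDMoveWidget above keeps the very first mouse-move event, which may still land outside the widget area, from tearing the draw handler down before the widget has ever been shown. A compressed sketch of that guard in a generic modal draw-handler operator (the widget geometry test and names are assumed, not taken from the patch):

import bpy

def _draw_callback():
    pass  # gpu drawing of the widget would go here

class SketchWidget(bpy.types.Operator):
    bl_idname = "view3d.sketch_widget"
    bl_label = "Sketch Widget"

    _handle = None
    initialized = False

    def modal(self, context, event):
        if event.type == 'MOUSEMOVE':
            outside = event.mouse_region_x < 0 or event.mouse_region_y < 0
            if outside:
                if self.initialized:
                    # only tear down once the widget has actually appeared
                    bpy.types.SpaceView3D.draw_handler_remove(self._handle, 'WINDOW')
                    return {'FINISHED'}
            else:
                self.initialized = True

        elif event.type in {'RIGHTMOUSE', 'ESC'}:
            bpy.types.SpaceView3D.draw_handler_remove(self._handle, 'WINDOW')
            return {'CANCELLED'}

        return {'RUNNING_MODAL'}

    def invoke(self, context, event):
        if context.area.type != 'VIEW_3D':
            self.report({'WARNING'}, "View3D not found, cannot run operator")
            return {'CANCELLED'}
        self._handle = bpy.types.SpaceView3D.draw_handler_add(
            _draw_callback, (), 'WINDOW', 'POST_PIXEL')
        context.window_manager.modal_handler_add(self)
        return {'RUNNING_MODAL'}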
diff --git a/object_collection_manager/qcd_operators.py b/object_collection_manager/qcd_operators.py
index 7330dd0f..b64c87f8 100644
--- a/object_collection_manager/qcd_operators.py
+++ b/object_collection_manager/qcd_operators.py
@@ -45,6 +45,7 @@ from .internals import (
from .operator_utils import (
apply_to_children,
+ select_collection_objects,
)
@@ -81,7 +82,7 @@ class MoveToQCDSlot(Operator):
# adds object to slot
if self.toggle:
if not active_object:
- active_object = selected_objects[0]
+ active_object = tuple(selected_objects)[0]
if not active_object.name in qcd_laycol.collection.objects:
for obj in selected_objects:
@@ -157,19 +158,32 @@ class ViewMoveQCDSlot(Operator):
if modifiers == {"shift"}:
bpy.ops.view3d.view_qcd_slot(slot=self.slot, toggle=True)
- return {'FINISHED'}
-
elif modifiers == {"ctrl"}:
bpy.ops.view3d.move_to_qcd_slot(slot=self.slot, toggle=False)
- return {'FINISHED'}
elif modifiers == {"ctrl", "shift"}:
bpy.ops.view3d.move_to_qcd_slot(slot=self.slot, toggle=True)
- return {'FINISHED'}
+
+ elif modifiers == {"alt"}:
+ select_collection_objects(
+ is_master_collection=False,
+ collection_name=qcd_slots.get_name(self.slot),
+ replace=True,
+ nested=False
+ )
+
+ elif modifiers == {"alt", "shift"}:
+ select_collection_objects(
+ is_master_collection=False,
+ collection_name=qcd_slots.get_name(self.slot),
+ replace=False,
+ nested=False
+ )
else:
bpy.ops.view3d.view_qcd_slot(slot=self.slot, toggle=False)
- return {'FINISHED'}
+
+ return {'FINISHED'}
class ViewQCDSlot(Operator):
'''View objects in QCD slot'''
@@ -287,9 +301,10 @@ class RenumerateQCDSlots(Operator):
bl_label = "Renumber QCD Slots"
bl_description = (
"Renumber QCD slots.\n"
- " * LMB - Renumber (breadth first) starting from the slot designated 1.\n"
- " * Ctrl+LMB - Renumber (depth first) starting from the slot designated 1.\n"
- " * Alt+LMB - Renumber from the beginning"
+ " * LMB - Renumber (breadth first) from slot 1.\n"
+ " * +Ctrl - Linear.\n"
+ " * +Alt - Reset.\n"
+ " * +Shift - Constrain to branch"
)
bl_idname = "view3d.renumerate_qcd_slots"
bl_options = {'REGISTER', 'UNDO'}
@@ -299,14 +314,22 @@ class RenumerateQCDSlots(Operator):
modifiers = get_modifiers(event)
- if modifiers == {'alt'}:
- qcd_slots.renumerate(beginning=True)
+ beginning = False
+ depth_first = False
+ constrain = False
- elif modifiers == {'ctrl'}:
- qcd_slots.renumerate(depth_first=True)
+ if 'alt' in modifiers:
+ beginning=True
- else:
- qcd_slots.renumerate()
+ if 'ctrl' in modifiers:
+ depth_first=True
+
+ if 'shift' in modifiers:
+ constrain=True
+
+ qcd_slots.renumerate(beginning=beginning,
+ depth_first=depth_first,
+ constrain=constrain)
update_property_group(context)
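The rewritten RenumerateQCDSlots.invoke above replaces the exclusive if/elif chain with independent flags, so modifier keys can now be combined. The mapping reduces to the following (get_modifiers is assumed to return a set of modifier names, as elsewhere in the add-on):

def renumerate_flags(modifiers):
    # each modifier toggles its own option, so e.g. Ctrl+Shift yields a
    # depth-first renumbering constrained to the current branch
    return {
        "beginning": 'alt' in modifiers,
        "depth_first": 'ctrl' in modifiers,
        "constrain": 'shift' in modifiers,
    }

assert renumerate_flags({'ctrl', 'shift'}) == {
    "beginning": False, "depth_first": True, "constrain": True}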
diff --git a/object_collection_manager/ui.py b/object_collection_manager/ui.py
index 3bd614a6..4c1eb077 100644
--- a/object_collection_manager/ui.py
+++ b/object_collection_manager/ui.py
@@ -21,6 +21,7 @@
import bpy
from bpy.types import (
+ Menu,
Operator,
Panel,
UIList,
@@ -58,6 +59,7 @@ last_icon_theme_text_sel = None
class CollectionManager(Operator):
+ '''Manage and control collections, with advanced features, in a popup UI'''
bl_label = "Collection Manager"
bl_idname = "view3d.collection_manager"
@@ -111,9 +113,9 @@ class CollectionManager(Operator):
layout.row().separator()
# buttons
- button_row = layout.row()
+ button_row_1 = layout.row()
- op_sec = button_row.row()
+ op_sec = button_row_1.row()
op_sec.alignment = 'LEFT'
collapse_sec = op_sec.row()
@@ -137,11 +139,12 @@ class CollectionManager(Operator):
renum_sec.alignment = 'LEFT'
renum_sec.operator("view3d.renumerate_qcd_slots")
- # filter
- filter_sec = button_row.row()
- filter_sec.alignment = 'RIGHT'
+ # menu & filter
+ right_sec = button_row_1.row()
+ right_sec.alignment = 'RIGHT'
- filter_sec.popover(panel="COLLECTIONMANAGER_PT_display_options",
+ right_sec.menu("VIEW3D_MT_CM_specials_menu")
+ right_sec.popover(panel="COLLECTIONMANAGER_PT_display_options",
text="", icon='FILTER')
mc_box = layout.box()
@@ -156,7 +159,7 @@ class CollectionManager(Operator):
prop = c_icon.operator("view3d.set_active_collection",
text='', icon='GROUP', depress=highlight)
- prop.collection_index = -1
+ prop.is_master_collection = True
prop.collection_name = 'Master Collection'
master_collection_row.separator()
@@ -175,8 +178,12 @@ class CollectionManager(Operator):
row_setcol = global_rto_row.row()
row_setcol.alignment = 'LEFT'
row_setcol.operator_context = 'INVOKE_DEFAULT'
+
selected_objects = get_move_selection()
active_object = get_move_active()
+ CM_UL_items.selected_objects = selected_objects
+ CM_UL_items.active_object = active_object
+
collection = context.view_layer.layer_collection.collection
icon = 'MESH_CUBE'
@@ -185,7 +192,7 @@ class CollectionManager(Operator):
if active_object and active_object.name in collection.objects:
icon = 'SNAP_VOLUME'
- elif not set(selected_objects).isdisjoint(collection.objects):
+ elif not selected_objects.isdisjoint(collection.objects):
icon = 'STICKY_UVS_LOC'
else:
@@ -193,7 +200,7 @@ class CollectionManager(Operator):
prop = row_setcol.operator("view3d.set_collection", text="",
icon=icon, emboss=False)
- prop.collection_index = 0
+ prop.is_master_collection = True
prop.collection_name = 'Master Collection'
copy_icon = 'COPYDOWN'
@@ -295,6 +302,44 @@ class CollectionManager(Operator):
global_rto_row.operator("view3d.un_disable_render_all_collections", text="", icon=icon, depress=depress)
+ if cm.show_holdout:
+ holdout_all_history = rto_history["holdout_all"].get(view_layer.name, [])
+ depress = True if len(holdout_all_history) else False
+ icon = 'HOLDOUT_ON'
+ buffers = [False, False]
+
+ if copy_buffer["RTO"] == "holdout":
+ icon = copy_icon
+ buffers[0] = True
+
+ if swap_buffer["A"]["RTO"] == "holdout":
+ icon = swap_icon
+ buffers[1] = True
+
+ if buffers[0] and buffers[1]:
+ icon = copy_swap_icon
+
+ global_rto_row.operator("view3d.un_holdout_all_collections", text="", icon=icon, depress=depress)
+
+ if cm.show_indirect_only:
+ indirect_all_history = rto_history["indirect_all"].get(view_layer.name, [])
+ depress = True if len(indirect_all_history) else False
+ icon = 'INDIRECT_ONLY_ON'
+ buffers = [False, False]
+
+ if copy_buffer["RTO"] == "indirect":
+ icon = copy_icon
+ buffers[0] = True
+
+ if swap_buffer["A"]["RTO"] == "indirect":
+ icon = swap_icon
+ buffers[1] = True
+
+ if buffers[0] and buffers[1]:
+ icon = copy_swap_icon
+
+ global_rto_row.operator("view3d.un_indirect_only_all_collections", text="", icon=icon, depress=depress)
+
# treeview
layout.row().template_list("CM_UL_items", "",
cm, "cm_list_collection",
@@ -303,17 +348,23 @@ class CollectionManager(Operator):
sort_lock=True)
# add collections
- addcollec_row = layout.row()
- addcollec_row.operator("view3d.add_collection", text="Add Collection",
- icon='COLLECTION_NEW').child = False
+ button_row_2 = layout.row()
+ prop = button_row_2.operator("view3d.add_collection", text="Add Collection",
+ icon='COLLECTION_NEW')
+ prop.child = False
+
+ prop = button_row_2.operator("view3d.add_collection", text="Add SubCollection",
+ icon='COLLECTION_NEW')
+ prop.child = True
- addcollec_row.operator("view3d.add_collection", text="Add SubCollection",
- icon='COLLECTION_NEW').child = True
+
+ button_row_3 = layout.row()
# phantom mode
- phantom_row = layout.row()
+ phantom_mode = button_row_3.row(align=True)
toggle_text = "Disable " if cm.in_phantom_mode else "Enable "
- phantom_row.operator("view3d.toggle_phantom_mode", text=toggle_text+"Phantom Mode")
+ phantom_mode.operator("view3d.toggle_phantom_mode", text=toggle_text+"Phantom Mode")
+ phantom_mode.operator("view3d.apply_phantom_mode", text="", icon='CHECKMARK')
if cm.in_phantom_mode:
view.enabled = False
@@ -368,7 +419,7 @@ class CollectionManager(Operator):
else:
- for rto in ["exclude", "select", "hide", "disable", "render"]:
+ for rto in ["exclude", "select", "hide", "disable", "render", "holdout", "indirect"]:
if new_state[rto] != collection_state[rto]:
if view_layer.name in rto_history[rto]:
del rto_history[rto][view_layer.name]
@@ -428,6 +479,9 @@ class CollectionManager(Operator):
class CM_UL_items(UIList):
last_filter_value = ""
+ selected_objects = set()
+ active_object = None
+
filter_by_selected: BoolProperty(
name="Filter By Selected",
default=False,
@@ -447,8 +501,8 @@ class CM_UL_items(UIList):
view_layer = context.view_layer
laycol = layer_collections[item.name]
collection = laycol["ptr"].collection
- selected_objects = get_move_selection()
- active_object = get_move_active()
+ selected_objects = CM_UL_items.selected_objects
+ active_object = CM_UL_items.active_object
column = layout.column(align=True)
@@ -505,7 +559,7 @@ class CM_UL_items(UIList):
prop = c_icon.operator("view3d.set_active_collection", text='', icon='GROUP',
emboss=highlight, depress=highlight)
- prop.collection_index = laycol["row_index"]
+ prop.is_master_collection = False
prop.collection_name = item.name
if prefs.enable_qcd:
@@ -536,7 +590,7 @@ class CM_UL_items(UIList):
if active_object and active_object.name in collection.objects:
icon = 'SNAP_VOLUME'
- elif not set(selected_objects).isdisjoint(collection.objects):
+ elif not selected_objects.isdisjoint(collection.objects):
icon = 'STICKY_UVS_LOC'
else:
@@ -545,7 +599,7 @@ class CM_UL_items(UIList):
prop = set_obj_col.operator("view3d.set_collection", text="",
icon=icon, emboss=False)
- prop.collection_index = laycol["id"]
+ prop.is_master_collection = False
prop.collection_name = item.name
@@ -557,8 +611,9 @@ class CM_UL_items(UIList):
highlight = bool(exclude_history and exclude_target == item.name)
icon = 'CHECKBOX_DEHLT' if laycol["ptr"].exclude else 'CHECKBOX_HLT'
- row.operator("view3d.exclude_collection", text="", icon=icon,
- emboss=highlight, depress=highlight).name = item.name
+ prop = row.operator("view3d.exclude_collection", text="", icon=icon,
+ emboss=highlight, depress=highlight)
+ prop.name = item.name
if cm.show_selectable:
select_history_base = rto_history["select"].get(view_layer.name, {})
@@ -569,8 +624,9 @@ class CM_UL_items(UIList):
icon = ('RESTRICT_SELECT_ON' if laycol["ptr"].collection.hide_select else
'RESTRICT_SELECT_OFF')
- row.operator("view3d.restrict_select_collection", text="", icon=icon,
- emboss=highlight, depress=highlight).name = item.name
+ prop = row.operator("view3d.restrict_select_collection", text="", icon=icon,
+ emboss=highlight, depress=highlight)
+ prop.name = item.name
if cm.show_hide_viewport:
hide_history_base = rto_history["hide"].get(view_layer.name, {})
@@ -580,8 +636,9 @@ class CM_UL_items(UIList):
highlight = bool(hide_history and hide_target == item.name)
icon = 'HIDE_ON' if laycol["ptr"].hide_viewport else 'HIDE_OFF'
- row.operator("view3d.hide_collection", text="", icon=icon,
- emboss=highlight, depress=highlight).name = item.name
+ prop = row.operator("view3d.hide_collection", text="", icon=icon,
+ emboss=highlight, depress=highlight)
+ prop.name = item.name
if cm.show_disable_viewport:
disable_history_base = rto_history["disable"].get(view_layer.name, {})
@@ -592,8 +649,9 @@ class CM_UL_items(UIList):
icon = ('RESTRICT_VIEW_ON' if laycol["ptr"].collection.hide_viewport else
'RESTRICT_VIEW_OFF')
- row.operator("view3d.disable_viewport_collection", text="", icon=icon,
- emboss=highlight, depress=highlight).name = item.name
+ prop = row.operator("view3d.disable_viewport_collection", text="", icon=icon,
+ emboss=highlight, depress=highlight)
+ prop.name = item.name
if cm.show_render:
render_history_base = rto_history["render"].get(view_layer.name, {})
@@ -604,8 +662,35 @@ class CM_UL_items(UIList):
icon = ('RESTRICT_RENDER_ON' if laycol["ptr"].collection.hide_render else
'RESTRICT_RENDER_OFF')
- row.operator("view3d.disable_render_collection", text="", icon=icon,
- emboss=highlight, depress=highlight).name = item.name
+ prop = row.operator("view3d.disable_render_collection", text="", icon=icon,
+ emboss=highlight, depress=highlight)
+ prop.name = item.name
+
+ if cm.show_holdout:
+ holdout_history_base = rto_history["holdout"].get(view_layer.name, {})
+ holdout_target = holdout_history_base.get("target", "")
+ holdout_history = holdout_history_base.get("history", [])
+
+ highlight = bool(holdout_history and holdout_target == item.name)
+ icon = ('HOLDOUT_ON' if laycol["ptr"].holdout else
+ 'HOLDOUT_OFF')
+
+ prop = row.operator("view3d.holdout_collection", text="", icon=icon,
+ emboss=highlight, depress=highlight)
+ prop.name = item.name
+
+ if cm.show_indirect_only:
+ indirect_history_base = rto_history["indirect"].get(view_layer.name, {})
+ indirect_target = indirect_history_base.get("target", "")
+ indirect_history = indirect_history_base.get("history", [])
+
+ highlight = bool(indirect_history and indirect_target == item.name)
+ icon = ('INDIRECT_ONLY_ON' if laycol["ptr"].indirect_only else
+ 'INDIRECT_ONLY_OFF')
+
+ prop = row.operator("view3d.indirect_only_collection", text="", icon=icon,
+ emboss=highlight, depress=highlight)
+ prop.name = item.name
@@ -615,8 +700,8 @@ class CM_UL_items(UIList):
row.separator()
rm_op = row.row()
- rm_op.operator("view3d.remove_collection", text="", icon='X',
- emboss=False).collection_name = item.name
+ prop = rm_op.operator("view3d.remove_collection", text="", icon='X', emboss=False)
+ prop.collection_name = item.name
if len(data.cm_list_collection) > index + 1:
@@ -730,6 +815,8 @@ class CMDisplayOptionsPanel(Panel):
row.prop(cm, "show_hide_viewport", icon='HIDE_OFF', icon_only=True)
row.prop(cm, "show_disable_viewport", icon='RESTRICT_VIEW_OFF', icon_only=True)
row.prop(cm, "show_render", icon='RESTRICT_RENDER_OFF', icon_only=True)
+ row.prop(cm, "show_holdout", icon='HOLDOUT_ON', icon_only=True)
+ row.prop(cm, "show_indirect_only", icon='INDIRECT_ONLY_ON', icon_only=True)
layout.separator()
@@ -740,6 +827,21 @@ class CMDisplayOptionsPanel(Panel):
row.prop(cm, "align_local_ops")
+class SpecialsMenu(Menu):
+ bl_label = "Specials"
+ bl_idname = "VIEW3D_MT_CM_specials_menu"
+
+ def draw(self, context):
+ layout = self.layout
+
+ prop = layout.operator("view3d.remove_empty_collections")
+ prop.without_objects = False
+
+ prop = layout.operator("view3d.remove_empty_collections",
+ text="Purge All Collections Without Objects")
+ prop.without_objects = True
+
+
def view3d_header_qcd_slots(self, context):
layout = self.layout
@@ -752,14 +854,15 @@ def view3d_header_qcd_slots(self, context):
update_collection_tree(context)
+ selected_objects = get_move_selection()
+ active_object = get_move_active()
+
for x in range(20):
qcd_slot_name = qcd_slots.get_name(str(x+1))
if qcd_slot_name:
qcd_laycol = layer_collections[qcd_slot_name]["ptr"]
collection_objects = qcd_laycol.collection.objects
- selected_objects = get_move_selection()
- active_object = get_move_active()
icon_value = 0
@@ -768,9 +871,8 @@ def view3d_header_qcd_slots(self, context):
active_object.name in collection_objects):
icon = 'LAYER_ACTIVE'
-
# if there are selected objects use LAYER_ACTIVE
- elif not set(selected_objects).isdisjoint(collection_objects):
+ elif not selected_objects.isdisjoint(collection_objects):
icon = 'LAYER_USED'
# If there are objects use LAYER_USED
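A recurring change in ui.py above is that get_move_selection() and get_move_active() are evaluated once per redraw and cached on CM_UL_items (and reused by the QCD header), instead of being recomputed for every list row or slot button. A minimal sketch of that caching pattern, with hypothetical names and a simplified row:

import bpy
from bpy.types import UIList

class SKETCH_UL_collections(UIList):
    # filled once per redraw by the draw() that owns the template_list,
    # so draw_item() does not rebuild the selection set for every row
    selected_objects = set()
    active_object = None

    def draw_item(self, context, layout, data, item, icon, active_data, active_propname, index):
        collection = bpy.data.collections.get(item.name)
        row_icon = 'MESH_CUBE'
        if collection is not None:
            if SKETCH_UL_collections.active_object and \
                    SKETCH_UL_collections.active_object.name in collection.objects:
                row_icon = 'SNAP_VOLUME'
            elif not SKETCH_UL_collections.selected_objects.isdisjoint(collection.objects):
                row_icon = 'STICKY_UVS_LOC'
        layout.label(text=item.name, icon=row_icon)

# In the owning draw(), before template_list():
#     SKETCH_UL_collections.selected_objects = set(context.selected_objects)
#     SKETCH_UL_collections.active_object = context.view_layer.objects.active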
diff --git a/object_skinify.py b/object_skinify.py
index 177f8de9..f102d8cf 100644
--- a/object_skinify.py
+++ b/object_skinify.py
@@ -551,8 +551,8 @@ def generate_mesh(shape_object, size, thickness=0.8, finger_thickness=0.25, sub_
# object mode apply all modifiers
if apply_mod:
- bpy.ops.object.modifier_apply(override, apply_as='DATA', modifier="Skin")
- bpy.ops.object.modifier_apply(override, apply_as='DATA', modifier="Subsurf")
+ bpy.ops.object.modifier_apply(override, modifier="Skin")
+ bpy.ops.object.modifier_apply(override, modifier="Subsurf")
return {'FINISHED'}
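The object_skinify change above only drops the apply_as keyword, which newer Blender releases removed from modifier_apply; applying to the mesh data is now the default. A small sketch of the updated call, reusing the override-dict style shown in the hunk (object and modifier names are assumed):

import bpy

def apply_named_modifiers(obj, names=("Skin", "Subsurf")):
    override = {"object": obj, "active_object": obj}
    for name in names:
        if name in obj.modifiers:
            # no apply_as keyword anymore; the modifier is applied to the data
            bpy.ops.object.modifier_apply(override, modifier=name)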
diff --git a/oscurart_tools/__init__.py b/oscurart_tools/__init__.py
index 0d340cc8..f630b456 100644
--- a/oscurart_tools/__init__.py
+++ b/oscurart_tools/__init__.py
@@ -76,7 +76,7 @@ class VIEW3D_MT_edit_mesh_oscurarttools(Menu):
layout.operator("mesh.uv_island_copy")
layout.operator("mesh.uv_island_paste")
layout.operator("mesh.select_doubles")
- layout.operator("mesh.print_uv_stats")
+ layout.operator("mesh.print_uv_stats")
layout.separator()
layout.operator("image.reload_images_osc")
layout.operator("file.save_incremental_backup")
@@ -124,7 +124,7 @@ class VIEW3D_MT_object_oscurarttools(Menu):
layout.operator("object.search_and_select_osc")
layout.operator("object.shape_key_to_objects_osc")
layout.operator("mesh.apply_linked_meshes")
- layout.operator("mesh.print_uv_stats")
+ layout.operator("mesh.print_uv_stats")
layout.separator()
layout.operator("image.reload_images_osc")
layout.operator("file.save_incremental_backup")
diff --git a/oscurart_tools/mesh/overlap_uvs.py b/oscurart_tools/mesh/overlap_uvs.py
index d0d13752..8b2a893c 100644
--- a/oscurart_tools/mesh/overlap_uvs.py
+++ b/oscurart_tools/mesh/overlap_uvs.py
@@ -27,66 +27,57 @@ from bpy.props import (
FloatProperty,
EnumProperty,
)
-import os
-import bmesh
-
-C = bpy.context
-D = bpy.data
+import bmesh
# -------------------------- OVERLAP UV ISLANDS
def defCopyUvsIsland(self, context):
- bpy.ops.object.mode_set(mode="OBJECT")
- global obLoop
- global islandFaces
- obLoop = []
- islandFaces = []
- for poly in bpy.context.object.data.polygons:
- if poly.select:
- islandFaces.append(poly.index)
- for li in poly.loop_indices:
- obLoop.append(li)
-
- bpy.ops.object.mode_set(mode="EDIT")
+ global islandSet
+ islandSet = {}
+ islandSet["Loop"] = []
+
+ bpy.context.scene.tool_settings.use_uv_select_sync = True
+ bpy.ops.uv.select_linked()
+ bm = bmesh.from_edit_mesh(bpy.context.object.data)
+ uv_lay = bm.loops.layers.uv.active
+ faceSel = 0
+ for face in bm.faces:
+ if face.select:
+ faceSel +=1
+ for loop in face.loops:
+ islandSet["Loop"].append(loop[uv_lay].uv.copy())
+ islandSet["Size"] = faceSel
def defPasteUvsIsland(self, uvOffset, rotateUv,context):
- bpy.ops.object.mode_set(mode="OBJECT")
- selPolys = [poly.index for poly in bpy.context.object.data.polygons if poly.select]
-
- for island in selPolys:
- bpy.ops.object.mode_set(mode="EDIT")
+ bm = bmesh.from_edit_mesh(bpy.context.object.data)
+ bpy.context.scene.tool_settings.use_uv_select_sync = True
+ pickedFaces = [face for face in bm.faces if face.select]
+ for face in pickedFaces:
bpy.ops.mesh.select_all(action="DESELECT")
- bpy.ops.object.mode_set(mode="OBJECT")
- bpy.context.object.data.polygons[island].select = True
- bpy.ops.object.mode_set(mode="EDIT")
- bpy.ops.mesh.select_linked()
- bpy.ops.object.mode_set(mode="OBJECT")
- TobLoop = []
- TislandFaces = []
- for poly in bpy.context.object.data.polygons:
- if poly.select:
- TislandFaces.append(poly.index)
- for li in poly.loop_indices:
- TobLoop.append(li)
-
- for source,target in zip(range(min(obLoop),max(obLoop)+1),range(min(TobLoop),max(TobLoop)+1)):
- bpy.context.object.data.uv_layers.active.data[target].uv = bpy.context.object.data.uv_layers.active.data[source].uv + Vector((uvOffset,0))
-
- bpy.ops.object.mode_set(mode="EDIT")
-
- if rotateUv:
- bpy.ops.object.mode_set(mode="OBJECT")
- for poly in selPolys:
- bpy.context.object.data.polygons[poly].select = True
- bpy.ops.object.mode_set(mode="EDIT")
- bm = bmesh.from_edit_mesh(bpy.context.object.data)
- bmesh.ops.reverse_uvs(bm, faces=[f for f in bm.faces if f.select])
- bmesh.ops.rotate_uvs(bm, faces=[f for f in bm.faces if f.select])
- #bmesh.update_edit_mesh(bpy.context.object.data, tessface=False, destructive=False)
-
-
+ face.select=True
+ bmesh.update_edit_mesh(bpy.context.object.data)
+ bpy.ops.uv.select_linked()
+ uv_lay = bm.loops.layers.uv.active
+ faceSel = 0
+ for face in bm.faces:
+ if face.select:
+ faceSel +=1
+ i = 0
+ if faceSel == islandSet["Size"]:
+ for face in bm.faces:
+ if face.select:
+ for loop in face.loops:
+ loop[uv_lay].uv = islandSet["Loop"][i] if uvOffset == False else islandSet["Loop"][i]+Vector((1,0))
+ i += 1
+ else:
+ print("the copied island has a different number of faces")
+
+ if rotateUv:
+ bpy.ops.object.mode_set(mode="EDIT")
+ bmesh.ops.reverse_uvs(bm, faces=[f for f in bm.faces if f.select])
+ bmesh.ops.rotate_uvs(bm, faces=[f for f in bm.faces if f.select])
class CopyUvIsland(Operator):
"""Copy Uv Island"""
@@ -119,6 +110,7 @@ class PasteUvIsland(Operator):
name="Rotate Uv Corner",
default=False
)
+
@classmethod
def poll(cls, context):
return (context.active_object is not None and
diff --git a/oscurart_tools/mesh/print_uv_stats.py b/oscurart_tools/mesh/print_uv_stats.py
index 7488456d..6cc673a9 100644
--- a/oscurart_tools/mesh/print_uv_stats.py
+++ b/oscurart_tools/mesh/print_uv_stats.py
@@ -70,14 +70,14 @@ def calcMeshArea(ob):
polyArea = 0
for poly in ob.data.polygons:
polyArea += poly.area
- ta = "UvGain: %s%s || " % (round(totalArea * 100),"%")
+ ta = "UvGain: %s%s || " % (round(totalArea * 100),"%")
ma = "MeshArea: %s || " % (polyArea)
pg = "PixelsGain: %s || " % (round(totalArea * (pixels[0] * pixels[1])))
pl = "PixelsLost: %s || " % ((pixels[0]*pixels[1]) - round(totalArea * (pixels[0] * pixels[1])))
- tx = "Texel: %s pix/meter" % (round(sqrt(totalArea * pixels[0] * pixels[1] / polyArea)))
- GlobLog = ta+ma+pg+pl+tx
+ tx = "Texel: %s pix/meter" % (round(sqrt(totalArea * pixels[0] * pixels[1] / polyArea)))
+ GlobLog = ta+ma+pg+pl+tx
+
-
class uvStats(bpy.types.Operator):
@@ -90,7 +90,7 @@ class uvStats(bpy.types.Operator):
def poll(cls, context):
return context.active_object is not None
- def execute(self, context):
+ def execute(self, context):
if round(
bpy.context.object.scale.x,
2) == 1 and round(
@@ -101,25 +101,25 @@ class uvStats(bpy.types.Operator):
if setImageRes(bpy.context.object):
makeTessellate(bpy.context.object)
calcArea()
- calcMeshArea(bpy.context.object)
+ calcMeshArea(bpy.context.object)
else:
print("Warning: Non Uniform Scale Object")
-
+
copyOb = bpy.context.object.copy()
copyMe = bpy.context.object.data.copy()
bpy.context.scene.collection.objects.link(copyOb)
copyOb.data = copyMe
bpy.ops.object.select_all(action="DESELECT")
copyOb.select_set(1)
- bpy.ops.object.transform_apply()
-
+ bpy.ops.object.transform_apply()
+
if setImageRes(copyOb):
makeTessellate(copyOb)
calcArea()
calcMeshArea(copyOb)
-
+
bpy.data.objects.remove(copyOb)
- bpy.data.meshes.remove(copyMe)
-
- self.report({'INFO'}, GlobLog)
- return {'FINISHED'} \ No newline at end of file
+ bpy.data.meshes.remove(copyMe)
+
+ self.report({'INFO'}, GlobLog)
+ return {'FINISHED'}
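The texel figure reported by print_uv_stats above comes from the UV area gain, the image resolution and the mesh surface area. Reduced to a standalone helper (the function name and example values are illustrative):

from math import sqrt

def texel_density(uv_area_fraction, pixels_x, pixels_y, mesh_area):
    # pixels covered by the UV layout divided by world-space area, expressed
    # as pixels per meter of edge length (hence the square root)
    return sqrt(uv_area_fraction * pixels_x * pixels_y / mesh_area)

# e.g. a layout using 50% of a 2048x2048 map on 4 m^2 of mesh:
# texel_density(0.5, 2048, 2048, 4.0) ~= 724 pix/meter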
diff --git a/oscurart_tools/render/material_overrides.py b/oscurart_tools/render/material_overrides.py
index db200572..d8aade82 100644
--- a/oscurart_tools/render/material_overrides.py
+++ b/oscurart_tools/render/material_overrides.py
@@ -11,13 +11,13 @@ def ApplyOverrides(dummy):
global obDict
for override in bpy.context.scene.ovlist:
-
+
# set collections clean name
- collClean = override.colloverride
+ collClean = override.colloverride
obClean = override.oboverride
-
-
-
+
+
+
if collClean != None:
for ob in collClean.all_objects:
if ob.type == "MESH": #si es un mesh
@@ -28,22 +28,22 @@ def ApplyOverrides(dummy):
for iob in ob.instance_collection.all_objects:
if iob.type == "MESH":
if not iob.hide_viewport and not iob.hide_render:
- obDict.append([iob,[mat for mat in iob.data.materials]])
+ obDict.append([iob,[mat for mat in iob.data.materials]])
else:
- obDict.append([obClean,[mat for mat in obClean.data.materials]])
-
-
+ obDict.append([obClean,[mat for mat in obClean.data.materials]])
+
+
for override in bpy.context.scene.ovlist:
-
+
# set collections clean name
- collClean = override.colloverride
- # set material clean name
- matClean = override.matoverride
- # set objeto clean name
- obClean = override.oboverride
-
+ collClean = override.colloverride
+ # set material clean name
+ matClean = override.matoverride
+ # set objeto clean name
+ obClean = override.oboverride
+
print(matClean)
-
+
if collClean != None:
for ob in collClean.all_objects:
if ob.type == "MESH":
@@ -56,12 +56,12 @@ def ApplyOverrides(dummy):
if iob.type == "MESH":
if not iob.hide_viewport and not iob.hide_render:
for i,mat in enumerate(iob.data.materials):
- iob.data.materials[i] = matClean
+ iob.data.materials[i] = matClean
else:
if obClean.type == "MESH":
- if not obClean.hide_viewport and not obClean.hide_render:
- for i,mat in enumerate(obClean.data.materials):
- obClean.data.materials[i] = matClean
+ if not obClean.hide_viewport and not obClean.hide_render:
+ for i,mat in enumerate(obClean.data.materials):
+ obClean.data.materials[i] = matClean
@persistent
@@ -79,22 +79,22 @@ def RestoreOverrides(dummy):
-class OscOverridesProp(bpy.types.PropertyGroup):
+class OscOverridesProp(bpy.types.PropertyGroup):
colloverride: bpy.props.PointerProperty(
name="Collection Override",
type=bpy.types.Collection,
description="All objects in this collection will be override",
- )
+ )
oboverride: bpy.props.PointerProperty(
name="Object Override",
type=bpy.types.Object,
description="Only this object will be override.",
- )
+ )
matoverride: bpy.props.PointerProperty(
name="Material Override",
type=bpy.types.Material,
description="Material for override objects",
- )
+ )
bpy.utils.register_class(OscOverridesProp)
bpy.types.Scene.ovlist = bpy.props.CollectionProperty(type=OscOverridesProp)
@@ -118,9 +118,9 @@ class OVERRIDES_PT_OscOverridesGUI(bpy.types.Panel):
col.operator("render.overrides_transfer")
for i, m in enumerate(bpy.context.scene.ovlist):
colrow = col.row(align=1)
- colrow.prop(m, "colloverride", text="")
- colrow.prop(m, "oboverride", text="")
- colrow.prop(m, "matoverride", text="")
+ colrow.prop(m, "colloverride", text="")
+ colrow.prop(m, "oboverride", text="")
+ colrow.prop(m, "matoverride", text="")
if i != len(bpy.context.scene.ovlist) - 1:
pa = colrow.operator(
"ovlist.move_down",
diff --git a/oscurart_tools/render/render_tokens.py b/oscurart_tools/render/render_tokens.py
index 3ae7bf5d..69c79ad8 100644
--- a/oscurart_tools/render/render_tokens.py
+++ b/oscurart_tools/render/render_tokens.py
@@ -30,19 +30,19 @@ def replaceTokens (dummy):
"$Scene":bpy.context.scene.name,
"$File":os.path.basename(bpy.data.filepath).split(".")[0],
"$ViewLayer":bpy.context.view_layer.name,
- "$Camera":bpy.context.scene.camera.name}
+ "$Camera": "NoCamera" if bpy.context.scene.camera == None else bpy.context.scene.camera.name}
renpath = bpy.context.scene.render.filepath
-
+
nodeDict = []
#compositor nodes
if bpy.context.scene.use_nodes:
for node in bpy.context.scene.node_tree.nodes:
if node.type == "OUTPUT_FILE":
- nodeDict.append([node,node.base_path])
+ nodeDict.append([node,node.base_path])
node.base_path = node.base_path.replace("$Scene",tokens["$Scene"]).replace("$File",tokens["$File"]).replace("$ViewLayer",tokens["$ViewLayer"]).replace("$Camera",tokens["$Camera"])
-
-
+
+
bpy.context.scene.render.filepath = renpath.replace("$Scene",tokens["$Scene"]).replace("$File",tokens["$File"]).replace("$ViewLayer",tokens["$ViewLayer"]).replace("$Camera",tokens["$Camera"])
print(bpy.context.scene.render.filepath)
@@ -52,7 +52,7 @@ def replaceTokens (dummy):
def restoreTokens (dummy):
global renpath
bpy.context.scene.render.filepath = renpath
-
+
#restore nodes
for node in nodeDict:
node[0].base_path = node[1]
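The render_tokens change above makes the $Camera token safe for scenes without a camera by substituting "NoCamera" instead of dereferencing None. A condensed sketch of the token expansion (the function name and example path are illustrative):

import os
import bpy

def expand_render_tokens(path):
    scene = bpy.context.scene
    tokens = {
        "$Scene": scene.name,
        "$File": os.path.basename(bpy.data.filepath).split(".")[0],
        "$ViewLayer": bpy.context.view_layer.name,
        # scenes without a camera no longer raise AttributeError
        "$Camera": "NoCamera" if scene.camera is None else scene.camera.name,
    }
    for token, value in tokens.items():
        path = path.replace(token, value)
    return path

# e.g. expand_render_tokens("//renders/$Scene/$Camera/") -> "//renders/Scene/NoCamera/"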
diff --git a/power_sequencer/operators/scene_create_from_selection.py b/power_sequencer/operators/scene_create_from_selection.py
index 57c6a6df..b615dae9 100644
--- a/power_sequencer/operators/scene_create_from_selection.py
+++ b/power_sequencer/operators/scene_create_from_selection.py
@@ -53,7 +53,7 @@ class POWER_SEQUENCER_OT_scene_create_from_selection(bpy.types.Operator):
def execute(self, context):
start_scene_name = context.scene.name
-
+
if len(context.selected_sequences) != 0:
selection = context.selected_sequences[:]
selection_start_frame = min(
@@ -63,11 +63,11 @@ class POWER_SEQUENCER_OT_scene_create_from_selection(bpy.types.Operator):
# Create new scene for the scene strip
bpy.ops.scene.new(type="FULL_COPY")
-
+
context.window.scene.name = context.selected_sequences[0].name
new_scene_name = context.window.scene.name
-
-
+
+
###after full copy also unselected strips are in the sequencer... Delete those strips
bpy.ops.sequencer.select_all(action="INVERT")
bpy.ops.power_sequencer.delete_direct()
@@ -80,7 +80,7 @@ class POWER_SEQUENCER_OT_scene_create_from_selection(bpy.types.Operator):
bpy.ops.sequencer.select_all()
bpy.ops.power_sequencer.preview_to_selection()
- # Back to start scene
+ # Back to start scene
bpy.context.window.scene = bpy.data.scenes[start_scene_name]
bpy.ops.power_sequencer.delete_direct()
diff --git a/power_sequencer/scripts/BPSRender/bpsrender/__init__.py b/power_sequencer/scripts/BPSRender/bpsrender/__init__.py
index 35a40273..f14cfb6a 100644
--- a/power_sequencer/scripts/BPSRender/bpsrender/__init__.py
+++ b/power_sequencer/scripts/BPSRender/bpsrender/__init__.py
@@ -14,4 +14,3 @@
# You should have received a copy of the GNU General Public License along with Power Sequencer. If
# not, see <https://www.gnu.org/licenses/>.
#
-
diff --git a/presets/interface_theme/deep_grey.xml b/presets/interface_theme/deep_grey.xml
index b6ddb751..951b4875 100644
--- a/presets/interface_theme/deep_grey.xml
+++ b/presets/interface_theme/deep_grey.xml
@@ -107,7 +107,7 @@
</wcol_text>
<wcol_option>
<ThemeWidgetColors
- outline="#e6e6e6"
+ outline="#cccccc"
inner="#47474700"
inner_sel="#cbcbcbff"
item="#000000ff"
@@ -502,7 +502,7 @@
text_hi="#fcfcfc"
header="#474747ff"
header_text="#d9d9d9"
- header_text_hi="#FCFCFC"
+ header_text_hi="#fcfcfc"
button="#474747ff"
button_title="#cccccc"
button_text="#b8b8b8"
@@ -605,7 +605,7 @@
text_hi="#ffffff"
header="#474747ff"
header_text="#d9d9d9"
- header_text_hi="#FCFCFC"
+ header_text_hi="#fcfcfc"
button="#474747ff"
button_title="#cccccc"
button_text="#b8b8b8"
@@ -680,7 +680,7 @@
text_hi="#fcfcfc"
header="#474747ff"
header_text="#d9d9d9"
- header_text_hi="#FCFCFC"
+ header_text_hi="#fcfcfc"
button="#474747ff"
button_title="#cccccc"
button_text="#b8b8b8"
@@ -823,7 +823,7 @@
text_hi="#ffffff"
header="#474747ff"
header_text="#d9d9d9"
- header_text_hi="#FCFCFC"
+ header_text_hi="#fcfcfc"
button="#474747ff"
button_title="#cccccc"
button_text="#b8b8b8"
@@ -964,7 +964,7 @@
text_hi="#d9d9d9"
header="#474747ff"
header_text="#d9d9d9"
- header_text_hi="#FCFCFC"
+ header_text_hi="#fcfcfc"
button="#47474700"
button_title="#cccccc"
button_text="#b8b8b8"
@@ -1204,7 +1204,7 @@
text_hi="#ffffff"
header="#474747ff"
header_text="#d9d9d9"
- header_text_hi="#FCFCFC"
+ header_text_hi="#fcfcfc"
button="#474747ff"
button_title="#cccccc"
button_text="#b8b8b8"
@@ -1486,4 +1486,4 @@
</ThemeFontStyle>
</widget>
</ThemeStyle>
-</bpy>
+</bpy> \ No newline at end of file
diff --git a/render_povray/__init__.py b/render_povray/__init__.py
index 7a6332ee..fded5cff 100644
--- a/render_povray/__init__.py
+++ b/render_povray/__init__.py
@@ -25,16 +25,16 @@ Scene Description Language. The script has been split in as few files
as possible :
___init__.py :
- Initialize variables
+ Initialize properties
update_files.py
- Update new variables to values from older API. This file needs an update.
+ Update new variables to values from older API. This file needs an update
ui.py :
- Provide property buttons for the user to set up the variables.
+ Provide property buttons for the user to set up the variables
primitives.py :
- Display some POV native primitives in 3D view for input and output.
+ Display some POV native primitives in 3D view for input and output
shading.py
Translate shading properties to declared textures at the top of a pov file
@@ -50,7 +50,7 @@ render.py :
Along these essential files also coexist a few additional libraries to help make
-Blender stand up to other POV IDEs such as povwin or QTPOV.
+Blender stand up to other POV IDEs such as povwin or QTPOV
presets :
Material (sss)
apple.py ; chicken.py ; cream.py ; Ketchup.py ; marble.py ;
@@ -64,11 +64,14 @@ Blender stand up to other POV IDEs such as povwin or QTPOV.
01_Clear_Blue_Sky.py ; 02_Partly_Hazy_Sky.py ; 03_Overcast_Sky.py ;
04_Cartoony_Sky.py ; 05_Under_Water.py ;
Light
- 01_(5400K)_Direct_Sun.py ; 02_(5400K)_High_Noon_Sun.py ;
+ 01_(4800K)_Direct_Sun.py ;
+ 02_(5400K)_High_Noon_Sun.py ;
03_(6000K)_Daylight_Window.py ;
04_(6000K)_2500W_HMI_(Halogen_Metal_Iodide).py ;
- 05_(4000K)_100W_Metal_Halide.py ; 06_(3200K)_100W_Quartz_Halogen.py ;
- 07_(2850K)_100w_Tungsten.py ; 08_(2600K)_40w_Tungsten.py ;
+ 05_(4000K)_100W_Metal_Halide.py ;
+ 06_(3200K)_100W_Quartz_Halogen.py ;
+ 07_(2850K)_100w_Tungsten.py ;
+ 08_(2600K)_40w_Tungsten.py ;
09_(5000K)_75W_Full_Spectrum_Fluorescent_T12.py ;
10_(4300K)_40W_Vintage_Fluorescent_T12.py ;
11_(5000K)_18W_Standard_Fluorescent_T8 ;
@@ -78,10 +81,13 @@ Blender stand up to other POV IDEs such as povwin or QTPOV.
15_(3200K)_40W_Induction_Fluorescent.py ;
16_(2100K)_150W_High_Pressure_Sodium.py ;
17_(1700K)_135W_Low_Pressure_Sodium.py ;
- 18_(6800K)_175W_Mercury_Vapor.py ; 19_(5200K)_700W_Carbon_Arc.py ;
- 20_(6500K)_15W_LED_Spot.py ; 21_(2700K)_7W_OLED_Panel.py ;
+ 18_(6800K)_175W_Mercury_Vapor.py ;
+ 19_(5200K)_700W_Carbon_Arc.py ;
+ 20_(6500K)_15W_LED_Spot.py ;
+ 21_(2700K)_7W_OLED_Panel.py ;
22_(30000K)_40W_Black_Light_Fluorescent.py ;
- 23_(30000K)_40W_Black_Light_Bulb.py; 24_(1850K)_Candle.py
+ 23_(30000K)_40W_Black_Light_Bulb.py;
+ 24_(1850K)_Candle.py
templates:
abyss.pov ; biscuit.pov ; bsp_Tango.pov ; chess2.pov ;
cornell.pov ; diffract.pov ; diffuse_back.pov ; float5 ;
@@ -98,20 +104,23 @@ bl_info = {
"Bastien Montagne, "
"Constantin Rahn, "
"Silvio Falcinelli",
- "version": (0, 1, 0),
+ "version": (0, 1, 1),
"blender": (2, 81, 0),
"location": "Render Properties > Render Engine > Persistence of Vision",
"description": "Persistence of Vision integration for blender",
"doc_url": "{BLENDER_MANUAL_URL}/addons/render/povray.html",
"category": "Render",
+ "warning": "Under active development, seeking co-maintainer(s)",
}
if "bpy" in locals():
import importlib
importlib.reload(ui)
+ importlib.reload(nodes)
importlib.reload(render)
importlib.reload(shading)
+ importlib.reload(primitives)
importlib.reload(update_files)
else:
@@ -121,13 +130,18 @@ else:
import nodeitems_utils # for Nodes
from nodeitems_utils import NodeCategory, NodeItem # for Nodes
from bl_operators.presets import AddPresetBase
- from bpy.types import AddonPreferences, PropertyGroup
+ from bpy.types import (
+ AddonPreferences,
+ PropertyGroup,
+ NodeSocket,
+ )
+
from bpy.props import (
+ FloatVectorProperty,
StringProperty,
BoolProperty,
IntProperty,
FloatProperty,
- FloatVectorProperty,
EnumProperty,
PointerProperty,
CollectionProperty,
@@ -137,39 +151,94 @@ else:
def string_strip_hyphen(name):
- """Remove hyphen characters from a string to avoid POV errors."""
+ """Remove hyphen characters from a string to avoid POV errors"""
return name.replace("-", "")
+def pov_context_tex_datablock(context):
+ """Recreate the texture context type that was deprecated in Blender 2.8"""
+
+ idblock = context.brush
+ if idblock and context.scene.texture_context == 'OTHER':
+ return idblock
+
+ # idblock = bpy.context.active_object.active_material
+ idblock = context.view_layer.objects.active.active_material
+ if idblock and context.scene.texture_context == 'MATERIAL':
+ return idblock
+
+ idblock = context.world
+ if idblock and context.scene.texture_context == 'WORLD':
+ return idblock
+
+ idblock = context.light
+ if idblock and context.scene.texture_context == 'LIGHT':
+ return idblock
+
+ if context.particle_system and context.scene.texture_context == 'PARTICLES':
+ idblock = context.particle_system.settings
+
+ return idblock
+
+ idblock = context.line_style
+ if idblock and context.scene.texture_context == 'LINESTYLE':
+ return idblock
+
+def brush_texture_update(self, context):
+
+ """Brush texture rolldown must show active slot texture props"""
+ idblock = pov_context_tex_datablock(context)
+ if idblock is not None:
+ #mat = context.view_layer.objects.active.active_material
+ idblock = pov_context_tex_datablock(context)
+ slot = idblock.pov_texture_slots[idblock.pov.active_texture_index]
+ tex = slot.texture
+
+ if tex:
+ # Switch paint brush to active texture so slot and settings remain contextual
+ bpy.context.tool_settings.image_paint.brush.texture = bpy.data.textures[tex]
+ bpy.context.tool_settings.image_paint.brush.mask_texture = bpy.data.textures[tex]
def active_texture_name_from_uilist(self, context):
- mat = context.scene.view_layers["View Layer"].objects.active.active_material
- index = mat.pov.active_texture_index
- name = mat.pov_texture_slots[index].name
- newname = mat.pov_texture_slots[index].texture
- tex = bpy.data.textures[name]
- tex.name = newname
- mat.pov_texture_slots[index].name = newname
+
+ idblock = pov_context_tex_datablock(context)
+ #mat = context.view_layer.objects.active.active_material
+ if idblock is not None:
+ index = idblock.pov.active_texture_index
+ name = idblock.pov_texture_slots[index].name
+ newname = idblock.pov_texture_slots[index].texture
+ tex = bpy.data.textures[name]
+ tex.name = newname
+ idblock.pov_texture_slots[index].name = newname
def active_texture_name_from_search(self, context):
- mat = context.scene.view_layers["View Layer"].objects.active.active_material
- index = mat.pov.active_texture_index
- name = mat.pov_texture_slots[index].texture_search
+ """Texture rolldown to change the data linked by an existing texture"""
+ idblock = pov_context_tex_datablock(context)
+ #mat = context.view_layer.objects.active.active_material
+ if idblock is not None:
+ index = idblock.pov.active_texture_index
+ slot = idblock.pov_texture_slots[index]
+ name = slot.texture_search
+
try:
tex = bpy.data.textures[name]
- mat.pov_texture_slots[index].name = name
- mat.pov_texture_slots[index].texture = name
+ slot.name = name
+ slot.texture = name
+ # Switch paint brush to this texture so settings remain contextual
+ #bpy.context.tool_settings.image_paint.brush.texture = tex
+ #bpy.context.tool_settings.image_paint.brush.mask_texture = tex
except:
pass
+
###############################################################################
# Scene POV properties.
###############################################################################
class RenderPovSettingsScene(PropertyGroup):
- """Declare scene level properties controllable in UI and translated to POV."""
+ """Declare scene level properties controllable in UI and translated to POV"""
# Linux SDL-window enable
sdl_window_enable: BoolProperty(
@@ -770,7 +839,7 @@ class RenderPovSettingsScene(PropertyGroup):
name="Error Bound",
description="One of the two main speed/quality tuning values, "
"lower values are more accurate",
- min=0.0, max=1000.0, soft_min=0.1, soft_max=10.0, default=1.8
+ min=0.0, max=1000.0, soft_min=0.1, soft_max=10.0, default=10.0
)
radio_gray_threshold: FloatProperty(
@@ -837,14 +906,15 @@ class RenderPovSettingsScene(PropertyGroup):
name="Pretrace Start",
description="Fraction of the screen width which sets the size of the "
"blocks in the mosaic preview first pass",
- min=0.01, max=1.00, soft_min=0.02, soft_max=1.0, default=0.08
+ min=0.005, max=1.00, soft_min=0.02, soft_max=1.0, default=0.04
)
-
+ # XXX TODO set automatically to pretrace_end = 8 / max (image_width, image_height)
+ # for non advanced mode
radio_pretrace_end: FloatProperty(
name="Pretrace End",
description="Fraction of the screen width which sets the size of the blocks "
"in the mosaic preview last pass",
- min=0.000925, max=1.00, soft_min=0.01, soft_max=1.00, default=0.04, precision=3
+ min=0.000925, max=1.00, soft_min=0.01, soft_max=1.00, default=0.004, precision=3
)
###############################################################################
@@ -856,19 +926,28 @@ class MaterialTextureSlot(PropertyGroup):
bl_idname="pov_texture_slots",
bl_description="Texture_slots from Blender-2.79",
+ # Adding a "real" texture datablock as property is not possible
+ # (or at least not easy through a dynamically populated EnumProperty).
+ # That's why we'll use a prop_search() UILayout function in ui.py.
+ # So we'll assign the name of the needed texture datablock to the below StringProperty.
texture : StringProperty(update=active_texture_name_from_uilist)
- texture_search : StringProperty(update=active_texture_name_from_search)
+ # and use another temporary StringProperty to change the linked data
+ texture_search : StringProperty(
+ name="",
+ update = active_texture_name_from_search,
+ description = "Browse Texture to be linked",
+ )
alpha_factor: FloatProperty(
name="Alpha",
description="Amount texture affects alpha",
- default = 0.0,
+ default = 1.0,
)
ambient_factor: FloatProperty(
name="",
description="Amount texture affects ambient",
- default = 0.0,
+ default = 1.0,
)
bump_method: EnumProperty(
@@ -897,49 +976,49 @@ class MaterialTextureSlot(PropertyGroup):
density_factor: FloatProperty(
name="",
description="Amount texture affects density",
- default = 0.0,
+ default = 1.0,
)
diffuse_color_factor: FloatProperty(
name="",
description="Amount texture affects diffuse color",
- default = 0.0,
+ default = 1.0,
)
diffuse_factor: FloatProperty(
name="",
description="Amount texture affects diffuse reflectivity",
- default = 0.0,
+ default = 1.0,
)
displacement_factor: FloatProperty(
name="",
description="Amount texture displaces the surface",
- default = 0.0,
+ default = 0.2,
)
emission_color_factor: FloatProperty(
name="",
description="Amount texture affects emission color",
- default = 0.0,
+ default = 1.0,
)
emission_factor: FloatProperty(
name="",
description="Amount texture affects emission",
- default = 0.0,
+ default = 1.0,
)
emit_factor: FloatProperty(
name="",
description="Amount texture affects emission",
- default = 0.0,
+ default = 1.0,
)
hardness_factor: FloatProperty(
name="",
description="Amount texture affects hardness",
- default = 0.0,
+ default = 1.0,
)
mapping: EnumProperty(
@@ -985,13 +1064,13 @@ class MaterialTextureSlot(PropertyGroup):
mirror_factor: FloatProperty(
name="",
description="Amount texture affects mirror color",
- default = 0.0,
+ default = 1.0,
)
normal_factor: FloatProperty(
name="",
description="Amount texture affects normal values",
- default = 0.0,
+ default = 1.0,
)
normal_map_space: EnumProperty(
@@ -1013,39 +1092,65 @@ class MaterialTextureSlot(PropertyGroup):
raymir_factor: FloatProperty(
name="",
description="Amount texture affects ray mirror",
- default = 0.0,
+ default = 1.0,
)
reflection_color_factor: FloatProperty(
name="",
description="Amount texture affects color of out-scattered light",
- default = 0.0,
+ default = 1.0,
)
reflection_factor: FloatProperty(
name="",
description="Amount texture affects brightness of out-scattered light",
- default = 0.0,
+ default = 1.0,
)
scattering_factor: FloatProperty(
name="",
description="Amount texture affects scattering",
- default = 0.0,
+ default = 1.0,
)
specular_color_factor: FloatProperty(
name="",
description="Amount texture affects specular color",
- default = 0.0,
+ default = 1.0,
)
specular_factor: FloatProperty(
name="",
description="Amount texture affects specular reflectivity",
- default = 0.0,
+ default = 1.0,
+ )
+
+ offset: FloatVectorProperty(
+ name="Offset",
+ description=("Fine tune of the texture mapping X, Y and Z locations "),
+ precision=4,
+ step=0.1,
+ soft_min= -100.0,
+ soft_max=100.0,
+ default=(0.0,0.0,0.0),
+ options={'ANIMATABLE'},
+ subtype='TRANSLATION',
)
+ scale: FloatVectorProperty(
+ name="Size",
+ subtype='XYZ',
+ size=3,
+ description="Set scaling for the texture’s X, Y and Z sizes ",
+ precision=4,
+ step=0.1,
+ soft_min= -100.0,
+ soft_max=100.0,
+ default=(1.0,1.0,1.0),
+ options={'ANIMATABLE'},
+ )
+
+
texture_coords: EnumProperty(
name="",
description="",
@@ -1068,13 +1173,13 @@ class MaterialTextureSlot(PropertyGroup):
translucency_factor: FloatProperty(
name="",
description="Amount texture affects translucency",
- default = 0.0,
+ default = 1.0,
)
transmission_color_factor: FloatProperty(
name="",
description="Amount texture affects result color after light has been scattered/absorbed",
- default = 0.0,
+ default = 1.0,
)
use: BoolProperty(
@@ -1095,6 +1200,12 @@ class MaterialTextureSlot(PropertyGroup):
default = False,
)
+ use_interpolation: BoolProperty(
+ name="",
+ description="Interpolates pixels using selected filter ",
+ default = False,
+ )
+
use_map_alpha: BoolProperty(
name="",
description="Causes the texture to affect the alpha value",
@@ -1110,7 +1221,7 @@ class MaterialTextureSlot(PropertyGroup):
use_map_color_diffuse: BoolProperty(
name="",
description="Causes the texture to affect basic color of the material",
- default = False,
+ default = True,
)
use_map_color_emission: BoolProperty(
@@ -1234,7 +1345,7 @@ class MaterialTextureSlot(PropertyGroup):
)
-#######################################"
+#######################################
blend_factor: FloatProperty(
name="Blend",
@@ -1328,10 +1439,10 @@ bpy.types.ID.texture_context = EnumProperty(
default = 'MATERIAL',
)
-bpy.types.ID.active_texture_index = IntProperty(
- name = "Index for texture_slots",
- default = 0,
-)
+# bpy.types.ID.active_texture_index = IntProperty(
+ # name = "Index for texture_slots",
+ # default = 0,
+# )
class RenderPovSettingsMaterial(PropertyGroup):
"""Declare material level properties controllable in UI and translated to POV."""
@@ -1360,6 +1471,7 @@ class RenderPovSettingsMaterial(PropertyGroup):
active_texture_index: IntProperty(
name = "Index for texture_slots",
default = 0,
+ update = brush_texture_update
)
transparency_method: EnumProperty(
@@ -2123,7 +2235,7 @@ class MaterialRaytraceTransparency(PropertyGroup):
gloss_samples: IntProperty(
name="Samples",
- description="Number of cone samples averaged for blurry refractions",
+ description="frequency of the noise sample used for blurry refractions",
min=0, max=1024, default=18
)
@@ -2205,8 +2317,8 @@ class MaterialRaytraceMirror(PropertyGroup):
)
gloss_samples: IntProperty(
- name="Samples",
- description="Number of cone samples averaged for blurry reflections",
+ name="Noise",
+ description="Frequency of the noise pattern bumps averaged for blurry reflections",
min=0, max=1024, default=18,
)
@@ -3223,7 +3335,7 @@ class MaterialStrandSettings(PropertyGroup):
# Povray Nodes
###############################################################################
-class PovraySocketUniversal(bpy.types.NodeSocket):
+class PovraySocketUniversal(NodeSocket):
bl_idname = 'PovraySocketUniversal'
bl_label = 'Povray Socket'
value_unlimited: bpy.props.FloatProperty(default=0.0)
@@ -3276,7 +3388,7 @@ class PovraySocketUniversal(bpy.types.NodeSocket):
def draw_color(self, context, node):
return (1, 0, 0, 1)
-class PovraySocketFloat_0_1(bpy.types.NodeSocket):
+class PovraySocketFloat_0_1(NodeSocket):
bl_idname = 'PovraySocketFloat_0_1'
bl_label = 'Povray Socket'
default_value: bpy.props.FloatProperty(description="Input node Value_0_1",min=0,max=1,default=0)
@@ -3289,7 +3401,7 @@ class PovraySocketFloat_0_1(bpy.types.NodeSocket):
def draw_color(self, context, node):
return (0.5, 0.7, 0.7, 1)
-class PovraySocketFloat_0_10(bpy.types.NodeSocket):
+class PovraySocketFloat_0_10(NodeSocket):
bl_idname = 'PovraySocketFloat_0_10'
bl_label = 'Povray Socket'
default_value: bpy.props.FloatProperty(description="Input node Value_0_10",min=0,max=10,default=0)
@@ -3304,7 +3416,7 @@ class PovraySocketFloat_0_10(bpy.types.NodeSocket):
def draw_color(self, context, node):
return (0.65, 0.65, 0.65, 1)
-class PovraySocketFloat_10(bpy.types.NodeSocket):
+class PovraySocketFloat_10(NodeSocket):
bl_idname = 'PovraySocketFloat_10'
bl_label = 'Povray Socket'
default_value: bpy.props.FloatProperty(description="Input node Value_10",min=-10,max=10,default=0)
@@ -3319,7 +3431,7 @@ class PovraySocketFloat_10(bpy.types.NodeSocket):
def draw_color(self, context, node):
return (0.65, 0.65, 0.65, 1)
-class PovraySocketFloatPositive(bpy.types.NodeSocket):
+class PovraySocketFloatPositive(NodeSocket):
bl_idname = 'PovraySocketFloatPositive'
bl_label = 'Povray Socket'
default_value: bpy.props.FloatProperty(description="Input Node Value Positive", min=0.0, default=0)
@@ -3331,7 +3443,7 @@ class PovraySocketFloatPositive(bpy.types.NodeSocket):
def draw_color(self, context, node):
return (0.045, 0.005, 0.136, 1)
-class PovraySocketFloat_000001_10(bpy.types.NodeSocket):
+class PovraySocketFloat_000001_10(NodeSocket):
bl_idname = 'PovraySocketFloat_000001_10'
bl_label = 'Povray Socket'
default_value: bpy.props.FloatProperty(min=0.000001,max=10,default=0.000001)
@@ -3343,7 +3455,7 @@ class PovraySocketFloat_000001_10(bpy.types.NodeSocket):
def draw_color(self, context, node):
return (1, 0, 0, 1)
-class PovraySocketFloatUnlimited(bpy.types.NodeSocket):
+class PovraySocketFloatUnlimited(NodeSocket):
bl_idname = 'PovraySocketFloatUnlimited'
bl_label = 'Povray Socket'
default_value: bpy.props.FloatProperty(default = 0.0)
@@ -3355,7 +3467,7 @@ class PovraySocketFloatUnlimited(bpy.types.NodeSocket):
def draw_color(self, context, node):
return (0.7, 0.7, 1, 1)
-class PovraySocketInt_1_9(bpy.types.NodeSocket):
+class PovraySocketInt_1_9(NodeSocket):
bl_idname = 'PovraySocketInt_1_9'
bl_label = 'Povray Socket'
default_value: bpy.props.IntProperty(description="Input node Value_1_9",min=1,max=9,default=6)
@@ -3367,7 +3479,7 @@ class PovraySocketInt_1_9(bpy.types.NodeSocket):
def draw_color(self, context, node):
return (1, 0.7, 0.7, 1)
-class PovraySocketInt_0_256(bpy.types.NodeSocket):
+class PovraySocketInt_0_256(NodeSocket):
bl_idname = 'PovraySocketInt_0_256'
bl_label = 'Povray Socket'
default_value: bpy.props.IntProperty(min=0,max=255,default=0)
@@ -3380,7 +3492,7 @@ class PovraySocketInt_0_256(bpy.types.NodeSocket):
return (0.5, 0.5, 0.5, 1)
-class PovraySocketPattern(bpy.types.NodeSocket):
+class PovraySocketPattern(NodeSocket):
bl_idname = 'PovraySocketPattern'
bl_label = 'Povray Socket'
@@ -3417,7 +3529,7 @@ class PovraySocketPattern(bpy.types.NodeSocket):
def draw_color(self, context, node):
return (1, 1, 1, 1)
-class PovraySocketColor(bpy.types.NodeSocket):
+class PovraySocketColor(NodeSocket):
bl_idname = 'PovraySocketColor'
bl_label = 'Povray Socket'
@@ -3434,7 +3546,7 @@ class PovraySocketColor(bpy.types.NodeSocket):
def draw_color(self, context, node):
return (1, 1, 0, 1)
-class PovraySocketColorRGBFT(bpy.types.NodeSocket):
+class PovraySocketColorRGBFT(NodeSocket):
bl_idname = 'PovraySocketColorRGBFT'
bl_label = 'Povray Socket'
@@ -3452,7 +3564,7 @@ class PovraySocketColorRGBFT(bpy.types.NodeSocket):
def draw_color(self, context, node):
return (1, 1, 0, 1)
-class PovraySocketTexture(bpy.types.NodeSocket):
+class PovraySocketTexture(NodeSocket):
bl_idname = 'PovraySocketTexture'
bl_label = 'Povray Socket'
default_value: bpy.props.IntProperty()
@@ -3464,7 +3576,7 @@ class PovraySocketTexture(bpy.types.NodeSocket):
-class PovraySocketTransform(bpy.types.NodeSocket):
+class PovraySocketTransform(NodeSocket):
bl_idname = 'PovraySocketTransform'
bl_label = 'Povray Socket'
default_value: bpy.props.IntProperty(min=0,max=255,default=0)
@@ -3474,7 +3586,7 @@ class PovraySocketTransform(bpy.types.NodeSocket):
def draw_color(self, context, node):
return (99/255, 99/255, 199/255, 1)
-class PovraySocketNormal(bpy.types.NodeSocket):
+class PovraySocketNormal(NodeSocket):
bl_idname = 'PovraySocketNormal'
bl_label = 'Povray Socket'
default_value: bpy.props.IntProperty(min=0,max=255,default=0)
@@ -3484,7 +3596,7 @@ class PovraySocketNormal(bpy.types.NodeSocket):
def draw_color(self, context, node):
return (0.65, 0.65, 0.65, 1)
-class PovraySocketSlope(bpy.types.NodeSocket):
+class PovraySocketSlope(NodeSocket):
bl_idname = 'PovraySocketSlope'
bl_label = 'Povray Socket'
default_value: bpy.props.FloatProperty(min = 0.0, max = 1.0)
@@ -3500,7 +3612,7 @@ class PovraySocketSlope(bpy.types.NodeSocket):
def draw_color(self, context, node):
return (0, 0, 0, 1)
-class PovraySocketMap(bpy.types.NodeSocket):
+class PovraySocketMap(NodeSocket):
bl_idname = 'PovraySocketMap'
bl_label = 'Povray Socket'
default_value: bpy.props.StringProperty()
@@ -3774,7 +3886,7 @@ class RenderPovSettingsTexture(PropertyGroup):
('checker', "Checker", "", 'PLUGIN', 34),
('hexagon', "Hexagon", "", 'PLUGIN', 35),
('object', "Mesh", "", 'PLUGIN', 36),
- ('emulator', "Internal Emulator", "", 'PLUG', 37)
+ ('emulator', "Blender Type Emulator", "", 'SCRIPTPLUGINS', 37)
),
default='emulator',
)
@@ -5167,7 +5279,7 @@ class RenderPovSettingsWorld(PropertyGroup):
items=(
('MATERIAL', "", "Show material textures", "MATERIAL",0), # "Show material textures"
('WORLD', "", "Show world textures", "WORLD",1), # "Show world textures"
- ('LAMP', "", "Show lamp textures", "LIGHT",2), # "Show lamp textures"
+ ('LIGHT', "", "Show lamp textures", "LIGHT",2), # "Show lamp textures"
('PARTICLES', "", "Show particles textures", "PARTICLES",3), # "Show particles textures"
('LINESTYLE', "", "Show linestyle textures", "LINE_DATA",4), # "Show linestyle textures"
('OTHER', "", "Show other data textures", "TEXTURE_DATA",5), # "Show other data textures"
@@ -5215,12 +5327,27 @@ class RenderPovSettingsWorld(PropertyGroup):
)
active_texture_index: IntProperty(
name = "Index for texture_slots",
- default = 0
+ default = 0,
+ update = brush_texture_update
)
-
class WorldTextureSlot(PropertyGroup):
- """Declare world texture slot properties controllable in UI and translated to POV."""
+ """Declare world texture slot level properties for UI and translated to POV."""
+
+ bl_idname="pov_texture_slots",
+ bl_description="Texture_slots from Blender-2.79",
+
+ # Adding a "real" texture datablock as property is not possible
+ # (or at least not easy through a dynamically populated EnumProperty).
+ # That's why we'll use a prop_search() UILayout function in ui.py.
+ # So we'll assign the name of the needed texture datablock to the below StringProperty.
+ texture : StringProperty(update=active_texture_name_from_uilist)
+ # and use another temporary StringProperty to change the linked data
+ texture_search : StringProperty(
+ name="",
+ update = active_texture_name_from_search,
+ description = "Browse Texture to be linked",
+ )
blend_factor: FloatProperty(
name="Blend",
@@ -5241,6 +5368,31 @@ class WorldTextureSlot(PropertyGroup):
default="",
)
+ offset: FloatVectorProperty(
+ name="Offset",
+ description=("Fine tune of the texture mapping X, Y and Z locations "),
+ precision=4,
+ step=0.1,
+ soft_min= -100.0,
+ soft_max=100.0,
+ default=(0.0,0.0,0.0),
+ options={'ANIMATABLE'},
+ subtype='TRANSLATION',
+ )
+
+ scale: FloatVectorProperty(
+ name="Size",
+ subtype='XYZ',
+ size=3,
+ description="Set scaling for the texture’s X, Y and Z sizes ",
+ precision=4,
+ step=0.1,
+ soft_min= -100.0,
+ soft_max=100.0,
+ default=(1.0,1.0,1.0),
+ options={'ANIMATABLE'},
+ )
+
texture_coords: EnumProperty(
name="Coordinates",
description="Texture coordinates used to map the texture onto the background",
@@ -5308,7 +5460,7 @@ for i in range(18): # length of world texture slots
class MATERIAL_TEXTURE_SLOTS_UL_POV_layerlist(bpy.types.UIList):
# texture_slots:
- index: bpy.props.IntProperty(name='index')
+ #index: bpy.props.IntProperty(name='index')
# foo = random prop
def draw_item(self, context, layout, data, item, icon, active_data, active_propname):
ob = data
@@ -5385,11 +5537,44 @@ class PovrayPreferences(AddonPreferences):
subtype='FILE_PATH',
)
+ use_sounds: BoolProperty(
+ name="Use Sound",
+ description="Signaling end of the render process at various"
+ "stages can help if you're away from monitor",
+ default=False,
+ )
+
+ # TODO: Auto find POV sound directory as it does for binary
+ # And implement the three cases, left uncommented for a dummy
+ # interface in case some doc screenshots get made for that area
+ filepath_complete_sound: StringProperty(
+ name="Finish Render Sound",
+ description="Path to finished render sound file",
+ subtype='FILE_PATH',
+ )
+
+ filepath_parse_error_sound: StringProperty(
+ name="Parse Error Sound",
+ description="Path to parsing time error sound file",
+ subtype='FILE_PATH',
+ )
+
+ filepath_cancel_sound: StringProperty(
+ name="Cancel Render Sound",
+ description="Path to cancelled or render time error sound file",
+ subtype='FILE_PATH',
+ )
+
+ # Shall we not move this to the UI file?
def draw(self, context):
layout = self.layout
layout.prop(self, "branch_feature_set_povray")
layout.prop(self, "filepath_povray")
layout.prop(self, "docpath_povray")
+ layout.prop(self, "filepath_complete_sound")
+ layout.prop(self, "filepath_parse_error_sound")
+ layout.prop(self, "filepath_cancel_sound")
+ layout.prop(self, "use_sounds", icon='SOUND')
classes = (
@@ -5451,7 +5636,7 @@ def register():
bpy.types.Light.pov = PointerProperty(type=RenderPovSettingsLight)
bpy.types.World.pov = PointerProperty(type=RenderPovSettingsWorld)
bpy.types.Material.pov_texture_slots = CollectionProperty(type=MaterialTextureSlot)
- bpy.types.World.texture_slots = CollectionProperty(type=WorldTextureSlot)
+ bpy.types.World.pov_texture_slots = CollectionProperty(type=WorldTextureSlot)
bpy.types.Text.pov = PointerProperty(type=RenderPovSettingsText)
@@ -5468,6 +5653,7 @@ def unregister():
del bpy.types.Camera.pov
del bpy.types.Light.pov
del bpy.types.World.pov
+ del bpy.types.World.pov_texture_slots
del bpy.types.Material.pov_texture_slots
del bpy.types.Text.pov
diff --git a/render_povray/nodes.py b/render_povray/nodes.py
index be535db3..bbdb9754 100644
--- a/render_povray/nodes.py
+++ b/render_povray/nodes.py
@@ -21,7 +21,14 @@
import bpy
from bpy.utils import register_class
-from bpy.types import Node, ShaderNodeTree, CompositorNodeTree, TextureNodeTree#, NodeSocket
+from bpy.types import (
+ Node,
+ ShaderNodeTree,
+ CompositorNodeTree,
+ TextureNodeTree,
+ #NodeSocket,
+ Operator,
+ )
from bpy.props import (
StringProperty,
BoolProperty,
@@ -1025,7 +1032,7 @@ class TextureOutputNode(Node, TextureNodeTree):
##################################################################################
-class NODE_OT_iso_add(bpy.types.Operator):
+class NODE_OT_iso_add(Operator):
bl_idname = "pov.nodeisoadd"
bl_label = "Create iso props"
@@ -1042,7 +1049,7 @@ class NODE_OT_iso_add(bpy.types.Operator):
isonode.label = ob.name
return {'FINISHED'}
-class NODE_OT_map_create(bpy.types.Operator):
+class NODE_OT_map_create(Operator):
bl_idname = "node.map_create"
bl_label = "Create map"
@@ -1067,7 +1074,7 @@ class NODE_OT_map_create(bpy.types.Operator):
mat = context.object.active_material
layout.prop(mat.pov,"inputs_number")
-class NODE_OT_povray_node_texture_map_add(bpy.types.Operator):
+class NODE_OT_povray_node_texture_map_add(Operator):
bl_idname = "pov.nodetexmapadd"
bl_label = "Texture map"
@@ -1091,7 +1098,7 @@ class NODE_OT_povray_node_texture_map_add(bpy.types.Operator):
return {'FINISHED'}
-class NODE_OT_povray_node_output_add(bpy.types.Operator):
+class NODE_OT_povray_node_output_add(Operator):
bl_idname = "pov.nodeoutputadd"
bl_label = "Output"
@@ -1105,7 +1112,7 @@ class NODE_OT_povray_node_output_add(bpy.types.Operator):
tmap.label="Output"
return {'FINISHED'}
-class NODE_OT_povray_node_layered_add(bpy.types.Operator):
+class NODE_OT_povray_node_layered_add(Operator):
bl_idname = "pov.nodelayeredadd"
bl_label = "Layered material"
@@ -1116,7 +1123,7 @@ class NODE_OT_povray_node_layered_add(bpy.types.Operator):
tmap.label="Layered material"
return {'FINISHED'}
-class NODE_OT_povray_input_add(bpy.types.Operator):
+class NODE_OT_povray_input_add(Operator):
bl_idname = "pov.nodeinputadd"
bl_label = "Add entry"
@@ -1141,7 +1148,7 @@ class NODE_OT_povray_input_add(bpy.types.Operator):
return {'FINISHED'}
-class NODE_OT_povray_input_remove(bpy.types.Operator):
+class NODE_OT_povray_input_remove(Operator):
bl_idname = "pov.nodeinputremove"
bl_label = "Remove input"
@@ -1159,7 +1166,7 @@ class NODE_OT_povray_input_remove(bpy.types.Operator):
els.remove(el)
return {'FINISHED'}
-class NODE_OT_povray_image_open(bpy.types.Operator):
+class NODE_OT_povray_image_open(Operator):
bl_idname = "pov.imageopen"
bl_label = "Open"
@@ -1181,7 +1188,7 @@ class NODE_OT_povray_image_open(bpy.types.Operator):
return {'FINISHED'}
-# class TEXTURE_OT_povray_open_image(bpy.types.Operator):
+# class TEXTURE_OT_povray_open_image(Operator):
# bl_idname = "pov.openimage"
# bl_label = "Open Image"
@@ -1204,7 +1211,7 @@ class NODE_OT_povray_image_open(bpy.types.Operator):
# view_layer.update()
# return {'FINISHED'}
-class PovrayPatternNode(bpy.types.Operator):
+class PovrayPatternNode(Operator):
bl_idname = "pov.patternnode"
bl_label = "Pattern"
@@ -1259,7 +1266,7 @@ class PovrayPatternNode(bpy.types.Operator):
context.window_manager.modal_handler_add(self)
return {'RUNNING_MODAL'}
-class UpdatePreviewMaterial(bpy.types.Operator):
+class UpdatePreviewMaterial(Operator):
'''Operator update preview material'''
bl_idname = "node.updatepreview"
bl_label = "Update preview"
@@ -1283,7 +1290,7 @@ class UpdatePreviewMaterial(bpy.types.Operator):
context.window_manager.modal_handler_add(self)
return {'RUNNING_MODAL'}
-class UpdatePreviewKey(bpy.types.Operator):
+class UpdatePreviewKey(Operator):
'''Operator update preview keymap'''
bl_idname = "wm.updatepreviewkey"
bl_label = "Activate RMB"
diff --git a/render_povray/primitives.py b/render_povray/primitives.py
index a9d68d44..6d864220 100644
--- a/render_povray/primitives.py
+++ b/render_povray/primitives.py
@@ -26,7 +26,7 @@ from bpy_extras.io_utils import ImportHelper
from bpy_extras import object_utils
from bpy.utils import register_class
from math import atan, pi, degrees, sqrt, cos, sin
-
+from bpy.types import Operator
from bpy.props import (
StringProperty,
@@ -41,6 +41,7 @@ from bpy.props import (
from mathutils import Vector, Matrix
+
# import collections
@@ -60,7 +61,7 @@ def pov_define_mesh(mesh, verts, edges, faces, name, hide_geometry=True):
return mesh
-class POVRAY_OT_lathe_add(bpy.types.Operator):
+class POVRAY_OT_lathe_add(Operator):
"""Add the representation of POV lathe using a screw modifier."""
bl_idname = "pov.addlathe"
@@ -212,7 +213,7 @@ def pov_superellipsoid_define(context, op, ob):
bpy.ops.object.mode_set(mode="OBJECT")
-class POVRAY_OT_superellipsoid_add(bpy.types.Operator):
+class POVRAY_OT_superellipsoid_add(Operator):
"""Add the representation of POV superellipsoid using the pov_superellipsoid_define() function."""
bl_idname = "pov.addsuperellipsoid"
@@ -286,7 +287,7 @@ class POVRAY_OT_superellipsoid_add(bpy.types.Operator):
return {'FINISHED'}
-class POVRAY_OT_superellipsoid_update(bpy.types.Operator):
+class POVRAY_OT_superellipsoid_update(Operator):
"""Update the superellipsoid.
Delete its previous proxy geometry and rerun pov_superellipsoid_define() function
@@ -455,7 +456,7 @@ def pov_supertorus_define(context, op, ob):
ob.pov.st_edit = st_edit
-class POVRAY_OT_supertorus_add(bpy.types.Operator):
+class POVRAY_OT_supertorus_add(Operator):
"""Add the representation of POV supertorus using the pov_supertorus_define() function."""
bl_idname = "pov.addsupertorus"
@@ -530,7 +531,7 @@ class POVRAY_OT_supertorus_add(bpy.types.Operator):
return {'FINISHED'}
-class POVRAY_OT_supertorus_update(bpy.types.Operator):
+class POVRAY_OT_supertorus_update(Operator):
"""Update the supertorus.
Delete its previous proxy geometry and rerun pov_supertorus_define() function
@@ -566,7 +567,7 @@ class POVRAY_OT_supertorus_update(bpy.types.Operator):
#########################################################################################################
-class POVRAY_OT_loft_add(bpy.types.Operator):
+class POVRAY_OT_loft_add(Operator):
"""Create the representation of POV loft using Blender curves."""
bl_idname = "pov.addloft"
@@ -695,7 +696,7 @@ class POVRAY_OT_loft_add(bpy.types.Operator):
return {'FINISHED'}
-class POVRAY_OT_plane_add(bpy.types.Operator):
+class POVRAY_OT_plane_add(Operator):
"""Add the representation of POV infinite plane using just a very big Blender Plane.
Flag its primitive type with a specific pov.object_as attribute and lock edit mode
@@ -725,7 +726,7 @@ class POVRAY_OT_plane_add(bpy.types.Operator):
return {'FINISHED'}
-class POVRAY_OT_box_add(bpy.types.Operator):
+class POVRAY_OT_box_add(Operator):
"""Add the representation of POV box using a simple Blender mesh cube.
Flag its primitive type with a specific pov.object_as attribute and lock edit mode
@@ -796,7 +797,7 @@ def pov_cylinder_define(context, op, ob, radius, loc, loc_cap):
bpy.ops.object.shade_smooth()
-class POVRAY_OT_cylinder_add(bpy.types.Operator):
+class POVRAY_OT_cylinder_add(Operator):
"""Add the representation of POV cylinder using pov_cylinder_define() function.
Use imported_cyl_loc when this operator is run by POV importer."""
@@ -846,7 +847,7 @@ class POVRAY_OT_cylinder_add(bpy.types.Operator):
return {'FINISHED'}
-class POVRAY_OT_cylinder_update(bpy.types.Operator):
+class POVRAY_OT_cylinder_update(Operator):
"""Update the POV cylinder.
Delete its previous proxy geometry and rerun pov_cylinder_define() function
@@ -932,7 +933,7 @@ def pov_sphere_define(context, op, ob, loc):
bpy.ops.object.mode_set(mode="OBJECT")
-class POVRAY_OT_sphere_add(bpy.types.Operator):
+class POVRAY_OT_sphere_add(Operator):
"""Add the representation of POV sphere using pov_sphere_define() function.
Use imported_loc when this operator is run by POV importer."""
@@ -989,7 +990,7 @@ class POVRAY_OT_sphere_add(bpy.types.Operator):
# return {'FINISHED'}
-class POVRAY_OT_sphere_update(bpy.types.Operator):
+class POVRAY_OT_sphere_update(Operator):
"""Update the POV sphere.
Delete its previous proxy geometry and rerun pov_sphere_define() function
@@ -1084,7 +1085,7 @@ def pov_cone_define(context, op, ob):
ob.pov.cone_cap_z = zc
-class POVRAY_OT_cone_add(bpy.types.Operator):
+class POVRAY_OT_cone_add(Operator):
"""Add the representation of POV cone using pov_cone_define() function."""
bl_idname = "pov.cone_add"
@@ -1139,7 +1140,7 @@ class POVRAY_OT_cone_add(bpy.types.Operator):
return {'FINISHED'}
-class POVRAY_OT_cone_update(bpy.types.Operator):
+class POVRAY_OT_cone_update(Operator):
"""Update the POV cone.
Delete its previous proxy geometry and rerun pov_cone_define() function
@@ -1177,7 +1178,7 @@ class POVRAY_OT_cone_update(bpy.types.Operator):
########################################ISOSURFACES##################################
-class POVRAY_OT_isosurface_box_add(bpy.types.Operator):
+class POVRAY_OT_isosurface_box_add(Operator):
"""Add the representation of POV isosurface box using also just a Blender mesh cube.
Flag its primitive type with a specific pov.object_as attribute and lock edit mode
@@ -1207,7 +1208,7 @@ class POVRAY_OT_isosurface_box_add(bpy.types.Operator):
return {'FINISHED'}
-class POVRAY_OT_isosurface_sphere_add(bpy.types.Operator):
+class POVRAY_OT_isosurface_sphere_add(Operator):
"""Add the representation of POV isosurface sphere by a Blender mesh icosphere.
Flag its primitive type with a specific pov.object_as attribute and lock edit mode
@@ -1238,7 +1239,7 @@ class POVRAY_OT_isosurface_sphere_add(bpy.types.Operator):
return {'FINISHED'}
-class POVRAY_OT_sphere_sweep_add(bpy.types.Operator):
+class POVRAY_OT_sphere_sweep_add(Operator):
"""Add the representation of POV sphere_sweep using a Blender NURBS curve.
Flag its primitive type with a specific ob.pov.curveshape attribute and
@@ -1264,7 +1265,7 @@ class POVRAY_OT_sphere_sweep_add(bpy.types.Operator):
return {'FINISHED'}
-class POVRAY_OT_blob_add(bpy.types.Operator):
+class POVRAY_OT_blob_add(Operator):
"""Add the representation of POV blob using a Blender meta ball.
No need to flag its primitive type as meta are exported to blobs
@@ -1284,7 +1285,7 @@ class POVRAY_OT_blob_add(bpy.types.Operator):
return {'FINISHED'}
-class POVRAY_OT_rainbow_add(bpy.types.Operator):
+class POVRAY_OT_rainbow_add(Operator):
"""Add the representation of POV rainbow using a Blender spot light.
Rainbows indeed propagate along a visibility cone.
@@ -1334,7 +1335,7 @@ class POVRAY_OT_height_field_add(bpy.types.Operator, ImportHelper):
bl_idname = "pov.addheightfield"
bl_label = "Height Field"
- bl_description = "Add Height Field "
+ bl_description = "Add Height Field"
bl_options = {'REGISTER', 'UNDO'}
# XXX Keep it in sync with __init__'s hf Primitive
@@ -1470,7 +1471,7 @@ def pov_torus_define(context, op, ob):
bpy.ops.object.mode_set(mode="OBJECT")
-class POVRAY_OT_torus_add(bpy.types.Operator):
+class POVRAY_OT_torus_add(Operator):
"""Add the representation of POV torus using using pov_torus_define() function."""
bl_idname = "pov.addtorus"
@@ -1503,7 +1504,7 @@ class POVRAY_OT_torus_add(bpy.types.Operator):
return {'FINISHED'}
-class POVRAY_OT_torus_update(bpy.types.Operator):
+class POVRAY_OT_torus_update(Operator):
"""Update the POV torus.
Delete its previous proxy geometry and rerun pov_torus_define() function
@@ -1536,7 +1537,7 @@ class POVRAY_OT_torus_update(bpy.types.Operator):
###################################################################################
-class POVRAY_OT_prism_add(bpy.types.Operator):
+class POVRAY_OT_prism_add(Operator):
"""Add the representation of POV prism using using an extruded curve."""
bl_idname = "pov.addprism"
@@ -1662,7 +1663,7 @@ def pov_parametric_define(context, op, ob):
bpy.ops.object.mode_set(mode="OBJECT")
-class POVRAY_OT_parametric_add(bpy.types.Operator):
+class POVRAY_OT_parametric_add(Operator):
"""Add the representation of POV parametric surfaces using pov_parametric_define() function."""
bl_idname = "pov.addparametric"
@@ -1698,7 +1699,7 @@ class POVRAY_OT_parametric_add(bpy.types.Operator):
return {'FINISHED'}
-class POVRAY_OT_parametric_update(bpy.types.Operator):
+class POVRAY_OT_parametric_update(Operator):
"""Update the representation of POV parametric surfaces.
Delete its previous proxy geometry and rerun pov_parametric_define() function
@@ -1731,7 +1732,7 @@ class POVRAY_OT_parametric_update(bpy.types.Operator):
#######################################################################
-class POVRAY_OT_shape_polygon_to_circle_add(bpy.types.Operator):
+class POVRAY_OT_shape_polygon_to_circle_add(Operator):
"""Add the proxy mesh for POV Polygon to circle lofting macro"""
bl_idname = "pov.addpolygontocircle"
diff --git a/render_povray/render.py b/render_povray/render.py
index 6efe7291..f9de22e4 100644
--- a/render_povray/render.py
+++ b/render_povray/render.py
@@ -24,6 +24,10 @@ import os
import sys
import time
from math import atan, pi, degrees, sqrt, cos, sin
+####################
+## Faster mesh export
+import numpy as np
+####################
import re
import random
import platform #
@@ -69,6 +73,7 @@ def imageFormat(imgF):
def imgMap(ts):
"""Translate mapping type from Blender UI to POV syntax and return that string."""
image_map = ""
+ texdata = bpy.data.textures[ts.texture]
if ts.mapping == 'FLAT':
image_map = "map_type 0 "
elif ts.mapping == 'SPHERE':
@@ -82,9 +87,9 @@ def imgMap(ts):
# image_map = " map_type 3 "
# elif ts.mapping=="?":
# image_map = " map_type 4 "
- if ts.texture.use_interpolation:
+ if ts.use_interpolation: # Available if image sampling class reactivated?
image_map += " interpolate 2 "
- if ts.texture.extension == 'CLIP':
+ if texdata.extension == 'CLIP':
image_map += " once "
# image_map += "}"
# if ts.mapping=='CUBE':
@@ -110,12 +115,12 @@ def imgMapTransforms(ts):
image_map_transforms = (
"scale <%.4g,%.4g,%.4g> translate <%.4g,%.4g,%.4g>"
% (
- 1.0 / ts.scale.x,
- 1.0 / ts.scale.y,
- 1.0 / ts.scale.z,
- 0.5 - (0.5 / ts.scale.x) - (ts.offset.x),
- 0.5 - (0.5 / ts.scale.y) - (ts.offset.y),
- ts.offset.z,
+ ts.scale[0],
+ ts.scale[1],
+ ts.scale[2],
+ ts.offset[0],
+ ts.offset[1],
+ ts.offset[2],
)
)
# image_map_transforms = (" translate <-0.5,-0.5,0.0> scale <%.4g,%.4g,%.4g> translate <%.4g,%.4g,%.4g>" % \
@@ -137,6 +142,7 @@ def imgMapTransforms(ts):
def imgMapBG(wts):
"""Translate world mapping from Blender UI to POV syntax and return that string."""
+ tex = bpy.data.textures[wts.texture]
image_mapBG = ""
# texture_coords refers to the mapping of world textures:
if wts.texture_coords == 'VIEW' or wts.texture_coords == 'GLOBAL':
@@ -146,9 +152,9 @@ def imgMapBG(wts):
elif wts.texture_coords == 'TUBE':
image_mapBG = " map_type 2 "
- if wts.texture.use_interpolation:
+ if tex.use_interpolation:
image_mapBG += " interpolate 2 "
- if wts.texture.extension == 'CLIP':
+ if tex.extension == 'CLIP':
image_mapBG += " once "
# image_mapBG += "}"
# if wts.mapping == 'CUBE':
@@ -386,6 +392,7 @@ def write_object_modifiers(scene, ob, File):
def write_pov(filename, scene=None, info_callback=None):
"""Main export process from Blender UI to POV syntax and write to exported file """
+
import mathutils
# file = filename
@@ -690,7 +697,7 @@ def write_pov(filename, scene=None, info_callback=None):
tabWrite("right <%s, 0, 0>\n" % -Qsize)
tabWrite("up <0, 1, 0>\n")
tabWrite(
- "angle %f\n" % (360.0 * atan(16.0 / camera.data.lens) / pi)
+ "angle %f\n" % ( 2 * atan(camera.data.sensor_width / 2 / camera.data.lens) * 180.0 / pi )
)
tabWrite(
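Aside on the angle change in the hunk above: the old expression hard-coded a 16 mm half-frame, while the new one derives the horizontal field of view from the camera's actual sensor width. A quick standalone check of that formula (function name and example values are ours, not from the patch):

    from math import atan, degrees

    def pov_camera_angle(sensor_width_mm, lens_mm):
        # Horizontal field of view in degrees, matching the expression written above
        return degrees(2 * atan(sensor_width_mm / (2 * lens_mm)))

    # Default Blender camera: 36 mm sensor, 50 mm lens -> about 39.6 degrees
    print(pov_camera_angle(36.0, 50.0))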
@@ -737,6 +744,7 @@ def write_pov(filename, scene=None, info_callback=None):
def exportLamps(lamps):
"""Translate lights from Blender UI to POV syntax and write to exported file."""
+
# Incremented after each lamp export to declare its target
# currently used for Fresnel diffuse shader as their slope vector:
global lampCount
@@ -2346,6 +2354,64 @@ def write_pov(filename, scene=None, info_callback=None):
def exportMeshes(scene, sel, csg):
"""write all meshes as POV mesh2{} syntax to exported file """
+ # Some numpy functions to speed up mesh export
+
+ # TODO: also write a numpy function to read matrices at object level?
+ # Feed the functions below with a mesh object's data, but only after doing data.calc_loop_triangles()
+ def read_verts_co(self, mesh):
+ # 'float64' would be a slower 64-bit floating-point numpy datatype;
+ # using 'float32' vert coordinates for now until any issue is reported
+ mverts_co = np.zeros((len(mesh.vertices)*3), dtype=np.float32)
+ mesh.vertices.foreach_get("co", mverts_co)
+ return np.reshape(mverts_co, (len(mesh.vertices), 3))
+
+ def read_verts_idx(self, mesh):
+ mverts_idx = np.zeros((len(mesh.vertices)), dtype=np.int64)
+ mesh.vertices.foreach_get("index", mverts_idx)
+ return np.reshape(mverts_idx, (len(mesh.vertices), 1))
+
+ def read_verts_norms(self, mesh):
+ # 'float64' would be a slower 64-bit floating-point numpy datatype;
+ # using less accurate 'float16' normals for now until any issue is reported
+ mverts_no = np.zeros((len(mesh.vertices)*3), dtype=np.float16)
+ mesh.vertices.foreach_get("normal", mverts_no)
+ return np.reshape(mverts_no, (len(mesh.vertices), 3))
+
+ def read_faces_idx(self, mesh):
+ mfaces_idx = np.zeros((len(mesh.loop_triangles)), dtype=np.int64)
+ mesh.loop_triangles.foreach_get("index", mfaces_idx)
+ return np.reshape(mfaces_idx, (len(mesh.loop_triangles), 1))
+
+ def read_faces_verts_indices(self, mesh):
+ mfaces_verts_idx = np.zeros((len(mesh.loop_triangles)*3), dtype=np.int64)
+ mesh.loop_triangles.foreach_get("vertices", mfaces_verts_idx)
+ return np.reshape(mfaces_verts_idx, (len(mesh.loop_triangles), 3))
+
+ # Why is below different from vertex indices?
+ def read_faces_verts_loops(self, mesh):
+ mfaces_verts_loops = np.zeros((len(mesh.loop_triangles)*3), dtype=np.int64)
+ mesh.loop_triangles.foreach_get("loops", mfaces_verts_loops)
+ return np.reshape(mfaces_verts_loops, (len(mesh.loop_triangles), 3))
+
+ def read_faces_norms(self, mesh):
+ # 'float64' would be a slower 64-bit floating-point numpy datatype;
+ # using less accurate 'float16' normals for now until any issue is reported
+ mfaces_no = np.zeros((len(mesh.loop_triangles)*3), dtype=np.float16)
+ mesh.loop_triangles.foreach_get("normal", mfaces_no)
+ return np.reshape(mfaces_no, (len(mesh.loop_triangles), 3))
+
+ def read_faces_smooth(self, mesh):
+ mfaces_smth = np.zeros((len(mesh.loop_triangles)*1), dtype=np.bool)
+ mesh.loop_triangles.foreach_get("use_smooth", mfaces_smth)
+ return np.reshape(mfaces_smth, (len(mesh.loop_triangles), 1))
+
+ def read_faces_material_indices(self, mesh):
+ mfaces_mats_idx = np.zeros((len(mesh.loop_triangles)), dtype=np.int16)
+ mesh.loop_triangles.foreach_get("material_index", mfaces_mats_idx)
+ return np.reshape(mfaces_mats_idx, (len(mesh.loop_triangles), 1))
+
+
+
# obmatslist = []
# def hasUniqueMaterial():
# # Grab materials attached to object instances ...
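The helper functions added above all follow the same foreach_get pattern; for readers unfamiliar with it, here is a minimal standalone sketch of that pattern, assuming an active mesh object, with the same dtypes the patch uses:

    import bpy
    import numpy as np

    ob = bpy.context.object          # assumes the active object is a mesh
    me = ob.data
    me.calc_loop_triangles()         # required before reading loop_triangles

    # foreach_get fills a pre-allocated flat array in one call,
    # avoiding a slow per-vertex Python loop
    co = np.zeros(len(me.vertices) * 3, dtype=np.float32)
    me.vertices.foreach_get("co", co)
    co = co.reshape(len(me.vertices), 3)          # one row per vertex

    tri_verts = np.zeros(len(me.loop_triangles) * 3, dtype=np.int64)
    me.loop_triangles.foreach_get("vertices", tri_verts)
    tri_verts = tri_verts.reshape(len(me.loop_triangles), 3)  # one row per triangle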
@@ -2684,6 +2750,7 @@ def write_pov(filename, scene=None, info_callback=None):
pmaterial = ob.material_slots[
pSys.settings.material - 1
].material
+ #XXX Todo: replace by pov_(Particles?)_texture_slot
for th in pmaterial.texture_slots:
if th and th.use:
if (
@@ -3411,6 +3478,8 @@ def write_pov(filename, scene=None, info_callback=None):
ob.pov.v_max,
)
)
+ # Prior to 3.8, the default max_gradient 1.0 was too slow
+ tabWrite("max_gradient 0.001\n")
if ob.pov.contained_by == "sphere":
tabWrite("contained_by { sphere{0, 2} }\n")
else:
@@ -3593,6 +3662,14 @@ def write_pov(filename, scene=None, info_callback=None):
me.calc_loop_triangles()
me_materials = me.materials
me_faces = me.loop_triangles[:]
+ ## numpytest
+ #me_looptris = me.loops
+
+ ## otypes = ['int32'] is a 32-bit signed integer numpy datatype
+ #get_v_index = np.vectorize(lambda l: l.vertex_index, otypes = ['int32'], cache = True)
+ #faces_verts_idx = get_v_index(me_looptris)
+
+
# if len(me_faces)==0:
# tabWrite("\n//dummy sphere to represent empty mesh location\n")
# tabWrite("#declare %s =sphere {<0, 0, 0>,0 pigment{rgbt 1} no_image no_reflection no_radiosity photons{pass_through collect off} hollow}\n" % povdataname)
@@ -4092,6 +4169,7 @@ def write_pov(filename, scene=None, info_callback=None):
else:
shading.writeTextureInfluence(
+ using_uberpov,
mater,
materialNames,
LocalMaterialNames,
@@ -4581,89 +4659,91 @@ def write_pov(filename, scene=None, info_callback=None):
for (
t
) in (
- world.texture_slots
+ world.pov_texture_slots
): # risk to write several sky_spheres but maybe ok.
- if t and t.texture.type is not None:
+ if t:
+ tex = bpy.data.textures[t.texture]
+ if tex.type is not None:
worldTexCount += 1
- # XXX No enable checkbox for world textures yet (report it?)
- # if t and t.texture.type == 'IMAGE' and t.use:
- if t and t.texture.type == 'IMAGE':
- image_filename = path_image(t.texture.image)
- if t.texture.image.filepath != image_filename:
- t.texture.image.filepath = image_filename
- if image_filename != "" and t.use_map_blend:
- texturesBlend = image_filename
- # colvalue = t.default_value
- t_blend = t
-
- # Commented below was an idea to make the Background image oriented as camera
- # taken here:
- # http://news.pov.org/pov.newusers/thread/%3Cweb.4a5cddf4e9c9822ba2f93e20@news.pov.org%3E/
- # Replace 4/3 by the ratio of each image found by some custom or existing
- # function
- # mappingBlend = (" translate <%.4g,%.4g,%.4g> rotate z*degrees" \
- # "(atan((camLocation - camLookAt).x/(camLocation - " \
- # "camLookAt).y)) rotate x*degrees(atan((camLocation - " \
- # "camLookAt).y/(camLocation - camLookAt).z)) rotate y*" \
- # "degrees(atan((camLocation - camLookAt).z/(camLocation - " \
- # "camLookAt).x)) scale <%.4g,%.4g,%.4g>b" % \
- # (t_blend.offset.x / 10 , t_blend.offset.y / 10 ,
- # t_blend.offset.z / 10, t_blend.scale.x ,
- # t_blend.scale.y , t_blend.scale.z))
- # using camera rotation valuesdirectly from blender seems much easier
- if t_blend.texture_coords == 'ANGMAP':
- mappingBlend = ""
- else:
- # POV-Ray "scale" is not a number of repetitions factor, but its
- # inverse, a standard scale factor.
- # 0.5 Offset is needed relatively to scale because center of the
- # UV scale is 0.5,0.5 in blender and 0,0 in POV
- # Further Scale by 2 and translate by -1 are
- # required for the sky_sphere not to repeat
-
- mappingBlend = (
- "scale 2 scale <%.4g,%.4g,%.4g> translate -1 "
- "translate <%.4g,%.4g,%.4g> rotate<0,0,0> "
+ # XXX No enable checkbox for world textures yet (report it?)
+ # if t and tex.type == 'IMAGE' and t.use:
+ if tex.type == 'IMAGE':
+ image_filename = path_image(tex.image)
+ if tex.image.filepath != image_filename:
+ tex.image.filepath = image_filename
+ if image_filename != "" and t.use_map_blend:
+ texturesBlend = image_filename
+ # colvalue = t.default_value
+ t_blend = t
+
+ # Commented below was an idea to make the Background image oriented as camera
+ # taken here:
+ # http://news.pov.org/pov.newusers/thread/%3Cweb.4a5cddf4e9c9822ba2f93e20@news.pov.org%3E/
+ # Replace 4/3 by the ratio of each image found by some custom or existing
+ # function
+ # mappingBlend = (" translate <%.4g,%.4g,%.4g> rotate z*degrees" \
+ # "(atan((camLocation - camLookAt).x/(camLocation - " \
+ # "camLookAt).y)) rotate x*degrees(atan((camLocation - " \
+ # "camLookAt).y/(camLocation - camLookAt).z)) rotate y*" \
+ # "degrees(atan((camLocation - camLookAt).z/(camLocation - " \
+ # "camLookAt).x)) scale <%.4g,%.4g,%.4g>b" % \
+ # (t_blend.offset.x / 10 , t_blend.offset.y / 10 ,
+ # t_blend.offset.z / 10, t_blend.scale.x ,
+ # t_blend.scale.y , t_blend.scale.z))
+ # using camera rotation values directly from blender seems much easier
+ if t_blend.texture_coords == 'ANGMAP':
+ mappingBlend = ""
+ else:
+ # POV-Ray "scale" is not a number of repetitions factor, but its
+ # inverse, a standard scale factor.
+ # 0.5 Offset is needed relatively to scale because center of the
+ # UV scale is 0.5,0.5 in blender and 0,0 in POV
+ # Further Scale by 2 and translate by -1 are
+ # required for the sky_sphere not to repeat
+
+ mappingBlend = (
+ "scale 2 scale <%.4g,%.4g,%.4g> translate -1 "
+ "translate <%.4g,%.4g,%.4g> rotate<0,0,0> "
+ % (
+ (1.0 / t_blend.scale.x),
+ (1.0 / t_blend.scale.y),
+ (1.0 / t_blend.scale.z),
+ 0.5
+ - (0.5 / t_blend.scale.x)
+ - t_blend.offset.x,
+ 0.5
+ - (0.5 / t_blend.scale.y)
+ - t_blend.offset.y,
+ t_blend.offset.z,
+ )
+ )
+
+ # The initial position and rotation of the POV camera is probably creating
+ # the rotation offset; should look into it someday, but at least the background
+ # won't rotate with the camera now.
+ # Putting the map on a plane would not introduce the sky_sphere distortion and
+ # would allow for better image scale matching, but also needs some way to choose
+ # the depth and size of the plane relative to the camera.
+ tabWrite("sky_sphere {\n")
+ tabWrite("pigment {\n")
+ tabWrite(
+ "image_map{%s \"%s\" %s}\n"
% (
- (1.0 / t_blend.scale.x),
- (1.0 / t_blend.scale.y),
- (1.0 / t_blend.scale.z),
- 0.5
- - (0.5 / t_blend.scale.x)
- - t_blend.offset.x,
- 0.5
- - (0.5 / t_blend.scale.y)
- - t_blend.offset.y,
- t_blend.offset.z,
+ imageFormat(texturesBlend),
+ texturesBlend,
+ imgMapBG(t_blend),
)
)
-
- # The initial position and rotation of the pov camera is probably creating
- # the rotation offset should look into it someday but at least background
- # won't rotate with the camera now.
- # Putting the map on a plane would not introduce the skysphere distortion and
- # allow for better image scale matching but also some waay to chose depth and
- # size of the plane relative to camera.
- tabWrite("sky_sphere {\n")
- tabWrite("pigment {\n")
- tabWrite(
- "image_map{%s \"%s\" %s}\n"
- % (
- imageFormat(texturesBlend),
- texturesBlend,
- imgMapBG(t_blend),
+ tabWrite("}\n")
+ tabWrite("%s\n" % (mappingBlend))
+ # The following layered pigment opacifies to black over the texture for
+ # transmit below 1 or otherwise adds to itself
+ tabWrite(
+ "pigment {rgb 0 transmit %s}\n" % (tex.intensity)
)
- )
- tabWrite("}\n")
- tabWrite("%s\n" % (mappingBlend))
- # The following layered pigment opacifies to black over the texture for
- # transmit below 1 or otherwise adds to itself
- tabWrite(
- "pigment {rgb 0 transmit %s}\n" % (t.texture.intensity)
- )
- tabWrite("}\n")
- # tabWrite("scale 2\n")
- # tabWrite("translate -1\n")
+ tabWrite("}\n")
+ # tabWrite("scale 2\n")
+ # tabWrite("translate -1\n")
# For only Background gradient
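For readers skimming the re-indented block above, here is a compact standalone restatement of the sky_sphere mapping it writes; the function name is ours, the math is copied from the patch:

    def sky_sphere_mapping(scale, offset):
        # POV 'scale' is the inverse of a repeat factor, and the 0.5 terms recenter
        # Blender's (0.5, 0.5) UV origin on POV's (0, 0); the outer scale 2 and
        # translate -1 keep the image from tiling on the sky_sphere.
        sx, sy, sz = scale
        ox, oy, oz = offset
        return (
            "scale 2 scale <%.4g,%.4g,%.4g> translate -1 "
            "translate <%.4g,%.4g,%.4g> rotate<0,0,0> "
            % (1.0 / sx, 1.0 / sy, 1.0 / sz,
               0.5 - (0.5 / sx) - ox,
               0.5 - (0.5 / sy) - oy,
               oz)
        )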
@@ -4910,7 +4990,8 @@ def write_pov(filename, scene=None, info_callback=None):
"//--Exported with POV-Ray exporter for Blender--\n"
"//----------------------------------------------\n\n"
)
- file.write("#version 3.7;\n")
+ file.write("#version 3.7;\n") #Switch below as soon as 3.8 beta gets easy linked
+ #file.write("#version 3.8;\n")
file.write(
"#declare Default_texture = texture{pigment {rgb 0.8} "
"finish {brilliance 3.8} }\n\n"
@@ -5066,7 +5147,11 @@ def write_pov(filename, scene=None, info_callback=None):
if comments:
file.write("//--Mesh objects--\n")
+
+ #tbefore = time.time()
exportMeshes(scene, sel, csg)
+ #totime = time.time() - tbefore
+ #print("exportMeshes took" + str(totime))
# What follow used to happen here:
# exportCamera()
@@ -5097,7 +5182,7 @@ def write_pov_ini(
y = int(render.resolution_y * render.resolution_percentage * 0.01)
file = open(filename_ini, "w")
- file.write("Version=3.7\n")
+ file.write("Version=3.8\n")
# write povray text stream to temporary file of same name with _log suffix
# file.write("All_File='%s'\n" % filename_log)
# DEBUG.OUT log if none specified:
@@ -5797,15 +5882,106 @@ class PovrayRender(bpy.types.RenderEngine):
# print(filename_log) #bring the pov log to blender console with proper path?
with open(
- self._temp_file_log
+ self._temp_file_log,
+ encoding='utf-8'
) as f: # The with keyword automatically closes the file when you are done
- print(f.read())
+ msg = f.read()
+ #if isinstance(msg, str):
+ #stdmsg = msg
+ #decoded = False
+ #else:
+ #if type(msg) == bytes:
+ #stdmsg = msg.split('\n')
+ #stdmsg = msg.encode('utf-8', "replace")
+ #stdmsg = msg.encode("utf-8", "replace")
+
+ #stdmsg = msg.decode(encoding)
+ #decoded = True
+ #msg.encode('utf-8').decode('utf-8')
+ print(msg)
+ # Also print to the interactive console used in the POV-centric workspace
+ # TODO: get a grip on newline encoding
+ # and make this a function to be used elsewhere
+ for win in bpy.context.window_manager.windows:
+ if win.screen is not None:
+ scr = win.screen
+ for area in scr.areas:
+ if area.type == 'CONSOLE':
+ #context override
+ #ctx = {'window': win, 'screen': scr, 'area':area}#bpy.context.copy()
+ ctx = {}
+ ctx['area'] = area
+ ctx['region'] = area.regions[-1]
+ ctx['space_data'] = area.spaces.active
+ ctx['screen'] = scr#C.screen
+ ctx['window'] = win
+
+ #bpy.ops.console.banner(ctx, text = "Hello world")
+ bpy.ops.console.clear_line(ctx)
+ stdmsg = msg.split('\n') #XXX todo , test and see
+ for i in stdmsg:
+ bpy.ops.console.insert(ctx, text = i)
self.update_stats("", "")
if scene.pov.tempfiles_enable or scene.pov.deletefiles_enable:
self._cleanup()
+ sound_on = bpy.context.preferences.addons[
+ __package__
+ ].preferences.use_sounds
+
+ if sys.platform[:3] == "win" and sound_on:
+ # Could not find tts Windows command so playing beeps instead :-)
+ # "Korobeiniki"(Коробе́йники)
+ # aka "A-Type" Tetris theme
+ import winsound
+ winsound.Beep(494,250) #B
+ winsound.Beep(370,125) #F#
+ winsound.Beep(392,125) #G
+ winsound.Beep(440,250) #A
+ winsound.Beep(392,125) #G
+ winsound.Beep(370,125) #F#
+ winsound.Beep(330,275) #E
+ winsound.Beep(330,125) #E
+ winsound.Beep(392,125) #G
+ winsound.Beep(494,275) #B
+ winsound.Beep(440,125) #A
+ winsound.Beep(392,125) #G
+ winsound.Beep(370,275) #F#
+ winsound.Beep(370,125) #F#
+ winsound.Beep(392,125) #G
+ winsound.Beep(440,250) #A
+ winsound.Beep(494,250) #B
+ winsound.Beep(392,250) #G
+ winsound.Beep(330,350) #E
+ time.sleep(0.5)
+ winsound.Beep(440,250) #A
+ winsound.Beep(440,150) #A
+ winsound.Beep(523,125) #C5
+ winsound.Beep(659,250) #E5
+ winsound.Beep(587,125) #D5
+ winsound.Beep(523,125) #C5
+ winsound.Beep(494,250) #B
+ winsound.Beep(494,125) #B
+ winsound.Beep(392,125) #G
+ winsound.Beep(494,250) #B
+ winsound.Beep(440,150) #A
+ winsound.Beep(392,125) #G
+ winsound.Beep(370,250) #F#
+ winsound.Beep(370,125) #F#
+ winsound.Beep(392,125) #G
+ winsound.Beep(440,250) #A
+ winsound.Beep(494,250) #B
+ winsound.Beep(392,250) #G
+ winsound.Beep(330,300) #E
+
+ # Does Linux support the say command? (say ships with macOS)
+ elif sys.platform[:3] != "win" and sound_on:
+ finished_render_message = "\'Render completed\'"
+ # We don't want the say command to block Python,
+ # so we add an ampersand after the message
+ os.system("say %s &" % (finished_render_message))
##################################################################################
#################################Operators########################################
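The console echo added in the hunk above is flagged by its own TODO as something to factor into a reusable function. A hedged sketch of what that helper could look like, using only the operator calls and context-override dict style already present in the patch (Blender 2.8x API); the function name is ours:

    import bpy

    def print_to_console_areas(msg):
        # Echo a multi-line string into every open Python Console area.
        for win in bpy.context.window_manager.windows:
            if win.screen is None:
                continue
            for area in win.screen.areas:
                if area.type != 'CONSOLE':
                    continue
                ctx = {
                    'window': win,
                    'screen': win.screen,
                    'area': area,
                    'region': area.regions[-1],
                    'space_data': area.spaces.active,
                }
                bpy.ops.console.clear_line(ctx)
                for line in msg.split('\n'):
                    bpy.ops.console.insert(ctx, text=line)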
@@ -5833,7 +6009,7 @@ class RenderPovTexturePreview(Operator):
outputPrevFile = os.path.join(preview_dir, texPrevName)
##################### ini ##########################################
fileIni = open("%s" % iniPrevFile, "w")
- fileIni.write('Version=3.7\n')
+ fileIni.write('Version=3.8\n')
fileIni.write('Input_File_Name="%s"\n' % inputPrevFile)
fileIni.write('Output_File_Name="%s.png"\n' % outputPrevFile)
fileIni.write('Library_Path="%s"\n' % preview_dir)
diff --git a/render_povray/shading.py b/render_povray/shading.py
index ac1f923f..a3f907dc 100644
--- a/render_povray/shading.py
+++ b/render_povray/shading.py
@@ -155,7 +155,7 @@ def writeMaterial(using_uberpov, DEF_MAT_NAME, scene, tabWrite, safety, comments
elif Level == 1:
if (material.pov.specular_shader == 'COOKTORR' or
material.pov.specular_shader == 'PHONG'):
- tabWrite("phong %.3g\n" % (material.pov.specular_intensity/5))
+ tabWrite("phong 0\n")#%.3g\n" % (material.pov.specular_intensity/5))
tabWrite("phong_size %.3g\n" % (material.pov.specular_hardness /3.14))
# POV-Ray 'specular' keyword corresponds to a Blinn model, without the ior.
@@ -183,8 +183,10 @@ def writeMaterial(using_uberpov, DEF_MAT_NAME, scene, tabWrite, safety, comments
# specular for some values.
tabWrite("brilliance %.4g\n" % (1.8 - material.pov.specular_slope * 1.8))
elif Level == 3:
- tabWrite("specular %.3g\n" % ((material.pov.specular_intensity*material.pov.specular_color.v)*5))
- tabWrite("roughness %.3g\n" % (1.1/material.pov.specular_hardness))
+ # Spec must be maxed at Level 3 so that the white of a mixing texture always shows specularity.
+ # That's why it's multiplied by 255. Maybe replace it by the texture's brightest pixel value?
+ tabWrite("specular %.3g\n" % ((material.pov.specular_intensity*material.pov.specular_color.v)*(255* slot.specular_factor)))
+ tabWrite("roughness %.3g\n" % (1/material.pov.specular_hardness))
tabWrite("diffuse %.3g %.3g\n" % (frontDiffuse, backDiffuse))
tabWrite("ambient %.3g\n" % material.pov.ambient)
@@ -265,12 +267,12 @@ def writeMaterial(using_uberpov, DEF_MAT_NAME, scene, tabWrite, safety, comments
if material:
special_texture_found = False
- idx = -1
+ tmpidx = -1
for t in material.pov_texture_slots:
- idx += 1
+ tmpidx += 1
# index = material.pov.active_texture_index
- slot = material.pov_texture_slots[idx] # [index]
- povtex = slot.name
+ slot = material.pov_texture_slots[tmpidx] # [index]
+ povtex = slot.texture # slot.name
tex = bpy.data.textures[povtex]
if t and t.use and tex is not None:
@@ -777,7 +779,7 @@ def exportPattern(texture, string_strip_hyphen):
return(texStrg)
-def writeTextureInfluence(mater, materialNames, LocalMaterialNames, path_image, lampCount,
+def writeTextureInfluence(using_uberpov, mater, materialNames, LocalMaterialNames, path_image, lampCount,
imageFormat, imgMap, imgMapTransforms, tabWrite, comments,
string_strip_hyphen, safety, col, os, preview_dir, unpacked_images):
"""Translate Blender texture influences to various POV texture tricks and write to pov file."""
@@ -805,14 +807,15 @@ def writeTextureInfluence(mater, materialNames, LocalMaterialNames, path_image,
texturesNorm = ""
texturesAlpha = ""
#proceduralFlag=False
+ tmpidx = -1
for t in mater.pov_texture_slots:
- idx = -1
- for t in mater.pov_texture_slots:
- idx += 1
- # index = mater.pov.active_texture_index
- slot = mater.pov_texture_slots[idx] # [index]
- povtex = slot.name
- tex = bpy.data.textures[povtex]
+
+
+ tmpidx += 1
+ # index = mater.pov.active_texture_index
+ slot = mater.pov_texture_slots[tmpidx] # [index]
+ povtex = slot.texture # slot.name
+ tex = bpy.data.textures[povtex]
if t and (t.use and (tex is not None)):
# 'NONE' ('NONE' type texture is different from no texture covered above)
@@ -876,11 +879,13 @@ def writeTextureInfluence(mater, materialNames, LocalMaterialNames, path_image,
# IMAGE SEQUENCE ENDS
imgGamma = ""
if image_filename:
+ texdata = bpy.data.textures[t.texture]
if t.use_map_color_diffuse:
texturesDif = image_filename
# colvalue = t.default_value # UNUSED
t_dif = t
- if t_dif.texture.pov.tex_gamma_enable:
+ print (texdata)
+ if texdata.pov.tex_gamma_enable:
imgGamma = (" gamma %.3g " % t_dif.texture.pov.tex_gamma_value)
if t.use_map_specular or t.use_map_raymir:
texturesSpec = image_filename
@@ -913,6 +918,7 @@ def writeTextureInfluence(mater, materialNames, LocalMaterialNames, path_image,
if mater.pov.replacement_text != "":
tabWrite("%s\n" % mater.pov.replacement_text)
#################################################################################
+ # XXX TODO: replace by new POV MINNAERT rather than aoi
if mater.pov.diffuse_shader == 'MINNAERT':
tabWrite("\n")
tabWrite("aoi\n")
@@ -1056,12 +1062,36 @@ def writeTextureInfluence(mater, materialNames, LocalMaterialNames, path_image,
mappingNor =imgMapTransforms(t_nor)
if texturesNorm and texturesNorm.startswith("PAT_"):
- tabWrite("normal{function{f%s(x,y,z).grey} bump_size %.4g %s}\n" %(texturesNorm, t_nor.normal_factor, mappingNor))
+ tabWrite("normal{function{f%s(x,y,z).grey} bump_size %.4g %s}\n" %(texturesNorm, ( - t_nor.normal_factor * 9.5), mappingNor))
else:
- tabWrite("normal {uv_mapping bump_map " \
- "{%s \"%s\" %s bump_size %.4g }%s}\n" % \
+ tabWrite("normal {\n")
+ # XXX TODO: fix and propagate the micro normals reflection blur below to non-textured materials
+ if (mater.pov_raytrace_mirror.use and mater.pov_raytrace_mirror.gloss_factor < 1.0 and not using_uberpov):
+ tabWrite("average\n")
+ tabWrite("normal_map{\n")
+ # 0.5 for entries below means a 50 percent mix
+ # between the micro normal and user bump map
+ # order seems indifferent as commutative
+ tabWrite("[0.025 bumps %.4g scale 0.1*%.4g]\n" %((10/(mater.pov_raytrace_mirror.gloss_factor+0.01)),(10/(mater.pov_raytrace_mirror.gloss_samples+0.001)))) # micronormals blurring
+ tabWrite("[0.025 bumps %.4g scale 0.1*%.4g phase 0.1]\n" %((10/(mater.pov_raytrace_mirror.gloss_factor+0.01)),(1/(mater.pov_raytrace_mirror.gloss_samples+0.001)))) # micronormals blurring
+ tabWrite("[0.025 bumps %.4g scale 0.1*%.4g phase 0.15]\n" %((10/(mater.pov_raytrace_mirror.gloss_factor+0.01)),(1/(mater.pov_raytrace_mirror.gloss_samples+0.001)))) # micronormals blurring
+ tabWrite("[0.025 bumps %.4g scale 0.1*%.4g phase 0.2]\n" %((10/(mater.pov_raytrace_mirror.gloss_factor+0.01)),(1/(mater.pov_raytrace_mirror.gloss_samples+0.001)))) # micronormals blurring
+ tabWrite("[0.025 bumps %.4g scale 0.1*%.4g phase 0.25]\n" %((10/(mater.pov_raytrace_mirror.gloss_factor+0.01)),(1/(mater.pov_raytrace_mirror.gloss_samples+0.001)))) # micronormals blurring
+ tabWrite("[0.025 bumps %.4g scale 0.1*%.4g phase 0.3]\n" %((10/(mater.pov_raytrace_mirror.gloss_factor+0.01)),(1/(mater.pov_raytrace_mirror.gloss_samples+0.001)))) # micronormals blurring
+ tabWrite("[0.025 bumps %.4g scale 0.1*%.4g phase 0.35]\n" %((10/(mater.pov_raytrace_mirror.gloss_factor+0.01)),(1/(mater.pov_raytrace_mirror.gloss_samples+0.001)))) # micronormals blurring
+ tabWrite("[0.025 bumps %.4g scale 0.1*%.4g phase 0.4]\n" %((10/(mater.pov_raytrace_mirror.gloss_factor+0.01)),(1/(mater.pov_raytrace_mirror.gloss_samples+0.001)))) # micronormals blurring
+ tabWrite("[0.025 bumps %.4g scale 0.1*%.4g phase 0.45]\n" %((10/(mater.pov_raytrace_mirror.gloss_factor+0.01)),(1/(mater.pov_raytrace_mirror.gloss_samples+0.001)))) # micronormals blurring
+ tabWrite("[0.025 bumps %.4g scale 0.1*%.4g phase 0.5]\n" %((10/(mater.pov_raytrace_mirror.gloss_factor+0.01)),(1/(mater.pov_raytrace_mirror.gloss_samples+0.001)))) # micronormals blurring
+ tabWrite("[1.0 ") # Proceed with user bump...
+ tabWrite("uv_mapping bump_map " \
+ "{%s \"%s\" %s bump_size %.4g }%s" % \
(imageFormat(texturesNorm), texturesNorm, imgMap(t_nor),
- t_nor.normal_factor, mappingNor))
+ ( - t_nor.normal_factor * 9.5), mappingNor))
+ # ...Then close its last entry and the normal_map itself
+ if (mater.pov_raytrace_mirror.use and mater.pov_raytrace_mirror.gloss_factor < 1.0 and not using_uberpov):
+ tabWrite("]}}\n")
+ else:
+ tabWrite("]}\n")
if texturesSpec != "":
tabWrite("]\n")
##################Second index for mapping specular max value###############
@@ -1094,6 +1124,35 @@ def writeTextureInfluence(mater, materialNames, LocalMaterialNames, path_image,
# Level 3 is full specular
tabWrite("finish {%s}\n" % (safety(material_finish, Level=3)))
+ if mater.pov_raytrace_mirror.use and mater.pov_raytrace_mirror.gloss_factor < 1.0 and not using_uberpov:
+ tabWrite("normal {\n")
+ tabWrite("average\n")
+ tabWrite("normal_map{\n")
+ # 0.5 for entries below means a 50 percent mix
+ # between the micro normal and user bump map
+ # order seems indifferent as commutative
+ tabWrite("[0.025 bumps %.4g scale 0.1*%.4g]\n" %((10/(mater.pov_raytrace_mirror.gloss_factor+0.01)),(10/(mater.pov_raytrace_mirror.gloss_samples+0.001)))) # micronormals blurring
+ tabWrite("[0.025 bumps %.4g scale 0.1*%.4g phase 0.1]\n" %((10/(mater.pov_raytrace_mirror.gloss_factor+0.01)),(1/(mater.pov_raytrace_mirror.gloss_samples+0.001)))) # micronormals blurring
+ tabWrite("[0.025 bumps %.4g scale 0.1*%.4g phase 0.15]\n" %((10/(mater.pov_raytrace_mirror.gloss_factor+0.01)),(1/(mater.pov_raytrace_mirror.gloss_samples+0.001)))) # micronormals blurring
+ tabWrite("[0.025 bumps %.4g scale 0.1*%.4g phase 0.2]\n" %((10/(mater.pov_raytrace_mirror.gloss_factor+0.01)),(1/(mater.pov_raytrace_mirror.gloss_samples+0.001)))) # micronormals blurring
+ tabWrite("[0.025 bumps %.4g scale 0.1*%.4g phase 0.25]\n" %((10/(mater.pov_raytrace_mirror.gloss_factor+0.01)),(1/(mater.pov_raytrace_mirror.gloss_samples+0.001)))) # micronormals blurring
+ tabWrite("[0.025 bumps %.4g scale 0.1*%.4g phase 0.3]\n" %((10/(mater.pov_raytrace_mirror.gloss_factor+0.01)),(1/(mater.pov_raytrace_mirror.gloss_samples+0.001)))) # micronormals blurring
+ tabWrite("[0.025 bumps %.4g scale 0.1*%.4g phase 0.35]\n" %((10/(mater.pov_raytrace_mirror.gloss_factor+0.01)),(1/(mater.pov_raytrace_mirror.gloss_samples+0.001)))) # micronormals blurring
+ tabWrite("[0.025 bumps %.4g scale 0.1*%.4g phase 0.4]\n" %((10/(mater.pov_raytrace_mirror.gloss_factor+0.01)),(1/(mater.pov_raytrace_mirror.gloss_samples+0.001)))) # micronormals blurring
+ tabWrite("[0.025 bumps %.4g scale 0.1*%.4g phase 0.45]\n" %((10/(mater.pov_raytrace_mirror.gloss_factor+0.01)),(1/(mater.pov_raytrace_mirror.gloss_samples+0.001)))) # micronormals blurring
+ tabWrite("[0.025 bumps %.4g scale 0.1*%.4g phase 0.5]\n" %((10/(mater.pov_raytrace_mirror.gloss_factor+0.01)),(1/(mater.pov_raytrace_mirror.gloss_samples+0.001)))) # micronormals blurring
+ #XXX IF USER BUMP_MAP
+ if texturesNorm != "":
+ tabWrite("[1.0 ") # Blurry reflection or not Proceed with user bump in either case...
+ tabWrite("uv_mapping bump_map " \
+ "{%s \"%s\" %s bump_size %.4g }%s]\n" % \
+ (imageFormat(texturesNorm), texturesNorm, imgMap(t_nor),
+ ( - t_nor.normal_factor * 9.5), mappingNor))
+ # ...Then close the normal_map itself if blurry reflection
+ if mater.pov_raytrace_mirror.use and mater.pov_raytrace_mirror.gloss_factor < 1.0 and not using_uberpov:
+ tabWrite("}}\n")
+ else:
+ tabWrite("}\n")
elif colored_specular_found:
# Level 1 is no specular
tabWrite("finish {%s}\n" % (safety(material_finish, Level=1)))
@@ -1166,11 +1225,36 @@ def writeTextureInfluence(mater, materialNames, LocalMaterialNames, path_image,
mappingNor =imgMapTransforms(t_nor)
if texturesNorm and texturesNorm.startswith("PAT_"):
- tabWrite("normal{function{f%s(x,y,z).grey} bump_size %.4g %s}\n" %(texturesNorm, t_nor.normal_factor, mappingNor))
+ tabWrite("normal{function{f%s(x,y,z).grey} bump_size %.4g %s}\n" %(texturesNorm, ( - t_nor.normal_factor * 9.5), mappingNor))
else:
- tabWrite("normal {uv_mapping bump_map {%s \"%s\" %s bump_size %.4g }%s}\n" % \
+ tabWrite("normal {\n")
+ # XXX TODO: fix and propagate the micro normals reflection blur below to non textured materials
+ if mater.pov_raytrace_mirror.use and mater.pov_raytrace_mirror.gloss_factor < 1.0 and not using_uberpov:
+ tabWrite("average\n")
+ tabWrite("normal_map{\n")
+ # 0.5 for entries below means a 50 percent mix
+ # between the micro normal and user bump map
+ # order should not matter since averaging is commutative
+ tabWrite("[0.025 bumps %.4g scale 0.1*%.4g]\n" %((10/(mater.pov_raytrace_mirror.gloss_factor+0.01)),(10/(mater.pov_raytrace_mirror.gloss_samples+0.001)))) # micronormals blurring
+ tabWrite("[0.025 bumps %.4g scale 0.1*%.4g phase 0.1]\n" %((10/(mater.pov_raytrace_mirror.gloss_factor+0.01)),(1/(mater.pov_raytrace_mirror.gloss_samples+0.001)))) # micronormals blurring
+ tabWrite("[0.025 bumps %.4g scale 0.1*%.4g phase 0.15]\n" %((10/(mater.pov_raytrace_mirror.gloss_factor+0.01)),(1/(mater.pov_raytrace_mirror.gloss_samples+0.001)))) # micronormals blurring
+ tabWrite("[0.025 bumps %.4g scale 0.1*%.4g phase 0.2]\n" %((10/(mater.pov_raytrace_mirror.gloss_factor+0.01)),(1/(mater.pov_raytrace_mirror.gloss_samples+0.001)))) # micronormals blurring
+ tabWrite("[0.025 bumps %.4g scale 0.1*%.4g phase 0.25]\n" %((10/(mater.pov_raytrace_mirror.gloss_factor+0.01)),(1/(mater.pov_raytrace_mirror.gloss_samples+0.001)))) # micronormals blurring
+ tabWrite("[0.025 bumps %.4g scale 0.1*%.4g phase 0.3]\n" %((10/(mater.pov_raytrace_mirror.gloss_factor+0.01)),(1/(mater.pov_raytrace_mirror.gloss_samples+0.001)))) # micronormals blurring
+ tabWrite("[0.025 bumps %.4g scale 0.1*%.4g phase 0.35]\n" %((10/(mater.pov_raytrace_mirror.gloss_factor+0.01)),(1/(mater.pov_raytrace_mirror.gloss_samples+0.001)))) # micronormals blurring
+ tabWrite("[0.025 bumps %.4g scale 0.1*%.4g phase 0.4]\n" %((10/(mater.pov_raytrace_mirror.gloss_factor+0.01)),(1/(mater.pov_raytrace_mirror.gloss_samples+0.001)))) # micronormals blurring
+ tabWrite("[0.025 bumps %.4g scale 0.1*%.4g phase 0.45]\n" %((10/(mater.pov_raytrace_mirror.gloss_factor+0.01)),(1/(mater.pov_raytrace_mirror.gloss_samples+0.001)))) # micronormals blurring
+ tabWrite("[0.025 bumps %.4g scale 0.1*%.4g phase 0.5]\n" %((10/(mater.pov_raytrace_mirror.gloss_factor+0.01)),(1/(mater.pov_raytrace_mirror.gloss_samples+0.001)))) # micronormals blurring
+ tabWrite("[1.0 ") # Blurry reflection or not Proceed with user bump in either case...
+ tabWrite("uv_mapping bump_map " \
+ "{%s \"%s\" %s bump_size %.4g }%s]\n" % \
(imageFormat(texturesNorm), texturesNorm, imgMap(t_nor),
- t_nor.normal_factor, mappingNor))
+ ( - t_nor.normal_factor * 9.5), mappingNor))
+ # ...Then close the normal_map itself if blurry reflection
+ if mater.pov_raytrace_mirror.use and mater.pov_raytrace_mirror.gloss_factor < 1.0 and not using_uberpov:
+ tabWrite("}}\n")
+ else:
+ tabWrite("}\n")
if texturesSpec != "" and mater.pov.replacement_text == "":
tabWrite("]\n")
@@ -1201,12 +1285,12 @@ def writeTextureInfluence(mater, materialNames, LocalMaterialNames, path_image,
# Write another layered texture using invisible diffuse and metallic trick
# to emulate colored specular highlights
special_texture_found = False
- idx = -1
+ tmpidx = -1
for t in mater.pov_texture_slots:
- idx += 1
+ tmpidx += 1
# index = mater.pov.active_texture_index
- slot = mater.pov_texture_slots[idx] # [index]
- povtex = slot.name
+ slot = mater.pov_texture_slots[tmpidx] # [index]
+ povtex = slot.texture # slot.name
tex = bpy.data.textures[povtex]
if(t and t.use and ((tex.type == 'IMAGE' and tex.image) or tex.type != 'IMAGE') and
(t.use_map_specular or t.use_map_raymir)):
@@ -1240,7 +1324,7 @@ def writeTextureInfluence(mater, materialNames, LocalMaterialNames, path_image,
if image_filename:
if t.use_map_normal:
texturesNorm = image_filename
- # colvalue = t.normal_factor/10 # UNUSED
+ # colvalue = t.normal_factor/10 # UNUSED XXX *-9.5 !
#textNormName=tex.image.name + ".normal"
#was the above used? --MR
t_nor = t
@@ -1248,13 +1332,13 @@ def writeTextureInfluence(mater, materialNames, LocalMaterialNames, path_image,
tabWrite("normal{function" \
"{f%s(x,y,z).grey} bump_size %.4g}\n" % \
(texturesNorm,
- t_nor.normal_factor))
+ ( - t_nor.normal_factor * 9.5)))
else:
tabWrite("normal {uv_mapping bump_map " \
"{%s \"%s\" %s bump_size %.4g }%s}\n" % \
(imageFormat(texturesNorm),
texturesNorm, imgMap(t_nor),
- t_nor.normal_factor,
+ ( - t_nor.normal_factor * 9.5),
mappingNor))
tabWrite("}\n") # THEN IT CAN CLOSE LAST LAYER OF TEXTURE
diff --git a/render_povray/ui.py b/render_povray/ui.py
index e15a9374..6bdadccf 100644
--- a/render_povray/ui.py
+++ b/render_povray/ui.py
@@ -22,11 +22,15 @@
import bpy
import sys # really import here and in render.py?
import os # really import here and in render.py?
+import addon_utils
+from time import sleep
from os.path import isfile
+from bpy.app.handlers import persistent
from bl_operators.presets import AddPresetBase
from bpy.utils import register_class, unregister_class
from bpy.types import (
Operator,
+ Menu,
UIList,
Panel,
Brush,
@@ -43,17 +47,29 @@ from bl_ui import properties_output
for member in dir(properties_output):
subclass = getattr(properties_output, member)
try:
- subclass.COMPAT_ENGINES.add('POVRAY')
+ subclass.COMPAT_ENGINES.add('POVRAY_RENDER')
except:
pass
del properties_output
+from bl_ui import properties_freestyle
+for member in dir(properties_freestyle):
+ subclass = getattr(properties_freestyle, member)
+ try:
+ if not (subclass.bl_space_type == 'PROPERTIES'
+ and subclass.bl_context == "render"):
+ subclass.COMPAT_ENGINES.add('POVRAY_RENDER')
+ #subclass.bl_parent_id = "RENDER_PT_POV_filter"
+ except:
+ pass
+del properties_freestyle
+
from bl_ui import properties_view_layer
for member in dir(properties_view_layer):
subclass = getattr(properties_view_layer, member)
try:
- subclass.COMPAT_ENGINES.add('POVRAY')
+ subclass.COMPAT_ENGINES.add('POVRAY_RENDER')
except:
pass
del properties_view_layer
@@ -242,19 +258,136 @@ for member in dir(
pass
del properties_particle
-# Example of wrapping every class 'as is'
-from bl_ui import properties_output
-for member in dir(properties_output):
- subclass = getattr(properties_output, member)
- try:
- subclass.COMPAT_ENGINES.add('POVRAY_RENDER')
- except:
- pass
-del properties_output
+############# POV-Centric WORKSPACE #############
+@persistent
+def povCentricWorkspace(dummy):
+ """Set up a POV centric Workspace if addon was activated and saved as default renderer
+
+ This would bring a ’_RestrictData’ error because UI needs to be fully loaded before
+ workspace changes so registering this function in bpy.app.handlers is needed.
+ By default handlers are freed when loading new files, but here we want the handler
+ to stay running across multiple files as part of this add-on. That is why the the
+ bpy.app.handlers.persistent decorator is used (@persistent) above.
+ """
+
+ wsp = bpy.data.workspaces.get('Scripting')
+ context = bpy.context
+ if wsp is not None and context.scene.render.engine == 'POVRAY_RENDER':
+ new_wsp = bpy.ops.workspace.duplicate({'workspace': wsp})
+ bpy.data.workspaces['Scripting.001'].name='POV'
+ # Already done it would seem, but explicitly make this workspace the active one
+ context.window.workspace = bpy.data.workspaces['POV']
+ pov_screen = bpy.data.workspaces['POV'].screens[0]
+ pov_workspace = pov_screen.areas
+
+
+ override = bpy.context.copy()
+
+ for area in pov_workspace:
+ if area.type == 'VIEW_3D':
+ for region in [r for r in area.regions if r.type == 'WINDOW']:
+ for space in area.spaces:
+ if space.type == 'VIEW_3D':
+ #override['screen'] = pov_screen
+ override['area'] = area
+ override['region']= region
+ #bpy.data.workspaces['POV'].screens[0].areas[6].spaces[0].width = 333 # Read only, how do we set ?
+ #This has a glitch:
+ #bpy.ops.screen.area_move(override, x=(area.x + area.width), y=(area.y + 5), delta=100)
+ #bpy.ops.screen.area_move(override, x=(area.x + 5), y=area.y, delta=-100)
+
+ bpy.ops.screen.space_type_set_or_cycle(override, space_type = 'TEXT_EDITOR')
+ space.show_region_ui = True
+ #bpy.ops.screen.region_scale(override)
+ #bpy.ops.screen.region_scale()
+ break
+
+ elif area.type == 'CONSOLE':
+ for region in [r for r in area.regions if r.type == 'WINDOW']:
+ for space in area.spaces:
+ if space.type == 'CONSOLE':
+ #override['screen'] = pov_screen
+ override['area'] = area
+ override['region']= region
+ bpy.ops.screen.space_type_set_or_cycle(override, space_type = 'INFO')
+
+ break
+ elif area.type == 'INFO':
+ for region in [r for r in area.regions if r.type == 'WINDOW']:
+ for space in area.spaces:
+ if space.type == 'INFO':
+ #override['screen'] = pov_screen
+ override['area'] = area
+ override['region']= region
+ bpy.ops.screen.space_type_set_or_cycle(override, space_type = 'CONSOLE')
+
+ break
+
+ elif area.type == 'TEXT_EDITOR':
+ for region in [r for r in area.regions if r.type == 'WINDOW']:
+ for space in area.spaces:
+ if space.type == 'TEXT_EDITOR':
+ #override['screen'] = pov_screen
+ override['area'] = area
+ override['region']= region
+ #bpy.ops.screen.space_type_set_or_cycle(space_type='VIEW_3D')
+ #space.type = 'VIEW_3D'
+ bpy.ops.screen.space_type_set_or_cycle(override, space_type = 'VIEW_3D')
+
+ #bpy.ops.screen.area_join(override, cursor=(area.x, area.y + area.height))
+
+ break
+
+
+ if area.type == 'VIEW_3D':
+ for region in [r for r in area.regions if r.type == 'WINDOW']:
+ for space in area.spaces:
+ if space.type == 'VIEW_3D':
+ #override['screen'] = pov_screen
+ override['area'] = area
+ override['region']= region
+ bpy.ops.screen.region_quadview(override)
+ space.region_3d.view_perspective = 'CAMERA'
+ #bpy.ops.screen.space_type_set_or_cycle(override, space_type = 'TEXT_EDITOR')
+ #bpy.ops.screen.region_quadview(override)
+
+
+
+
-class WORLD_MT_POV_presets(bpy.types.Menu):
+ bpy.data.workspaces.update()
+ # Both areas are already Outliners; swap their two display modes
+ pov_workspace[1].spaces[0].display_mode = 'LIBRARIES'
+ pov_workspace[3].spaces[0].display_mode = 'VIEW_LAYER'
+
+ '''
+ for window in bpy.context.window_manager.windows:
+ for area in [a for a in window.screen.areas if a.type == 'VIEW_3D']:
+ for region in [r for r in area.regions if r.type == 'WINDOW']:
+ context_override = {
+ 'window': window,
+ 'screen': window.screen,
+ 'area': area,
+ 'region': region,
+ 'space_data': area.spaces.active,
+ 'scene': bpy.context.scene
+ }
+ bpy.ops.view3d.camera_to_view(context_override)
+ '''
+
+
+ else:
+ print("default 'Scripting' workspace needed for POV centric Workspace")
+
+
+
+
+
+
+
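
The docstring above relies on the standard persistent-handler pattern; a minimal sketch, with illustrative names, of how such a handler is registered and removed:

import bpy
from bpy.app.handlers import persistent

@persistent
def _on_load_post(dummy):
    # runs after every .blend load, so UI/workspace state is fully available here
    print("file loaded:", bpy.data.filepath)

def register_handler():
    if _on_load_post not in bpy.app.handlers.load_post:
        bpy.app.handlers.load_post.append(_on_load_post)

def unregister_handler():
    if _on_load_post in bpy.app.handlers.load_post:
        bpy.app.handlers.load_post.remove(_on_load_post)
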
+class WORLD_MT_POV_presets(Menu):
bl_label = "World Presets"
preset_subdir = "pov/world"
preset_operator = "script.execute_preset"
@@ -262,7 +395,7 @@ class WORLD_MT_POV_presets(bpy.types.Menu):
class WORLD_OT_POV_add_preset(AddPresetBase, Operator):
- '''Add a World Preset'''
+ """Add a World Preset"""
bl_idname = "object.world_preset_add"
bl_label = "Add World Preset"
@@ -324,6 +457,15 @@ def check_add_mesh_extra_objects():
return True
return False
+def check_render_freestyle_svg():
+ """Test if Freestyle SVG Exporter addon is activated
+
+ This addon is currently used to generate the SVG lines file
+ when Freestyle is enabled alongside POV
+ """
+ if "render_freestyle_svg" in bpy.context.preferences.addons.keys():
+ return True
+ return False
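
Since ui.py already imports addon_utils, an equivalent check could also go through addon_utils.check, which reports both the saved preference and the runtime load state; a minimal sketch:

import addon_utils

def freestyle_svg_enabled():
    enabled_in_prefs, loaded = addon_utils.check("render_freestyle_svg")
    return loaded
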
def locate_docpath():
"""POV can be installed with some include files.
@@ -373,31 +515,29 @@ def pov_context_tex_datablock(context):
"""Texture context type recreated as deprecated in blender 2.8"""
idblock = context.brush
- if idblock and bpy.context.scene.texture_context == 'OTHER':
+ if idblock and context.scene.texture_context == 'OTHER':
return idblock
# idblock = bpy.context.active_object.active_material
- idblock = bpy.context.scene.view_layers[
- "View Layer"
- ].objects.active.active_material
- if idblock:
+ idblock = context.view_layer.objects.active.active_material
+ if idblock and context.scene.texture_context == 'MATERIAL':
return idblock
- idblock = context.world
- if idblock:
+ idblock = context.scene.world
+ if idblock and context.scene.texture_context == 'WORLD':
return idblock
idblock = context.light
- if idblock:
+ if idblock and context.scene.texture_context == 'LIGHT':
return idblock
- if context.particle_system:
+ if context.particle_system and context.scene.texture_context == 'PARTICLES':
idblock = context.particle_system.settings
return idblock
idblock = context.line_style
- if idblock:
+ if idblock and context.scene.texture_context == 'LINESTYLE':
return idblock
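
The hunk above makes each branch depend on the active texture context tab; reduced to its core, the dispatch amounts to something like this sketch (not the full function):

def pick_texture_datablock(context):
    tex_ctx = context.scene.texture_context
    if tex_ctx == 'OTHER':
        return context.brush
    if tex_ctx == 'MATERIAL':
        return context.view_layer.objects.active.active_material
    if tex_ctx == 'WORLD':
        return context.scene.world
    if tex_ctx == 'LIGHT':
        return context.light
    if tex_ctx == 'PARTICLES' and context.particle_system:
        return context.particle_system.settings
    if tex_ctx == 'LINESTYLE':
        return context.line_style
    return None
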
@@ -688,7 +828,7 @@ class LIGHT_PT_POV_light(PovLampButtonsPanel, Panel):
draw = properties_data_light.DATA_PT_light.draw
-class LIGHT_MT_POV_presets(bpy.types.Menu):
+class LIGHT_MT_POV_presets(Menu):
"""Use this class to define preset menu for pov lights."""
bl_label = "Lamp Presets"
@@ -1114,9 +1254,8 @@ class WORLD_PT_POV_mist(WorldButtonsPanel, Panel):
class RENDER_PT_POV_export_settings(RenderButtonsPanel, Panel):
"""Use this class to define pov ini settingss buttons."""
-
- bl_label = "Start Options"
bl_options = {'DEFAULT_CLOSED'}
+ bl_label = "Auto Start"
COMPAT_ENGINES = {'POVRAY_RENDER'}
def draw_header(self, context):
@@ -1131,6 +1270,7 @@ class RENDER_PT_POV_export_settings(RenderButtonsPanel, Panel):
)
def draw(self, context):
+
layout = self.layout
scene = context.scene
@@ -1143,25 +1283,25 @@ class RENDER_PT_POV_export_settings(RenderButtonsPanel, Panel):
col.prop(scene.pov, "command_line_switches", text="")
split = layout.split()
- layout.active = not scene.pov.tempfiles_enable
- # if not scene.pov.tempfiles_enable:
- split.prop(scene.pov, "deletefiles_enable", text="Delete files")
- split.prop(scene.pov, "pov_editor", text="POV Editor")
+ #layout.active = not scene.pov.tempfiles_enable
+ if not scene.pov.tempfiles_enable:
+ split.prop(scene.pov, "deletefiles_enable", text="Delete files")
+ split.prop(scene.pov, "pov_editor", text="POV Editor")
- col = layout.column()
- col.prop(scene.pov, "scene_name", text="Name")
- col.prop(scene.pov, "scene_path", text="Path to files")
- # col.prop(scene.pov, "scene_path", text="Path to POV-file")
- # col.prop(scene.pov, "renderimage_path", text="Path to image")
+ col = layout.column()
+ col.prop(scene.pov, "scene_name", text="Name")
+ col.prop(scene.pov, "scene_path", text="Path to files")
+ # col.prop(scene.pov, "scene_path", text="Path to POV-file")
+ # col.prop(scene.pov, "renderimage_path", text="Path to image")
- split = layout.split()
- split.prop(scene.pov, "indentation_character", text="Indent")
- if scene.pov.indentation_character == 'SPACE':
- split.prop(scene.pov, "indentation_spaces", text="Spaces")
+ split = layout.split()
+ split.prop(scene.pov, "indentation_character", text="Indent")
+ if scene.pov.indentation_character == 'SPACE':
+ split.prop(scene.pov, "indentation_spaces", text="Spaces")
- row = layout.row()
- row.prop(scene.pov, "comments_enable", text="Comments")
- row.prop(scene.pov, "list_lf_enable", text="Line breaks in lists")
+ row = layout.row()
+ row.prop(scene.pov, "comments_enable", text="Comments")
+ row.prop(scene.pov, "list_lf_enable", text="Line breaks in lists")
class RENDER_PT_POV_render_settings(RenderButtonsPanel, Panel):
@@ -1414,7 +1554,7 @@ class RENDER_PT_POV_radiosity(RenderButtonsPanel, Panel):
col.prop(scene.pov, "radio_subsurface")
-class POV_RADIOSITY_MT_presets(bpy.types.Menu):
+class POV_RADIOSITY_MT_presets(Menu):
"""Use this class to define pov radiosity presets menu."""
bl_label = "Radiosity Presets"
@@ -1562,7 +1702,7 @@ class MODIFIERS_PT_POV_modifiers(ModifierButtonsPanel, Panel):
col.prop(ob.pov, "inside_vector")
-class MATERIAL_MT_POV_sss_presets(bpy.types.Menu):
+class MATERIAL_MT_POV_sss_presets(Menu):
"""Use this class to define pov sss preset menu."""
bl_label = "SSS Presets"
@@ -1872,7 +2012,7 @@ class MATERIAL_PT_POV_mirror(MaterialButtonsPanel, Panel):
sub = col.column()
sub.active = raym.gloss_factor < 1.0
sub.prop(raym, "gloss_threshold", text="Threshold")
- sub.prop(raym, "gloss_samples", text="Samples")
+ sub.prop(raym, "gloss_samples", text="Noise")
sub.prop(raym, "gloss_anisotropic", text="Anisotropic")
@@ -2178,7 +2318,7 @@ class MATERIAL_PT_POV_replacement_text(MaterialButtonsPanel, Panel):
col.prop(mat.pov, "replacement_text", text="")
-class TEXTURE_MT_POV_specials(bpy.types.Menu):
+class TEXTURE_MT_POV_specials(Menu):
"""Use this class to define pov texture slot operations buttons."""
bl_label = "Texture Specials"
@@ -2191,14 +2331,20 @@ class TEXTURE_MT_POV_specials(bpy.types.Menu):
layout.operator("texture.slot_paste", icon='PASTEDOWN')
-class TEXTURE_UL_POV_texture_slots(bpy.types.UIList):
- """Use this class to show pov texture slots list.""" # used?
-
- COMPAT_ENGINES = {'POVRAY_RENDER'}
+class WORLD_TEXTURE_SLOTS_UL_POV_layerlist(UIList):
+ """Use this class to show pov texture slots list.""" # XXX Not used yet
+ index: bpy.props.IntProperty(name='index')
def draw_item(
self, context, layout, data, item, icon, active_data, active_propname
):
+ world = context.scene.world # .pov
+ active_data = world.pov
+ # tex = context.texture #may be needed later?
+
+ # We could write some code to decide which icon to use here...
+ custom_icon = 'TEXTURE'
+
ob = data
slot = item
# ma = slot.name
@@ -2220,62 +2366,7 @@ class TEXTURE_UL_POV_texture_slots(bpy.types.UIList):
layout.label(text="", icon_value=icon)
-'''
-class MATERIAL_TEXTURE_SLOTS_UL_List(UIList):
- """Texture Slots UIList."""
-
-
- def draw_item(self, context, layout, material, item, icon, active_data,
- material_texture_list_index, index):
- material = context.material#.pov
- active_data = material
- #tex = context.texture #may be needed later?
-
-
- # We could write some code to decide which icon to use here...
- custom_icon = 'TEXTURE'
-
- # Make sure your code supports all 3 layout types
- if self.layout_type in {'DEFAULT', 'COMPACT'}:
- layout.label(item.name, icon = custom_icon)
-
- elif self.layout_type in {'GRID'}:
- layout.alignment = 'CENTER'
- layout.label("", icon = custom_icon)
-'''
-
-
-class WORLD_TEXTURE_SLOTS_UL_List(UIList):
- """Use this class to show pov texture slots list.""" # XXX Not used yet
-
- def draw_item(
- self,
- context,
- layout,
- world,
- item,
- icon,
- active_data,
- active_texture_index,
- index,
- ):
- world = context.world # .pov
- active_data = world.pov
- # tex = context.texture #may be needed later?
-
- # We could write some code to decide which icon to use here...
- custom_icon = 'TEXTURE'
-
- # Make sure your code supports all 3 layout types
- if self.layout_type in {'DEFAULT', 'COMPACT'}:
- layout.label(item.name, icon=custom_icon)
-
- elif self.layout_type in {'GRID'}:
- layout.alignment = 'CENTER'
- layout.label("", icon=custom_icon)
-
-
-class MATERIAL_TEXTURE_SLOTS_UL_POV_layerlist(bpy.types.UIList):
+class MATERIAL_TEXTURE_SLOTS_UL_POV_layerlist(UIList):
"""Use this class to show pov texture slots list."""
# texture_slots:
@@ -2304,6 +2395,53 @@ class MATERIAL_TEXTURE_SLOTS_UL_POV_layerlist(bpy.types.UIList):
layout.alignment = 'CENTER'
layout.label(text="", icon_value=icon)
+# Rewrite an existing class to modify it.
+# It is registered but never unregistered because
+# the modified parts only concern POVRAY_RENDER
+class TEXTURE_PT_context(TextureButtonsPanel, Panel):
+ bl_label = ""
+ bl_context = "texture"
+ bl_options = {'HIDE_HEADER'}
+ COMPAT_ENGINES = {'POVRAY_RENDER', 'BLENDER_EEVEE', 'BLENDER_WORKBENCH'}
+
+ @classmethod
+ def poll(cls, context):
+ return (
+ (context.scene.texture_context
+ not in('MATERIAL','WORLD','LIGHT','PARTICLES','LINESTYLE')
+ or context.scene.render.engine != 'POVRAY_RENDER')
+ )
+ def draw(self, context):
+ layout = self.layout
+ tex = context.texture
+ space = context.space_data
+ pin_id = space.pin_id
+ use_pin_id = space.use_pin_id
+ user = context.texture_user
+
+ col = layout.column()
+
+ if not (use_pin_id and isinstance(pin_id, bpy.types.Texture)):
+ pin_id = None
+
+ if not pin_id:
+ col.template_texture_user()
+
+ if user or pin_id:
+ col.separator()
+
+ if pin_id:
+ col.template_ID(space, "pin_id")
+ else:
+ propname = context.texture_user_property.identifier
+ col.template_ID(user, propname, new="texture.new")
+
+ if tex:
+ col.separator()
+
+ split = col.split(factor=0.2)
+ split.label(text="Type")
+ split.prop(tex, "type", text="")
class TEXTURE_PT_POV_context_texture(TextureButtonsPanel, Panel):
"""Use this class to show pov texture context buttons."""
@@ -2316,11 +2454,11 @@ class TEXTURE_PT_POV_context_texture(TextureButtonsPanel, Panel):
def poll(cls, context):
engine = context.scene.render.engine
return engine in cls.COMPAT_ENGINES
- # if not (hasattr(context, "texture_slot") or hasattr(context, "texture_node")):
+ # if not (hasattr(context, "pov_texture_slot") or hasattr(context, "texture_node")):
# return False
return (
context.material
- or context.world
+ or context.scene.world
or context.light
or context.texture
or context.line_style
@@ -2333,11 +2471,12 @@ class TEXTURE_PT_POV_context_texture(TextureButtonsPanel, Panel):
layout = self.layout
scene = context.scene
+ mat = context.view_layer.objects.active.active_material
+ wld = context.scene.world
+
layout.prop(scene, "texture_context", expand=True)
- if scene.texture_context == 'MATERIAL':
- mat = context.scene.view_layers[
- "View Layer"
- ].objects.active.active_material
+ if scene.texture_context == 'MATERIAL' and mat is not None:
+
row = layout.row()
row.template_list(
"MATERIAL_TEXTURE_SLOTS_UL_POV_layerlist",
@@ -2353,22 +2492,90 @@ class TEXTURE_PT_POV_context_texture(TextureButtonsPanel, Panel):
col = row.column(align=True)
col.operator("pov.textureslotadd", icon='ADD', text='')
col.operator("pov.textureslotremove", icon='REMOVE', text='')
+ #todo: recreate for pov_texture_slots?
+ #col.operator("texture.slot_move", text="", icon='TRIA_UP').type = 'UP'
+ #col.operator("texture.slot_move", text="", icon='TRIA_DOWN').type = 'DOWN'
col.separator()
if mat.pov_texture_slots:
index = mat.pov.active_texture_index
slot = mat.pov_texture_slots[index]
- povtex = slot.name
+ povtex = slot.texture#slot.name
tex = bpy.data.textures[povtex]
col.prop(tex, 'use_fake_user', text='')
- layout.label(text='Find texture:')
+ #layout.label(text='Linked Texture data browser:')
+ propname = slot.texture_search
+ # if slot.texture was a pointer to texture data rather than just a name string:
+ # layout.template_ID(povtex, "texture", new="texture.new")
+
layout.prop_search(
- slot, 'texture_search', bpy.data, 'textures', text=''
+ slot, 'texture_search', bpy.data, 'textures', text='', icon='TEXTURE'
)
+ try:
+ bpy.context.tool_settings.image_paint.brush.texture = bpy.data.textures[slot.texture_search]
+ bpy.context.tool_settings.image_paint.brush.mask_texture = bpy.data.textures[slot.texture_search]
+ except KeyError:
+ # texture not hand-linked by user
+ pass
+
+ if tex:
+ layout.separator()
+ split = layout.split(factor=0.2)
+ split.label(text="Type")
+ split.prop(tex, "type", text="")
+
# else:
# for i in range(18): # length of material texture slots
# mat.pov_texture_slots.add()
+ elif scene.texture_context == 'WORLD' and wld is not None:
+ row = layout.row()
+ row.template_list(
+ "WORLD_TEXTURE_SLOTS_UL_POV_layerlist",
+ "",
+ wld,
+ "pov_texture_slots",
+ wld.pov,
+ "active_texture_index",
+ rows=2,
+ maxrows=16,
+ type="DEFAULT"
+ )
+ col = row.column(align=True)
+ col.operator("pov.textureslotadd", icon='ADD', text='')
+ col.operator("pov.textureslotremove", icon='REMOVE', text='')
+
+ #todo: recreate for pov_texture_slots?
+ #col.operator("texture.slot_move", text="", icon='TRIA_UP').type = 'UP'
+ #col.operator("texture.slot_move", text="", icon='TRIA_DOWN').type = 'DOWN'
+ col.separator()
+
+ if wld.pov_texture_slots:
+ index = wld.pov.active_texture_index
+ slot = wld.pov_texture_slots[index]
+ povtex = slot.texture#slot.name
+ tex = bpy.data.textures[povtex]
+ col.prop(tex, 'use_fake_user', text='')
+ #layout.label(text='Linked Texture data browser:')
+ propname = slot.texture_search
+ # if slot.texture was a pointer to texture data rather than just a name string:
+ # layout.template_ID(povtex, "texture", new="texture.new")
+
+ layout.prop_search(
+ slot, 'texture_search', bpy.data, 'textures', text='', icon='TEXTURE'
+ )
+ try:
+ bpy.context.tool_settings.image_paint.brush.texture = bpy.data.textures[slot.texture_search]
+ bpy.context.tool_settings.image_paint.brush.mask_texture = bpy.data.textures[slot.texture_search]
+ except KeyError:
+ # texture not hand-linked by user
+ pass
+
+ if tex:
+ layout.separator()
+ split = layout.split(factor=0.2)
+ split.label(text="Type")
+ split.prop(tex, "type", text="")
# Commented out below is a reminder of what existed in Blender Internal
# attributes need to be recreated
@@ -2518,7 +2725,7 @@ class TEXTURE_PT_colors(TextureButtonsPanel, Panel):
# Texture Slot Panels #
-class MATERIAL_OT_POV_texture_slot_add(Operator):
+class TEXTURE_OT_POV_texture_slot_add(Operator):
"""Use this class for the add texture slot button."""
bl_idname = "pov.textureslotadd"
@@ -2528,18 +2735,29 @@ class MATERIAL_OT_POV_texture_slot_add(Operator):
COMPAT_ENGINES = {'POVRAY_RENDER'}
def execute(self, context):
-
+ idblock = pov_context_tex_datablock(context)
tex = bpy.data.textures.new(name='Texture', type='IMAGE')
- tex.use_fake_user = True
- ob = context.scene.view_layers["View Layer"].objects.active
- slot = ob.active_material.pov_texture_slots.add()
+ #tex.use_fake_user = True
+ #mat = context.view_layer.objects.active.active_material
+ slot = idblock.pov_texture_slots.add()
slot.name = tex.name
slot.texture = tex.name
+ slot.texture_search = tex.name
+ # Switch paint brush and paint brush mask
+ # to this texture so settings remain contextual
+ bpy.context.tool_settings.image_paint.brush.texture = tex
+ bpy.context.tool_settings.image_paint.brush.mask_texture = tex
+ idblock.pov.active_texture_index = (len(idblock.pov_texture_slots)-1)
+
+ #for area in bpy.context.screen.areas:
+ #if area.type in ['PROPERTIES']:
+ #area.tag_redraw()
+
return {'FINISHED'}
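
The operator above follows the usual CollectionProperty workflow; a condensed sketch of that workflow, assuming an idblock that owns pov_texture_slots and a pov property group with active_texture_index:

import bpy

def add_texture_slot(idblock):
    tex = bpy.data.textures.new(name='Texture', type='IMAGE')
    slot = idblock.pov_texture_slots.add()
    slot.name = slot.texture = slot.texture_search = tex.name
    # point the active index at the new entry so the UI list selects it
    idblock.pov.active_texture_index = len(idblock.pov_texture_slots) - 1
    return slot
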
-class MATERIAL_OT_POV_texture_slot_remove(Operator):
+class TEXTURE_OT_POV_texture_slot_remove(Operator):
"""Use this class for the remove texture slot button."""
bl_idname = "pov.textureslotremove"
@@ -2549,14 +2767,23 @@ class MATERIAL_OT_POV_texture_slot_remove(Operator):
COMPAT_ENGINES = {'POVRAY_RENDER'}
def execute(self, context):
- pass
- # tex = bpy.data.textures.new()
- # tex_slot = context.object.active_material.pov_texture_slots.add()
- # tex_slot.name = tex.name
+ idblock = pov_context_tex_datablock(context)
+ #mat = context.view_layer.objects.active.active_material
+ tex_slot = idblock.pov_texture_slots.remove(idblock.pov.active_texture_index)
+ if idblock.pov.active_texture_index > 0:
+ idblock.pov.active_texture_index -= 1
+ try:
+ tex = idblock.pov_texture_slots[idblock.pov.active_texture_index].texture
+ except IndexError:
+ # No more slots
+ return {'FINISHED'}
+ # Switch paint brush to previous texture so settings remain contextual
+ # if 'tex' in locals(): # Would test if the tex variable is assigned / exists
+ bpy.context.tool_settings.image_paint.brush.texture = bpy.data.textures[tex]
+ bpy.context.tool_settings.image_paint.brush.mask_texture = bpy.data.textures[tex]
return {'FINISHED'}
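
Removing an entry from a CollectionProperty leaves the active index to be kept valid by hand, which is what the operator above does; a generic sketch of that pattern (names are placeholders for idblock.pov_texture_slots and idblock.pov):

def remove_active_slot(slots, owner):
    index = owner.active_texture_index
    if 0 <= index < len(slots):
        slots.remove(index)
    # clamp so the UI list keeps a valid selection after removal
    owner.active_texture_index = max(0, min(owner.active_texture_index, len(slots) - 1))
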
-
class TextureSlotPanel(TextureButtonsPanel):
"""Use this class to show pov texture slots panel."""
@@ -2586,7 +2813,7 @@ class TEXTURE_PT_POV_type(TextureButtonsPanel, Panel):
tex = context.texture
split = layout.split(factor=0.2)
- split.label(text="POV:")
+ split.label(text="Pattern")
split.prop(tex.pov, "tex_pattern_type", text="")
# row = layout.row()
@@ -2631,6 +2858,7 @@ class TEXTURE_PT_POV_parameters(TextureButtonsPanel, Panel):
"""Use this class to define pov texture pattern buttons."""
bl_label = "POV Pattern Options"
+ bl_options = {'HIDE_HEADER'}
COMPAT_ENGINES = {'POVRAY_RENDER'}
def draw(self, context):
@@ -2911,6 +3139,112 @@ class TEXTURE_PT_POV_parameters(TextureButtonsPanel, Panel):
row.prop(tex.pov, "warp_turbulence_z", text="Z")
row.prop(tex.pov, "modifier_omega", text="Omega")
+class TEXTURE_PT_POV_mapping(TextureSlotPanel, Panel):
+ """Use this class to define POV texture mapping buttons"""
+ bl_label = "Mapping"
+ COMPAT_ENGINES = {'POVRAY_RENDER'}
+ bl_space_type = 'PROPERTIES'
+ bl_region_type = 'WINDOW'
+
+ @classmethod
+ def poll(cls, context):
+ idblock = pov_context_tex_datablock(context)
+ if isinstance(idblock, Brush) and not context.sculpt_object:
+ return False
+
+ if not getattr(context, "texture_slot", None):
+ return False
+
+ engine = context.scene.render.engine
+ return (engine in cls.COMPAT_ENGINES)
+
+ def draw(self, context):
+ layout = self.layout
+
+ idblock = pov_context_tex_datablock(context)
+
+ #tex = context.texture_slot
+ tex = idblock.pov_texture_slots[
+ idblock.pov.active_texture_index
+ ]
+ if not isinstance(idblock, Brush):
+ split = layout.split(percentage=0.3)
+ col = split.column()
+ col.label(text="Coordinates:")
+ col = split.column()
+ col.prop(tex, "texture_coords", text="")
+
+ if tex.texture_coords == 'ORCO':
+ """
+ ob = context.object
+ if ob and ob.type == 'MESH':
+ split = layout.split(percentage=0.3)
+ split.label(text="Mesh:")
+ split.prop(ob.data, "texco_mesh", text="")
+ """
+ elif tex.texture_coords == 'UV':
+ split = layout.split(percentage=0.3)
+ split.label(text="Map:")
+ ob = context.object
+ if ob and ob.type == 'MESH':
+ split.prop_search(tex, "uv_layer", ob.data, "uv_textures", text="")
+ else:
+ split.prop(tex, "uv_layer", text="")
+
+ elif tex.texture_coords == 'OBJECT':
+ split = layout.split(percentage=0.3)
+ split.label(text="Object:")
+ split.prop(tex, "object", text="")
+
+ elif tex.texture_coords == 'ALONG_STROKE':
+ split = layout.split(percentage=0.3)
+ split.label(text="Use Tips:")
+ split.prop(tex, "use_tips", text="")
+
+ if isinstance(idblock, Brush):
+ if context.sculpt_object or context.image_paint_object:
+ brush_texture_settings(layout, idblock, context.sculpt_object)
+ else:
+ if isinstance(idblock, FreestyleLineStyle):
+ split = layout.split(percentage=0.3)
+ split.label(text="Projection:")
+ split.prop(tex, "mapping", text="")
+
+ split = layout.split(percentage=0.3)
+ split.separator()
+ row = split.row()
+ row.prop(tex, "mapping_x", text="")
+ row.prop(tex, "mapping_y", text="")
+ row.prop(tex, "mapping_z", text="")
+
+ elif isinstance(idblock, Material):
+ split = layout.split(percentage=0.3)
+ split.label(text="Projection:")
+ split.prop(tex, "mapping", text="")
+
+ split = layout.split()
+
+ col = split.column()
+ if tex.texture_coords in {'ORCO', 'UV'}:
+ col.prop(tex, "use_from_dupli")
+ if (idblock.type == 'VOLUME' and tex.texture_coords == 'ORCO'):
+ col.prop(tex, "use_map_to_bounds")
+ elif tex.texture_coords == 'OBJECT':
+ col.prop(tex, "use_from_original")
+ if (idblock.type == 'VOLUME'):
+ col.prop(tex, "use_map_to_bounds")
+ else:
+ col.label()
+
+ col = split.column()
+ row = col.row()
+ row.prop(tex, "mapping_x", text="")
+ row.prop(tex, "mapping_y", text="")
+ row.prop(tex, "mapping_z", text="")
+
+ row = layout.row()
+ row.column().prop(tex, "offset")
+ row.column().prop(tex, "scale")
class TEXTURE_PT_POV_influence(TextureSlotPanel, Panel):
"""Use this class to define pov texture influence buttons."""
@@ -2919,18 +3253,20 @@ class TEXTURE_PT_POV_influence(TextureSlotPanel, Panel):
COMPAT_ENGINES = {'POVRAY_RENDER'}
bl_space_type = 'PROPERTIES'
bl_region_type = 'WINDOW'
- # bl_context = 'texture'
+ #bl_context = 'texture'
@classmethod
def poll(cls, context):
idblock = pov_context_tex_datablock(context)
if (
- isinstance(idblock, Brush)
- and bpy.context.scene.texture_context == 'OTHER'
+ # isinstance(idblock, Brush) and # Brush used for everything since 2.8
+ context.scene.texture_context == 'OTHER'
): # XXX replace by isinstance(idblock, bpy.types.Brush) and ...
return False
- # if not getattr(context, "pov_texture_slot", None):
- # return False
+ # Specify below also for pov_world_texture_slots, lights etc.
+ # to display for various types of slots but only when any
+ if not getattr(idblock, "pov_texture_slots", None):
+ return False
engine = context.scene.render.engine
return engine in cls.COMPAT_ENGINES
@@ -2940,14 +3276,13 @@ class TEXTURE_PT_POV_influence(TextureSlotPanel, Panel):
layout = self.layout
idblock = pov_context_tex_datablock(context)
-
# tex = context.pov_texture_slot
- mat = bpy.context.active_object.active_material
- texslot = mat.pov_texture_slots[
- mat.active_texture_index
+ #mat = bpy.context.active_object.active_material
+ texslot = idblock.pov_texture_slots[
+ idblock.pov.active_texture_index
] # bpy.data.textures[mat.active_texture_index]
tex = bpy.data.textures[
- mat.pov_texture_slots[mat.active_texture_index].texture
+ idblock.pov_texture_slots[idblock.pov.active_texture_index].texture
]
def factor_but(layout, toggle, factor, name):
@@ -3756,7 +4091,7 @@ class OBJECT_PT_povray_replacement_text(ObjectButtonsPanel, Panel):
###############################################################################
-class VIEW_MT_POV_primitives_add(bpy.types.Menu):
+class VIEW_MT_POV_primitives_add(Menu):
"""Define the primitives menu with presets"""
bl_idname = "VIEW_MT_POV_primitives_add"
@@ -3777,7 +4112,7 @@ class VIEW_MT_POV_primitives_add(bpy.types.Menu):
layout.menu(VIEW_MT_POV_import.bl_idname, text="Import", icon="IMPORT")
-class VIEW_MT_POV_Basic_Shapes(bpy.types.Menu):
+class VIEW_MT_POV_Basic_Shapes(Menu):
"""Use this class to sort simple primitives menu entries."""
bl_idname = "POVRAY_MT_basic_shape_tools"
@@ -3859,7 +4194,7 @@ class VIEW_MT_POV_Basic_Shapes(bpy.types.Menu):
)
-class VIEW_MT_POV_import(bpy.types.Menu):
+class VIEW_MT_POV_import(Menu):
"""Use this class for the import menu."""
bl_idname = "POVRAY_MT_import_tools"
@@ -3910,7 +4245,7 @@ def menu_func_import(self, context):
# return True
-class NODE_MT_POV_map_create(bpy.types.Menu):
+class NODE_MT_POV_map_create(Menu):
"""Create maps"""
bl_idname = "POVRAY_MT_node_map_create"
@@ -4056,7 +4391,7 @@ def validinsert(ext):
return ext in {".txt", ".inc", ".pov"}
-class TEXT_MT_POV_insert(bpy.types.Menu):
+class TEXT_MT_POV_insert(Menu):
"""Use this class to create a menu launcher in text editor for the TEXT_OT_POV_insert operator ."""
bl_label = "Insert"
@@ -4116,10 +4451,10 @@ class TEXT_PT_POV_custom_code(TextButtonsPanel, Panel):
row = box.row()
row.prop(text.pov, "custom_code", expand=True)
if text.pov.custom_code in {'3dview'}:
- box.operator("render.render", icon='OUTLINER_DATA_POSE')
+ box.operator("render.render", icon='OUTLINER_DATA_ARMATURE')
if text.pov.custom_code in {'text'}:
rtext = bpy.context.space_data.text
- box.operator("text.run", icon='POSE_DATA')
+ box.operator("text.run", icon='ARMATURE_DATA')
# layout.prop(text.pov, "custom_code")
elif text.pov.custom_code in {'both'}:
box.operator("render.render", icon='POSE_HLT')
@@ -4133,7 +4468,7 @@ class TEXT_PT_POV_custom_code(TextButtonsPanel, Panel):
# Text editor templates from header menu
-class TEXT_MT_POV_templates(bpy.types.Menu):
+class TEXT_MT_POV_templates(Menu):
"""Use this class to create a menu for the same pov templates scenes as other pov IDEs."""
bl_label = "POV"
@@ -4154,12 +4489,111 @@ def menu_func_templates(self, context):
# Do not depend on POV being active renderer here...
self.layout.menu("TEXT_MT_POV_templates")
+###############################################################################
+# Freestyle
+###############################################################################
+#import addon_utils
+#addon_utils.paths()[0]
+#addon_utils.modules()
+#mod.bl_info['name'] == 'Freestyle SVG Exporter':
+bpy.utils.script_paths("addons")
+#render_freestyle_svg = os.path.join(bpy.utils.script_paths("addons"), "render_freestyle_svg.py")
+
+render_freestyle_svg = bpy.context.preferences.addons.get('render_freestyle_svg')
+ #mpath=addon_utils.paths()[0].render_freestyle_svg
+ #import mpath
+ #from mpath import render_freestyle_svg #= addon_utils.modules(['Freestyle SVG Exporter'])
+ #from scripts\\addons import render_freestyle_svg
+if check_render_freestyle_svg():
+ '''
+ snippetsWIP
+ import myscript
+ import importlib
+
+ importlib.reload(myscript)
+ myscript.main()
+ '''
+ for member in dir(render_freestyle_svg):
+ subclass = getattr(render_freestyle_svg, member)
+ try:
+ subclass.COMPAT_ENGINES.add('POVRAY_RENDER')
+ if subclass.bl_idname == "RENDER_PT_SVGExporterPanel":
+ subclass.bl_parent_id = "RENDER_PT_POV_filter"
+ subclass.bl_options = {'HIDE_HEADER'}
+ #subclass.bl_order = 11
+ print(subclass.bl_info)
+ except:
+ pass
+
+ #del render_freestyle_svg.RENDER_PT_SVGExporterPanel.bl_parent_id
+
+
+class RENDER_PT_POV_filter(RenderButtonsPanel, Panel):
+ """Use this class to invoke stuff like Freestyle UI."""
+
+ bl_label = "Freestyle"
+ bl_options = {'DEFAULT_CLOSED'}
+ COMPAT_ENGINES = {'POVRAY_RENDER'}
+
+ @classmethod
+ def poll(cls, context):
+ with_freestyle = bpy.app.build_options.freestyle
+ engine = context.scene.render.engine
+ return(with_freestyle and engine == 'POVRAY_RENDER')
+ def draw_header(self, context):
+
+ #scene = context.scene
+ rd = context.scene.render
+ layout = self.layout
+
+ if rd.use_freestyle:
+ layout.prop(
+ rd, "use_freestyle", text="", icon='LINE_DATA'
+ )
+
+ else:
+ layout.prop(
+ rd, "use_freestyle", text="", icon='OUTLINER_OB_IMAGE'
+ )
+
+ def draw(self, context):
+ rd = context.scene.render
+ layout = self.layout
+ layout.active = rd.use_freestyle
+ layout.use_property_split = True
+ layout.use_property_decorate = False # No animation.
+ flow = layout.grid_flow(
+ row_major=True,
+ columns=0,
+ even_columns=True,
+ even_rows=False,
+ align=True,
+ )
+
+ flow.prop(rd, "line_thickness_mode", expand=True)
+
+ if rd.line_thickness_mode == 'ABSOLUTE':
+ flow.prop(rd, "line_thickness")
+
+ # Warning if the Freestyle SVG Exporter addon is not enabled
+ if not check_render_freestyle_svg():
+ # col = box.column()
+ layout.label(
+ text="Please enable Freestyle SVG Exporter addon", icon="INFO"
+ )
+ # layout.separator()
+ layout.operator(
+ "preferences.addon_show",
+ text="Go to Render: Freestyle SVG Exporter addon",
+ icon="PREFERENCES",
+ ).module = "render_freestyle_svg"
classes = (
WORLD_PT_POV_world,
WORLD_MT_POV_presets,
WORLD_OT_POV_add_preset,
- WORLD_TEXTURE_SLOTS_UL_List,
+ WORLD_TEXTURE_SLOTS_UL_POV_layerlist,
+ #WORLD_TEXTURE_SLOTS_UL_List,
WORLD_PT_POV_mist,
# RenderButtonsPanel,
# ModifierButtonsPanel,
@@ -4188,6 +4622,7 @@ classes = (
RENDER_PT_POV_photons,
RENDER_PT_POV_antialias,
RENDER_PT_POV_radiosity,
+ RENDER_PT_POV_filter,
POV_RADIOSITY_MT_presets,
RENDER_OT_POV_radiosity_add_preset,
RENDER_PT_POV_media,
@@ -4232,13 +4667,13 @@ classes = (
TEXT_MT_POV_insert,
TEXT_PT_POV_custom_code,
TEXT_MT_POV_templates,
- # TEXTURE_PT_context,
- # TEXTURE_PT_POV_povray_texture_slots,
- TEXTURE_UL_POV_texture_slots,
+ #TEXTURE_PT_POV_povray_texture_slots,
+ #TEXTURE_UL_POV_texture_slots,
MATERIAL_TEXTURE_SLOTS_UL_POV_layerlist,
- MATERIAL_OT_POV_texture_slot_add,
- MATERIAL_OT_POV_texture_slot_remove,
+ TEXTURE_OT_POV_texture_slot_add,
+ TEXTURE_OT_POV_texture_slot_remove,
TEXTURE_PT_POV_influence,
+ TEXTURE_PT_POV_mapping,
)
@@ -4257,11 +4692,18 @@ def register():
# was used for parametric objects but made the other addon unreachable on
# unregister for other tools to use created a user action call instead
# addon_utils.enable("add_mesh_extra_objects", default_set=False, persistent=True)
-
# bpy.types.TEXTURE_PT_context_texture.prepend(TEXTURE_PT_POV_type)
+ if not povCentricWorkspace in bpy.app.handlers.load_post:
+ print("Adding POV wentric workspace on load handlers list")
+ bpy.app.handlers.load_post.append(povCentricWorkspace)
def unregister():
+ if povCentricWorkspace in bpy.app.handlers.load_post:
+ print("Removing POV wentric workspace from load handlers list")
+ bpy.app.handlers.load_post.remove(povCentricWorkspace)
+
+
# from bpy.utils import unregister_class
# bpy.types.TEXTURE_PT_context_texture.remove(TEXTURE_PT_POV_type)
@@ -4274,4 +4716,5 @@ def unregister():
bpy.types.VIEW3D_MT_add.remove(menu_func_add)
for cls in reversed(classes):
- unregister_class(cls)
+ if cls != TEXTURE_PT_context:
+ unregister_class(cls)
diff --git a/rigify/utils/bones.py b/rigify/utils/bones.py
index 51a4d44c..f0341a8f 100644
--- a/rigify/utils/bones.py
+++ b/rigify/utils/bones.py
@@ -382,7 +382,7 @@ class BoneUtilityMixin(object):
def new_bone(self, new_name):
"""Create a new bone with the specified name."""
name = new_bone(self.obj, new_name)
- self.register_new_bone(self, name)
+ self.register_new_bone(name)
return name
def copy_bone(self, bone_name, new_name='', *, parent=False, bbone=False, length=None, scale=None):
diff --git a/rigify/utils/mechanism.py b/rigify/utils/mechanism.py
index 3e7b2990..232fb7af 100644
--- a/rigify/utils/mechanism.py
+++ b/rigify/utils/mechanism.py
@@ -351,9 +351,10 @@ def reactivate_custom_properties(obj):
def copy_custom_properties(src, dest, *, prefix='', dest_prefix='', link_driver=False):
"""Copy custom properties with filtering by prefix. Optionally link using drivers."""
res = []
+ exclude = {'_RNA_UI', 'rigify_parameters', 'rigify_type'}
for key, value in src.items():
- if key.startswith(prefix):
+ if key.startswith(prefix) and key not in exclude:
new_key = dest_prefix + key[len(prefix):]
dest[new_key] = value
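
The exclusion set keeps Rigify's own bookkeeping keys from leaking into the destination; in isolation, the prefix-filtered copy amounts to this sketch:

def copy_props(src, dest, prefix='', dest_prefix=''):
    exclude = {'_RNA_UI', 'rigify_parameters', 'rigify_type'}
    copied = []
    for key, value in src.items():
        if key.startswith(prefix) and key not in exclude:
            new_key = dest_prefix + key[len(prefix):]
            dest[new_key] = value
            copied.append(new_key)
    return copied
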
diff --git a/space_view3d_copy_attributes.py b/space_view3d_copy_attributes.py
index 30393a75..da741737 100644
--- a/space_view3d_copy_attributes.py
+++ b/space_view3d_copy_attributes.py
@@ -272,11 +272,7 @@ class CopySelectedPoseConstraints(Operator):
for bone in selected:
for index, flag in enumerate(self.selection):
if flag:
- old_constraint = active.constraints[index]
- new_constraint = bone.constraints.new(
- active.constraints[index].type
- )
- generic_copy(old_constraint, new_constraint)
+ bone.constraints.copy(active.constraints[index])
return {'FINISHED'}
diff --git a/space_view3d_modifier_tools.py b/space_view3d_modifier_tools.py
index aaef9b6a..1fb64635 100644
--- a/space_view3d_modifier_tools.py
+++ b/space_view3d_modifier_tools.py
@@ -58,7 +58,7 @@ class ApplyAllModifiers(Operator):
is_mod = True
try:
bpy.ops.object.modifier_apply(
- contx, apply_as='DATA',
+ contx,
modifier=contx['modifier'].name
)
except:
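
Blender 2.90 dropped the apply_as argument from object.modifier_apply, which is what the change above tracks; a version-tolerant call, assuming the same contx override dict, could be sketched as:

import bpy

def apply_modifier(contx, modifier_name):
    if bpy.app.version >= (2, 90, 0):
        bpy.ops.object.modifier_apply(contx, modifier=modifier_name)
    else:
        bpy.ops.object.modifier_apply(contx, apply_as='DATA', modifier=modifier_name)
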
diff --git a/viewport_vr_preview.py b/viewport_vr_preview.py
index 8dc865f5..44e8897b 100644
--- a/viewport_vr_preview.py
+++ b/viewport_vr_preview.py
@@ -22,6 +22,11 @@ import bpy
from bpy.types import (
Gizmo,
GizmoGroup,
+ PropertyGroup,
+ UIList,
+ Menu,
+ Panel,
+ Operator,
)
from bpy.props import (
CollectionProperty,
@@ -32,9 +37,9 @@ from bpy.app.handlers import persistent
bl_info = {
"name": "VR Scene Inspection",
- "author": "Julian Eisel (Severin)",
- "version": (0, 2, 0),
- "blender": (2, 83, 0),
+ "author": "Julian Eisel (Severin), Sebastian Koenig",
+ "version": (0, 9, 0),
+ "blender": (2, 90, 0),
"location": "3D View > Sidebar > VR",
"description": ("View the viewport with virtual reality glasses "
"(head-mounted displays)"),
@@ -65,8 +70,8 @@ def xr_landmark_active_type_update(self, context):
session_settings.base_pose_type = 'SCENE_CAMERA'
elif landmark_active.type == 'USER_CAMERA':
session_settings.base_pose_type = 'OBJECT'
- # elif landmark_active.type == 'CUSTOM':
- # session_settings.base_pose_type = 'CUSTOM'
+ elif landmark_active.type == 'CUSTOM':
+ session_settings.base_pose_type = 'CUSTOM'
def xr_landmark_active_camera_update(self, context):
@@ -144,10 +149,24 @@ def xr_landmark_active_update(self, context):
xr_landmark_active_base_pose_angle_update(self, context)
if wm.xr_session_state:
- wm.xr_session_state.reset_to_base_pose(context)
+ wm.xr_session_state.reset_to_base_pose(context)
-class VRLandmark(bpy.types.PropertyGroup):
+class VIEW3D_MT_landmark_menu(Menu):
+ bl_label = "Landmark Controls"
+
+ def draw(self, _context):
+ layout = self.layout
+
+ layout.operator("view3d.vr_landmark_from_camera")
+ layout.operator("view3d.update_vr_landmark")
+ layout.separator()
+ layout.operator("view3d.cursor_to_vr_landmark")
+ layout.operator("view3d.camera_to_vr_landmark")
+ layout.operator("view3d.add_camera_from_vr_landmark")
+
+
+class VRLandmark(PropertyGroup):
name: bpy.props.StringProperty(
name="VR Landmark",
default="Landmark"
@@ -161,11 +180,9 @@ class VRLandmark(bpy.types.PropertyGroup):
('USER_CAMERA', "Custom Camera",
"Use an existing camera to define the VR view base location and "
"rotation"),
- # Custom base poses work, but it's uncertain if they are really
- # needed. Disabled for now.
- # ('CUSTOM', "Custom Pose",
- # "Allow a manually definied position and rotation to be used as "
- # "the VR view base pose"),
+ ('CUSTOM', "Custom Pose",
+ "Allow a manually defined position and rotation to be used as "
+ "the VR view base pose"),
],
default='SCENE_CAMERA',
update=xr_landmark_type_update,
@@ -209,7 +226,7 @@ class VRLandmark(bpy.types.PropertyGroup):
)
-class VIEW3D_UL_vr_landmarks(bpy.types.UIList):
+class VIEW3D_UL_vr_landmarks(UIList):
def draw_item(self, context, layout, _data, item, icon, _active_data,
_active_propname, index):
landmark = item
@@ -227,7 +244,7 @@ class VIEW3D_UL_vr_landmarks(bpy.types.UIList):
props.index = index
-class VIEW3D_PT_vr_landmarks(bpy.types.Panel):
+class VIEW3D_PT_vr_landmarks(Panel):
bl_space_type = 'VIEW_3D'
bl_region_type = 'UI'
bl_category = "VR"
@@ -250,20 +267,23 @@ class VIEW3D_PT_vr_landmarks(bpy.types.Panel):
col = row.column(align=True)
col.operator("view3d.vr_landmark_add", icon='ADD', text="")
col.operator("view3d.vr_landmark_remove", icon='REMOVE', text="")
+ col.operator("view3d.vr_landmark_from_session", icon='PLUS', text="")
+
+ col.menu("VIEW3D_MT_landmark_menu", icon='DOWNARROW_HLT', text="")
if landmark_selected:
layout.prop(landmark_selected, "type")
if landmark_selected.type == 'USER_CAMERA':
layout.prop(landmark_selected, "base_pose_camera")
- # elif landmark_selected.type == 'CUSTOM':
- # layout.prop(landmark_selected,
- # "base_pose_location", text="Location")
- # layout.prop(landmark_selected,
- # "base_pose_angle", text="Angle")
+ elif landmark_selected.type == 'CUSTOM':
+ layout.prop(landmark_selected,
+ "base_pose_location", text="Location")
+ layout.prop(landmark_selected,
+ "base_pose_angle", text="Angle")
-class VIEW3D_PT_vr_session_view(bpy.types.Panel):
+class VIEW3D_PT_vr_session_view(Panel):
bl_space_type = 'VIEW_3D'
bl_region_type = 'UI'
bl_category = "VR"
@@ -285,7 +305,7 @@ class VIEW3D_PT_vr_session_view(bpy.types.Panel):
col.prop(session_settings, "clip_end", text="End")
-class VIEW3D_PT_vr_session(bpy.types.Panel):
+class VIEW3D_PT_vr_session(Panel):
bl_space_type = 'VIEW_3D'
bl_region_type = 'UI'
bl_category = "VR"
@@ -295,7 +315,8 @@ class VIEW3D_PT_vr_session(bpy.types.Panel):
layout = self.layout
session_settings = context.window_manager.xr_session_settings
- layout.use_property_split = False
+ layout.use_property_split = True
+ layout.use_property_decorate = False # No animation.
is_session_running = bpy.types.XrSessionState.is_running(context)
@@ -313,7 +334,22 @@ class VIEW3D_PT_vr_session(bpy.types.Panel):
layout.prop(session_settings, "use_positional_tracking")
-class VIEW3D_OT_vr_landmark_add(bpy.types.Operator):
+class VIEW3D_PT_vr_info(bpy.types.Panel):
+ bl_space_type = 'VIEW_3D'
+ bl_region_type = 'UI'
+ bl_category = "VR"
+ bl_label = "VR Info"
+
+ @classmethod
+ def poll(cls, context):
+ return not bpy.app.build_options.xr_openxr
+
+ def draw(self, context):
+ layout = self.layout
+ layout.label(icon='ERROR', text="Built without VR/OpenXR features.")
+
+
+class VIEW3D_OT_vr_landmark_add(Operator):
bl_idname = "view3d.vr_landmark_add"
bl_label = "Add VR Landmark"
bl_description = "Add a new VR landmark to the list and select it"
@@ -331,7 +367,93 @@ class VIEW3D_OT_vr_landmark_add(bpy.types.Operator):
return {'FINISHED'}
-class VIEW3D_OT_vr_landmark_remove(bpy.types.Operator):
+class VIEW3D_OT_vr_landmark_from_camera(Operator):
+ bl_idname = "view3d.vr_landmark_from_camera"
+ bl_label = "Add VR Landmark from camera"
+ bl_description = "Add a new VR landmark from the active camera object to the list and select it"
+ bl_options = {'UNDO', 'REGISTER'}
+
+ @classmethod
+ def poll(cls, context):
+ cam_selected = False
+
+ vl_objects = bpy.context.view_layer.objects
+ if vl_objects.active and vl_objects.active.type == 'CAMERA':
+ cam_selected = True
+ return cam_selected
+
+ def execute(self, context):
+ scene = context.scene
+ landmarks = scene.vr_landmarks
+ cam = context.view_layer.objects.active
+ lm = landmarks.add()
+ lm.type = 'USER_CAMERA'
+ lm.base_pose_camera = cam
+ lm.name = "LM_" + cam.name
+
+ # select newly created set
+ scene.vr_landmarks_selected = len(landmarks) - 1
+
+ return {'FINISHED'}
+
+
+class VIEW3D_OT_vr_landmark_from_session(Operator):
+ bl_idname = "view3d.vr_landmark_from_session"
+ bl_label = "Add VR Landmark from session"
+ bl_description = "Add VR landmark from the viewer pose of the running VR session to the list and select it"
+ bl_options = {'UNDO', 'REGISTER'}
+
+ @classmethod
+ def poll(cls, context):
+ return bpy.types.XrSessionState.is_running(context)
+
+ def execute(self, context):
+ scene = context.scene
+ landmarks = scene.vr_landmarks
+ wm = context.window_manager
+
+ lm = landmarks.add()
+ lm.type = "CUSTOM"
+ scene.vr_landmarks_selected = len(landmarks) - 1
+
+ loc = wm.xr_session_state.viewer_pose_location
+ rot = wm.xr_session_state.viewer_pose_rotation.to_euler()
+
+ lm.base_pose_location = loc
+ lm.base_pose_angle = rot[2]
+
+ return {'FINISHED'}
+
+
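
A landmark only stores a location plus a single yaw angle, so the session's viewer rotation (a quaternion) is reduced to the Z component of its Euler form; a small mathutils sketch of that reduction:

from math import radians
from mathutils import Quaternion

viewer_rot = Quaternion((0.0, 0.0, 1.0), radians(45.0))  # e.g. 45 degrees around Z
yaw = viewer_rot.to_euler()[2]                           # what base_pose_angle keeps
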
+class VIEW3D_OT_update_vr_landmark(Operator):
+ bl_idname = "view3d.update_vr_landmark"
+ bl_label = "Update Custom VR Landmark"
+ bl_description = "Update the selected landmark from the current viewer pose in the VR session"
+ bl_options = {'UNDO', 'REGISTER'}
+
+ @classmethod
+ def poll(cls, context):
+ selected_landmark = VRLandmark.get_selected_landmark(context)
+ return bpy.types.XrSessionState.is_running(context) and selected_landmark.type == 'CUSTOM'
+
+ def execute(self, context):
+ wm = context.window_manager
+
+ lm = VRLandmark.get_selected_landmark(context)
+
+ loc = wm.xr_session_state.viewer_pose_location
+ rot = wm.xr_session_state.viewer_pose_rotation.to_euler()
+
+ lm.base_pose_location = loc
+ lm.base_pose_angle = rot[2]
+
+ # Re-activate the landmark to trigger viewer reset and flush landmark settings to the session settings.
+ xr_landmark_active_update(None, context)
+
+ return {'FINISHED'}
+
+
+class VIEW3D_OT_vr_landmark_remove(Operator):
bl_idname = "view3d.vr_landmark_remove"
bl_label = "Remove VR Landmark"
bl_description = "Delete the selected VR landmark from the list"
@@ -350,7 +472,83 @@ class VIEW3D_OT_vr_landmark_remove(bpy.types.Operator):
return {'FINISHED'}
-class VIEW3D_OT_vr_landmark_activate(bpy.types.Operator):
+class VIEW3D_OT_cursor_to_vr_landmark(Operator):
+ bl_idname = "view3d.cursor_to_vr_landmark"
+ bl_label = "Cursor to VR Landmark"
+ bl_description = "Move the 3D Cursor to the selected VR Landmark"
+ bl_options = {'UNDO', 'REGISTER'}
+
+ @classmethod
+ def poll(cls, context):
+ lm = VRLandmark.get_selected_landmark(context)
+ if lm.type == 'SCENE_CAMERA':
+ return context.scene.camera is not None
+ elif lm.type == 'USER_CAMERA':
+ return lm.base_pose_camera is not None
+
+ return True
+
+ def execute(self, context):
+ scene = context.scene
+ lm = VRLandmark.get_selected_landmark(context)
+ if lm.type == 'SCENE_CAMERA':
+ lm_pos = scene.camera.location
+ elif lm.type == 'USER_CAMERA':
+ lm_pos = lm.base_pose_camera.location
+ else:
+ lm_pos = lm.base_pose_location
+ scene.cursor.location = lm_pos
+
+ return{'FINISHED'}
+
+
+class VIEW3d_OT_add_camera_from_vr_landmark(Operator):
+ bl_idname = "view3d.add_camera_from_vr_landmark"
+ bl_label = "New Camera from VR Landmark"
+ bl_description = "Create a new Camera from the selected VR Landmark"
+ bl_options = {'UNDO', 'REGISTER'}
+
+ def execute(self, context):
+ import math
+
+ scene = context.scene
+ lm = VRLandmark.get_selected_landmark(context)
+
+ cam = bpy.data.cameras.new("Camera_" + lm.name)
+ new_cam = bpy.data.objects.new("Camera_" + lm.name, cam)
+ scene.collection.objects.link(new_cam)
+ angle = lm.base_pose_angle
+ new_cam.location = lm.base_pose_location
+ new_cam.rotation_euler = (math.pi, 0, angle)
+
+ return {'FINISHED'}
+
+
+class VIEW3D_OT_camera_to_vr_landmark(Operator):
+ bl_idname = "view3d.camera_to_vr_landmark"
+ bl_label = "Scene Camera to VR Landmark"
+ bl_description = "Position the scene camera at the selected landmark"
+ bl_options = {'UNDO', 'REGISTER'}
+
+ @classmethod
+ def poll(cls, context):
+ return context.scene.camera is not None
+
+ def execute(self, context):
+ import math
+
+ scene = context.scene
+ lm = VRLandmark.get_selected_landmark(context)
+
+ cam = scene.camera
+ angle = lm.base_pose_angle
+ cam.location = lm.base_pose_location
+ cam.rotation_euler = (math.pi / 2, 0, angle)
+
+ return {'FINISHED'}
+
+
+class VIEW3D_OT_vr_landmark_activate(Operator):
bl_idname = "view3d.vr_landmark_activate"
bl_label = "Activate VR Landmark"
bl_description = "Change to the selected VR landmark from the list"
@@ -375,7 +573,7 @@ class VIEW3D_OT_vr_landmark_activate(bpy.types.Operator):
return {'FINISHED'}
-class VIEW3D_PT_vr_viewport_feedback(bpy.types.Panel):
+class VIEW3D_PT_vr_viewport_feedback(Panel):
bl_space_type = 'VIEW_3D'
bl_region_type = 'UI'
bl_category = "VR"
@@ -394,6 +592,7 @@ class VIEW3D_PT_vr_viewport_feedback(bpy.types.Panel):
layout.separator()
layout.prop(view3d.shading, "vr_show_virtual_camera")
+ layout.prop(view3d.shading, "vr_show_landmarks")
layout.prop(view3d, "mirror_xr_session")
@@ -483,6 +682,66 @@ class VIEW3D_GGT_vr_viewer_pose(GizmoGroup):
self.gizmo.matrix_basis = self._get_viewer_pose_matrix(context)
+class VIEW3D_GGT_vr_landmarks(GizmoGroup):
+ bl_idname = "VIEW3D_GGT_vr_landmarks"
+ bl_label = "VR Landmark Indicators"
+ bl_space_type = 'VIEW_3D'
+ bl_region_type = 'WINDOW'
+ bl_options = {'3D', 'PERSISTENT', 'SCALE'}
+
+ @classmethod
+ def poll(cls, context):
+ view3d = context.space_data
+ return (
+ view3d.shading.vr_show_landmarks
+ )
+
+ def setup(self, context):
+ pass
+
+ def draw_prepare(self, context):
+ # first delete the old gizmos
+ for g in self.gizmos:
+ self.gizmos.remove(g)
+
+ from math import radians
+ from mathutils import Matrix, Euler
+ scene = context.scene
+ landmarks = scene.vr_landmarks
+
+ for lm in landmarks:
+ if ((lm.type == 'SCENE_CAMERA' and not scene.camera) or
+ (lm.type == 'USER_CAMERA' and not lm.base_pose_camera)):
+ continue
+
+ gizmo = self.gizmos.new(VIEW3D_GT_vr_camera_cone.bl_idname)
+ gizmo.aspect = 1 / 3, 1 / 4
+
+ gizmo.color = gizmo.color_highlight = 0.2, 1.0, 0.6
+ gizmo.alpha = 1.0
+
+ self.gizmo = gizmo
+
+ if lm.type == 'SCENE_CAMERA':
+ cam = scene.camera
+ lm_mat = cam.matrix_world if cam else Matrix.Identity(4)
+ elif lm.type == 'USER_CAMERA':
+ lm_mat = lm.base_pose_camera.matrix_world
+ else:
+ angle = lm.base_pose_angle
+ raw_rot = Euler((radians(90.0), 0, angle))
+
+ rotmat = Matrix.Identity(3)
+ rotmat.rotate(raw_rot)
+ rotmat.resize_4x4()
+
+ transmat = Matrix.Translation(lm.base_pose_location)
+
+ lm_mat = transmat @ rotmat
+
+ self.gizmo.matrix_basis = lm_mat
+
+
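
The gizmo matrix above is a translation composed with a rotation that first tips the camera cone upright and then applies the landmark's yaw; isolated, the composition reads roughly as:

from math import radians
from mathutils import Euler, Matrix

def landmark_matrix(location, angle):
    rotmat = Matrix.Identity(3)
    rotmat.rotate(Euler((radians(90.0), 0.0, angle)))      # upright, then yaw around Z
    return Matrix.Translation(location) @ rotmat.to_4x4()  # world matrix for the gizmo
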
classes = (
VIEW3D_PT_vr_session,
VIEW3D_PT_vr_session_view,
@@ -491,18 +750,27 @@ classes = (
VRLandmark,
VIEW3D_UL_vr_landmarks,
+ VIEW3D_MT_landmark_menu,
VIEW3D_OT_vr_landmark_add,
VIEW3D_OT_vr_landmark_remove,
VIEW3D_OT_vr_landmark_activate,
+ VIEW3D_OT_vr_landmark_from_session,
+ VIEW3d_OT_add_camera_from_vr_landmark,
+ VIEW3D_OT_camera_to_vr_landmark,
+ VIEW3D_OT_vr_landmark_from_camera,
+ VIEW3D_OT_cursor_to_vr_landmark,
+ VIEW3D_OT_update_vr_landmark,
VIEW3D_GT_vr_camera_cone,
VIEW3D_GGT_vr_viewer_pose,
+ VIEW3D_GGT_vr_landmarks,
)
def register():
if not bpy.app.build_options.xr_openxr:
+ bpy.utils.register_class(VIEW3D_PT_vr_info)
return
for cls in classes:
@@ -523,12 +791,16 @@ def register():
bpy.types.View3DShading.vr_show_virtual_camera = BoolProperty(
name="Show VR Camera"
)
+ bpy.types.View3DShading.vr_show_landmarks = BoolProperty(
+ name="Show Landmarks"
+ )
bpy.app.handlers.load_post.append(ensure_default_vr_landmark)
def unregister():
if not bpy.app.build_options.xr_openxr:
+ bpy.utils.unregister_class(VIEW3D_PT_vr_info)
return
for cls in classes:
@@ -538,6 +810,7 @@ def unregister():
del bpy.types.Scene.vr_landmarks_selected
del bpy.types.Scene.vr_landmarks_active
del bpy.types.View3DShading.vr_show_virtual_camera
+ del bpy.types.View3DShading.vr_show_landmarks
bpy.app.handlers.load_post.remove(ensure_default_vr_landmark)