
git.blender.org/blender-addons.git
author     CoDEmanX <codemanx@gmx.de>  2014-02-04 06:05:05 +0400
committer  CoDEmanX <codemanx@gmx.de>  2014-02-04 06:05:05 +0400
commit     049a5a86edcf78b070b73b141b657825145ae9a1 (patch)
tree       6e5edf67d7974e0ad8beb209c57fd48c456ecfbe
parent     d9fcbd06018165cc2cfca0673c7c2db44b04a833 (diff)
Clean-up: Updated bl_info['tracker_url'] to developer.blender.org, some minor other edits
-rw-r--r--  add_curve_extra_objects/__init__.py  7
-rw-r--r--  add_curve_extra_objects/add_curve_aceous_galore.py  74
-rw-r--r--  add_curve_extra_objects/add_curve_torus_knots.py  35
-rw-r--r--  add_curve_ivygen.py  9
-rw-r--r--  add_curve_sapling/__init__.py  44
-rw-r--r--  add_mesh_BoltFactory/__init__.py  8
-rw-r--r--  add_mesh_ant_landscape.py  9
-rw-r--r--  add_mesh_extra_objects/__init__.py  9
-rw-r--r--  add_mesh_extra_objects/add_mesh_3d_function_surface.py  5
-rw-r--r--  add_mesh_extra_objects/add_mesh_gears.py  13
-rw-r--r--  add_mesh_extra_objects/add_mesh_gemstones.py  5
-rw-r--r--  add_mesh_extra_objects/add_mesh_honeycomb.py  80
-rw-r--r--  add_mesh_extra_objects/add_mesh_polysphere.py  14
-rw-r--r--  add_mesh_extra_objects/add_mesh_twisted_torus.py  7
-rw-r--r--  add_mesh_pipe_joint.py  5
-rw-r--r--  add_mesh_solid.py  71
-rw-r--r--  animation_add_corrective_shape_key.py  3
-rw-r--r--  animation_animall.py  5
-rw-r--r--  curve_simplify.py  5
-rw-r--r--  development_api_navigator.py  3
-rw-r--r--  development_icon_get.py  3
-rw-r--r--  game_engine_save_as_runtime.py  3
-rw-r--r--  io_anim_acclaim/__init__.py  5
-rw-r--r--  io_anim_c3d/__init__.py  5
-rw-r--r--  io_anim_camera.py  5
-rw-r--r--  io_anim_nuke_chan/__init__.py  5
-rw-r--r--  io_coat3D/__init__.py  18
-rw-r--r--  io_convert_image_to_mesh_img/__init__.py  5
-rw-r--r--  io_curve_svg/__init__.py  6
-rw-r--r--  io_export_after_effects.py  8
-rw-r--r--  io_export_dxf/__init__.py  9
-rw-r--r--  io_export_pc2.py  31
-rw-r--r--  io_export_unreal_psk_psa.py  697
-rw-r--r--  io_import_gimp_image_to_scene.py  3
-rw-r--r--  io_import_images_as_planes.py  2
-rw-r--r--  io_import_scene_dxf.py  3
-rw-r--r--  io_import_scene_lwo.py  15
-rw-r--r--  io_import_scene_mhx.py  3
-rw-r--r--  io_import_scene_unreal_psa_psk.py  3
-rw-r--r--  io_mesh_pdb/__init__.py  99
-rw-r--r--  io_mesh_raw/__init__.py  5
-rw-r--r--  io_mesh_stl/__init__.py  6
-rw-r--r--  io_mesh_uv_layout/__init__.py  6
-rw-r--r--  io_scene_ms3d/__init__.py  13
-rw-r--r--  io_scene_x/__init__.py  38
-rw-r--r--  light_field_tools/__init__.py  5
-rw-r--r--  mesh_bsurfaces.py  2332
-rw-r--r--  mesh_f2.py  7
-rw-r--r--  mesh_inset/__init__.py  8
-rw-r--r--  mesh_looptools.py  625
-rw-r--r--  mesh_relax.py  15
-rw-r--r--  mocap/__init__.py  7
-rw-r--r--  netrender/__init__.py  8
-rw-r--r--  node_efficiency_tools.py  5
-rw-r--r--  object_add_chain.py  13
-rw-r--r--  object_animrenderbake.py  14
-rw-r--r--  object_cloud_gen.py  3
-rw-r--r--  object_edit_linked.py  2
-rw-r--r--  object_fracture/__init__.py  7
-rw-r--r--  object_grease_scatter.py  3
-rw-r--r--  paint_palette.py  2
-rw-r--r--  render_copy_settings/__init__.py  12
-rw-r--r--  render_povray/__init__.py  11
-rw-r--r--  render_renderfarmfi/__init__.py  8
-rw-r--r--  space_view3d_3d_navigation.py  29
-rw-r--r--  space_view3d_copy_attributes.py  3
-rw-r--r--  space_view3d_materials_utils.py  3
-rw-r--r--  space_view3d_math_vis/__init__.py  6
-rw-r--r--  space_view3d_panel_measure.py  3
-rw-r--r--  space_view3d_screencast_keys.py  13
-rw-r--r--  space_view3d_spacebar_menu.py  5
-rw-r--r--  system_blend_info.py  3
-rw-r--r--  system_property_chart.py  3
-rw-r--r--  texture_paint_layer_manager.py  2
-rw-r--r--  ui_translate/__init__.py  6
-rw-r--r--  ui_translate/edit_translation.py  4
-rw-r--r--  uv_bake_texture_to_vcols.py  170
-rw-r--r--  uv_texture_atlas.py  2
78 files changed, 2352 insertions, 2394 deletions
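
For reference, the pattern applied across these files looks roughly like the sketch below. The add-on name, wiki page, and task number are placeholders, not taken from any particular file: the old projects.blender.org tracker query URLs become short developer.blender.org task URLs, and the trailing line-continuation backslashes are dropped from the implicitly concatenated wiki_url strings, since adjacent string literals inside a dict literal concatenate without them.

# Hypothetical add-on header before this commit (shortened):
bl_info = {
    "name": "Example Add-on",
    "wiki_url": "http://wiki.blender.org/index.php/Extensions:2.6/Py/"\
                "Scripts/Example/Page",
    "tracker_url": "https://projects.blender.org/tracker/index.php?"\
                   "func=detail&aid=12345",
    "category": "Add Mesh"}

# The same header after the clean-up:
bl_info = {
    "name": "Example Add-on",
    "wiki_url": "http://wiki.blender.org/index.php/Extensions:2.6/Py/"
                "Scripts/Example/Page",
    "tracker_url": "https://developer.blender.org/T12345",
    "category": "Add Mesh"}

The full diff follows.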
diff --git a/add_curve_extra_objects/__init__.py b/add_curve_extra_objects/__init__.py
index 95c358f0..cd9f8c96 100644
--- a/add_curve_extra_objects/__init__.py
+++ b/add_curve_extra_objects/__init__.py
@@ -26,10 +26,9 @@ bl_info = {
"location": "View3D > Add > Curve > Extra Objects",
"description": "Add extra curve object types",
"warning": "",
- "wiki_url": "http://wiki.blender.org/index.php/Extensions:2.6/Py/"\
+ "wiki_url": "http://wiki.blender.org/index.php/Extensions:2.6/Py/"
"Scripts/Curve/Curve_Objects",
- "tracker_url": "https://projects.blender.org/tracker/index.php?"\
- "func=detail&aid=32477",
+ "tracker_url": "https://developer.blender.org/T32477",
"category": "Add Curve"}
@@ -38,7 +37,7 @@ if "bpy" in locals():
imp.reload(add_curve_aceous_galore)
imp.reload(add_curve_spirals)
imp.reload(add_curve_torus_knots)
-
+
else:
from . import add_curve_aceous_galore
from . import add_curve_spirals
diff --git a/add_curve_extra_objects/add_curve_aceous_galore.py b/add_curve_extra_objects/add_curve_aceous_galore.py
index 85063de6..2389f00f 100644
--- a/add_curve_extra_objects/add_curve_aceous_galore.py
+++ b/add_curve_extra_objects/add_curve_aceous_galore.py
@@ -19,18 +19,18 @@
bl_info = {
'name': 'Curveaceous Galore!',
'author': 'Jimmy Hazevoet, testscreenings',
- 'version': (0,2),
+ 'version': (0, 2),
"blender": (2, 59, 0),
'location': 'View3D > Add > Curve',
'description': 'Adds many different types of Curves',
'warning': '', # used for warning icon and text in addons panel
- 'wiki_url': 'http://wiki.blender.org/index.php/Extensions:2.6/Py/' \
+ 'wiki_url': 'http://wiki.blender.org/index.php/Extensions:2.6/Py/'
'Scripts/Curve/Curves_Galore',
- 'tracker_url': 'https://projects.blender.org/tracker/index.php?'\
- 'func=detail&aid=22404',
+ 'tracker_url': 'https://developer.blender.org/T22404',
'category': 'Add Curve'}
'''
+
##------------------------------------------------------------
#### import modules
import bpy
@@ -47,9 +47,9 @@ import mathutils.noise as Noise
def randnum(low=0.0, high=1.0, seed=0):
"""
randnum( low=0.0, high=1.0, seed=0 )
-
+
Create random number
-
+
Parameters:
low - lower range
(type=float)
@@ -74,9 +74,9 @@ def randnum(low=0.0, high=1.0, seed=0):
def vTurbNoise(x,y,z, iScale=0.25, Size=1.0, Depth=6, Hard=0, Basis=0, Seed=0):
"""
vTurbNoise((x,y,z), iScale=0.25, Size=1.0, Depth=6, Hard=0, Basis=0, Seed=0 )
-
+
Create randomised vTurbulence noise
-
+
Parameters:
xyz - (x,y,z) float values.
(type=3-float tuple)
@@ -126,9 +126,9 @@ def AxisFlip(x,y,z, x_axis=1, y_axis=1, z_axis=1, flip=0 ):
def ProfileCurve(type=0, a=0.25, b=0.25):
"""
ProfileCurve( type=0, a=0.25, b=0.25 )
-
- Create profile curve
-
+
+ Create profile curve
+
Parameters:
type - select profile type, L, H, T, U, Z
(type=int)
@@ -182,9 +182,9 @@ def ProfileCurve(type=0, a=0.25, b=0.25):
def MiscCurve(type=1, a=1.0, b=0.5, c=1.0):
"""
MiscCurve( type=1, a=1.0, b=0.5, c=1.0 )
-
+
Create miscellaneous curves
-
+
Parameters:
type - select type, Diamond, Arrow1, Arrow2, Square
(type=int)
@@ -227,12 +227,12 @@ def MiscCurve(type=1, a=1.0, b=0.5, c=1.0):
x = a / 2
y = b / 2
r = c / 2
-
+
if r > x:
r = x - 0.0001
if r > y:
r = y - 0.0001
-
+
if r>0:
newpoints.append([-x+r,y,0])
newpoints.append([x-r,y,0])
@@ -258,9 +258,9 @@ def MiscCurve(type=1, a=1.0, b=0.5, c=1.0):
def StarCurve(starpoints=8, innerradius=0.5, outerradius=1.0, twist=0.0):
"""
StarCurve( starpoints=8, innerradius=0.5, outerradius=1.0, twist=0.0 )
-
+
Create star shaped curve
-
+
Parameters:
starpoints - the number of points
(type=int)
@@ -294,9 +294,9 @@ def StarCurve(starpoints=8, innerradius=0.5, outerradius=1.0, twist=0.0):
def FlowerCurve(petals=8, innerradius=0.5, outerradius=1.0, petalwidth=2.0):
"""
FlowerCurve( petals=8, innerradius=0.5, outerradius=1.0, petalwidth=2.0 )
-
+
Create flower shaped curve
-
+
Parameters:
petals - the number of petals
(type=int)
@@ -334,9 +334,9 @@ def FlowerCurve(petals=8, innerradius=0.5, outerradius=1.0, petalwidth=2.0):
def ArcCurve(sides=6, startangle=0.0, endangle=90.0, innerradius=0.5, outerradius=1.0, type=3):
"""
ArcCurve( sides=6, startangle=0.0, endangle=90.0, innerradius=0.5, outerradius=1.0, type=3 )
-
+
Create arc shaped curve
-
+
Parameters:
sides - number of sides
(type=int)
@@ -386,13 +386,13 @@ def ArcCurve(sides=6, startangle=0.0, endangle=90.0, innerradius=0.5, outerradiu
return newpoints
##------------------------------------------------------------
-# 2DCurve: Cog wheel:
+# 2DCurve: Cog wheel:
def CogCurve(theeth=8, innerradius=0.8, middleradius=0.95, outerradius=1.0, bevel=0.5):
"""
CogCurve( theeth=8, innerradius=0.8, middleradius=0.95, outerradius=1.0, bevel=0.5 )
-
+
Create cog wheel shaped curve
-
+
Parameters:
theeth - number of theeth
(type=int)
@@ -442,9 +442,9 @@ def CogCurve(theeth=8, innerradius=0.8, middleradius=0.95, outerradius=1.0, beve
def nSideCurve(sides=6, radius=1.0):
"""
nSideCurve( sides=6, radius=1.0 )
-
+
Create n-sided curve
-
+
Parameters:
sides - number of sides
(type=int)
@@ -472,9 +472,9 @@ def nSideCurve(sides=6, radius=1.0):
def SplatCurve(sides=24, scale=1.0, seed=0, basis=0, radius=1.0):
"""
SplatCurve( sides=24, scale=1.0, seed=0, basis=0, radius=1.0 )
-
+
Create splat curve
-
+
Parameters:
sides - number of sides
(type=int)
@@ -513,9 +513,9 @@ def SplatCurve(sides=24, scale=1.0, seed=0, basis=0, radius=1.0):
def HelixCurve( number=100, height=2.0, startangle=0.0, endangle=360.0, width=1.0, a=0.0, b=0.0 ):
"""
HelixCurve( number=100, height=2.0, startangle=0.0, endangle=360.0, width=1.0, a=0.0, b=0.0 )
-
+
Create helix curve
-
+
Parameters:
number - the number of points
(type=int)
@@ -539,7 +539,7 @@ def HelixCurve( number=100, height=2.0, startangle=0.0, endangle=360.0, width=1.
newpoints = []
angle = (2.0/360.0)*(endangle-startangle)
step = angle/(number-1)
- h = height/angle
+ h = height/angle
start = (startangle*2.0/360.0)
a/=angle
i = 0
@@ -557,7 +557,7 @@ def HelixCurve( number=100, height=2.0, startangle=0.0, endangle=360.0, width=1.
def CycloidCurve( number=24, length=2.0, type=0, a=1.0, b=1.0, startangle=0.0, endangle=360.0 ):
"""
CycloidCurve( number=24, length=2.0, type=0, a=1.0, b=1.0, startangle=0.0, endangle=360.0 )
-
+
Create a Cycloid, Epicycloid or Hypocycloid curve
Parameters:
@@ -752,7 +752,7 @@ def main(context, self, align_matrix):
self.cyclo_b,
self.cycloStart,
self.cycloEnd)
-
+
# turn verts into array
vertArray = vertsToPoints(verts, splineType)
@@ -847,7 +847,7 @@ class Curveaceous_galore(bpy.types.Operator):
default=0.1,
min=0, soft_min=0,
description="var3 of MiscCurve")
-
+
#### Common properties
innerRadius = FloatProperty(name="Inner radius",
default=0.5,
@@ -861,7 +861,7 @@ class Curveaceous_galore(bpy.types.Operator):
default=1.0,
min=0, soft_min=0,
description="Outer radius")
-
+
#### Flower properties
petals = IntProperty(name="Petals",
default=8,
@@ -991,7 +991,7 @@ class Curveaceous_galore(bpy.types.Operator):
def draw(self, context):
layout = self.layout
- # general options
+ # general options
col = layout.column()
col.prop(self, 'GalloreType')
col.label(text=self.GalloreType + " Options:")
@@ -1107,7 +1107,7 @@ class Curveaceous_galore(bpy.types.Operator):
# main function
main(context, self, self.align_matrix)
-
+
# restore pre operator undo state
bpy.context.user_preferences.edit.use_global_undo = undo
diff --git a/add_curve_extra_objects/add_curve_torus_knots.py b/add_curve_extra_objects/add_curve_torus_knots.py
index cb974e13..591517cd 100644
--- a/add_curve_extra_objects/add_curve_torus_knots.py
+++ b/add_curve_extra_objects/add_curve_torus_knots.py
@@ -20,18 +20,17 @@
bl_info = {
"name": "Torus Knots",
"author": "testscreenings",
- "version": (0,1),
+ "version": (0, 1),
"blender": (2, 59, 0),
"location": "View3D > Add > Curve",
"description": "Adds many types of (torus) knots",
"warning": "",
- "wiki_url": "http://wiki.blender.org/index.php/Extensions:2.6/Py/"\
+ "wiki_url": "http://wiki.blender.org/index.php/Extensions:2.6/Py/"
"Scripts/Curve/Torus_Knot",
- "tracker_url": "https://projects.blender.org/tracker/index.php?"\
- "func=detail&aid=22403",
+ "tracker_url": "https://developer.blender.org/T22403",
"category": "Add Curve"}
-'''
-
+'''
+
##------------------------------------------------------------
#### import modules
import bpy
@@ -39,7 +38,7 @@ from bpy.props import *
from math import sin, cos, pi
from bpy_extras.object_utils import AddObjectHelper, object_data_add
-
+
########################################################################
####################### Knot Definitions ###############################
########################################################################
@@ -52,7 +51,7 @@ def Torus_Knot(self):
u = self.torus_u
v = self.torus_v
rounds = self.torus_rounds
-
+
newPoints = []
angle = 2*rounds
step = angle/(res-1)
@@ -61,11 +60,11 @@ def Torus_Knot(self):
for i in range(res-1):
t = ( i*step*pi)
-
+
x = (2 * scale + cos((q*t)/p*v)) * cos(t * u)
y = (2 * scale + cos((q*t)/p*v)) * sin(t * u)
z = sin(q*t/p) * height
-
+
newPoints.extend([x,y,z,1])
return newPoints
@@ -83,8 +82,8 @@ def create_torus_knot(self, context):
spline.use_endpoint_u = True
spline.use_cyclic_u = True
spline.order_u = 4
- curve_data.dimensions = '3D'
-
+ curve_data.dimensions = '3D'
+
if self.geo_surf:
curve_data.bevel_depth = self.geo_bDepth
curve_data.bevel_resolution = self.geo_bRes
@@ -92,7 +91,7 @@ def create_torus_knot(self, context):
curve_data.extrude = self.geo_extrude
#curve_data.offset = self.geo_width # removed, somehow screws things up all of a sudden
curve_data.resolution_u = self.geo_res
-
+
new_obj = object_data_add(context, curve_data, operator=self)
@@ -171,10 +170,10 @@ class torus_knot_plus(bpy.types.Operator, AddObjectHelper):
def draw(self, context):
layout = self.layout
- # general options
+ # general options
layout.label(text="Torus Knot Parameters:")
- # Parameters
+ # Parameters
box = layout.box()
box.prop(self, 'torus_res')
box.prop(self, 'torus_w')
@@ -201,7 +200,7 @@ class torus_knot_plus(bpy.types.Operator, AddObjectHelper):
col = layout.column()
col.prop(self, 'location')
col.prop(self, 'rotation')
-
+
##### POLL #####
@classmethod
def poll(cls, context):
@@ -212,13 +211,13 @@ class torus_knot_plus(bpy.types.Operator, AddObjectHelper):
# turn off undo
undo = bpy.context.user_preferences.edit.use_global_undo
bpy.context.user_preferences.edit.use_global_undo = False
-
+
if not self.options_plus:
self.torus_rounds = self.torus_p
#recoded for add_utils
create_torus_knot(self, context)
-
+
# restore pre operator undo state
bpy.context.user_preferences.edit.use_global_undo = undo
diff --git a/add_curve_ivygen.py b/add_curve_ivygen.py
index aade98a7..3c30e52b 100644
--- a/add_curve_ivygen.py
+++ b/add_curve_ivygen.py
@@ -24,13 +24,12 @@ bl_info = {
"version": (0, 1, 1),
"blender": (2, 59, 0),
"location": "View3D > Add > Curve",
- "description": "Adds generated ivy to a mesh object starting at the 3D"\
- " cursor",
+ "description": "Adds generated ivy to a mesh object starting "
+ "at the 3D cursor",
"warning": "",
- "wiki_url": "http://wiki.blender.org/index.php/Extensions:2.6/Py/"\
+ "wiki_url": "http://wiki.blender.org/index.php/Extensions:2.6/Py/"
"Scripts/Curve/Ivy_Gen",
- "tracker_url": "http://projects.blender.org/tracker/index.php?"\
- "func=detail&aid=27234",
+ "tracker_url": "https://developer.blender.org/T27234",
"category": "Add Curve"}
diff --git a/add_curve_sapling/__init__.py b/add_curve_sapling/__init__.py
index 12d75757..272d5ab9 100644
--- a/add_curve_sapling/__init__.py
+++ b/add_curve_sapling/__init__.py
@@ -26,12 +26,12 @@ bl_info = {
"Jason Weber & Joseph Penn in their paper 'Creation and Rendering of "
"Realistic Trees'."),
"warning": "length parameters may cause errors", # used for warning icon and text in addons panel
- "wiki_url": "http://wiki.blender.org/index.php/Extensions:2.6/Py/"\
+ "wiki_url": "http://wiki.blender.org/index.php/Extensions:2.6/Py/"
"Scripts/Curve/Sapling_Tree",
- "tracker_url": "http://projects.blender.org/tracker/"\
- "?func=detail&atid=469&aid=27226&group_id=153",
+ "tracker_url": "https://developer.blender.org/T27226",
"category": "Add Curve"}
+
if "bpy" in locals():
import imp
imp.reload(utils)
@@ -157,7 +157,7 @@ class AddTree(bpy.types.Operator):
def update_tree(self, context):
self.do_update = True
-
+
def no_update_tree(self, context):
self.do_update = False
@@ -407,20 +407,20 @@ class AddTree(bpy.types.Operator):
box = layout.box()
box.label("Geometry:")
box.prop(self, 'bevel')
-
+
row = box.row()
row.prop(self, 'bevelRes')
row.prop(self, 'resU')
-
+
box.prop(self, 'handleType')
box.prop(self, 'shape')
box.prop(self, 'seed')
box.prop(self, 'ratio')
-
+
row = box.row()
row.prop(self, 'scale')
row.prop(self, 'scaleV')
-
+
row = box.row()
row.prop(self, 'scale0')
row.prop(self, 'scaleV0')
@@ -455,15 +455,15 @@ class AddTree(bpy.types.Operator):
box.prop(self, 'levels')
box.prop(self, 'baseSplits')
box.prop(self, 'baseSize')
-
+
split = box.split()
-
+
col = split.column()
col.prop(self, 'branches')
col.prop(self, 'splitAngle')
col.prop(self, 'downAngle')
col.prop(self, 'rotate')
-
+
col = split.column()
col.prop(self, 'segSplits')
col.prop(self, 'splitAngleV')
@@ -477,19 +477,19 @@ class AddTree(bpy.types.Operator):
box.label("Branch Growth:")
box.prop(self, 'startCurv')
box.prop(self, 'attractUp')
-
+
split = box.split()
-
+
col = split.column()
col.prop(self, 'length')
col.prop(self, 'curve')
col.prop(self, 'curveBack')
-
+
col = split.column()
col.prop(self, 'lengthV')
col.prop(self, 'curveV')
col.prop(self, 'taper')
-
+
box.column().prop(self, 'curveRes')
elif self.chooseSet == '3':
@@ -499,7 +499,7 @@ class AddTree(bpy.types.Operator):
box.prop(self, 'pruneRatio')
box.prop(self, 'pruneWidth')
box.prop(self, 'pruneWidthPeak')
-
+
row = box.row()
row.prop(self, 'prunePowerHigh')
row.prop(self, 'prunePowerLow')
@@ -511,25 +511,25 @@ class AddTree(bpy.types.Operator):
box.prop(self, 'leafShape')
box.prop(self, 'leaves')
box.prop(self, 'leafDist')
-
+
row = box.row()
row.prop(self, 'leafScale')
row.prop(self, 'leafScaleX')
-
+
box.prop(self, 'bend')
elif self.chooseSet == '5':
box = layout.box()
box.label("Armature and Animation:")
-
+
row = box.row()
row.prop(self, 'useArm')
row.prop(self, 'armAnim')
-
+
row = box.row()
row.prop(self, 'windSpeed')
row.prop(self, 'windGust')
-
+
box.prop(self, 'frameRate')
def execute(self, context):
@@ -550,7 +550,7 @@ class AddTree(bpy.types.Operator):
addTree(self)
print("Tree creation in %0.1fs" %(time.time()-start_time))
return {'FINISHED'}
-
+
def invoke(self, context, event):
# global settings, useSet
# useSet = True
diff --git a/add_mesh_BoltFactory/__init__.py b/add_mesh_BoltFactory/__init__.py
index 7488856d..b243a03c 100644
--- a/add_mesh_BoltFactory/__init__.py
+++ b/add_mesh_BoltFactory/__init__.py
@@ -23,12 +23,12 @@ bl_info = {
"blender": (2, 63, 0),
"location": "View3D > Add > Mesh",
"description": "Add a bolt or nut",
- "wiki_url": "http://wiki.blender.org/index.php/Extensions:2.6/Py/"\
+ "wiki_url": "http://wiki.blender.org/index.php/Extensions:2.6/Py/"
"Scripts/Add_Mesh/BoltFactory",
- "tracker_url": "https://projects.blender.org/tracker/index.php?"\
- "func=detail&aid=22842",
+ "tracker_url": "https://developer.blender.org/T22842",
"category": "Add Mesh"}
+
if "bpy" in locals():
import imp
imp.reload(Boltfactory)
@@ -55,6 +55,6 @@ def unregister():
bpy.types.INFO_MT_mesh_add.remove(add_mesh_bolt_button)
#bpy.types.VIEW3D_PT_tools_objectmode.remove(add_mesh_bolt_button) #just for testing
-
+
if __name__ == "__main__":
register()
diff --git a/add_mesh_ant_landscape.py b/add_mesh_ant_landscape.py
index be6393ce..25fe5d3c 100644
--- a/add_mesh_ant_landscape.py
+++ b/add_mesh_ant_landscape.py
@@ -24,10 +24,9 @@ bl_info = {
"location": "View3D > Add > Mesh",
"description": "Add a landscape primitive",
"warning": "", # used for warning icon and text in addons panel
- "wiki_url": "http://wiki.blender.org/index.php/Extensions:2.6/Py/"\
+ "wiki_url": "http://wiki.blender.org/index.php/Extensions:2.6/Py/"
"Scripts/Add_Mesh/ANT_Landscape",
- "tracker_url": "https://projects.blender.org/tracker/index.php?"\
- "func=detail&aid=23130",
+ "tracker_url": "https://developer.blender.org/T23130",
"category": "Add Mesh"}
"""
@@ -356,7 +355,7 @@ def landscape_gen(x,y,z,falloffsize,options=[0,1.0,1, 0,0,1.0,0,6,1.0,2.0,1.0,2.
value = ( value * (1.0-0.5) + steps*0.5 ) * 2.0
elif stratatype == '2':
steps = -abs( sin( value*(strata)*pi ) * ( 0.1/(strata)*pi ) )
- value =( value * (1.0-0.5) + steps*0.5 ) * 2.0
+ value =( value * (1.0-0.5) + steps*0.5 ) * 2.0
elif stratatype == '3':
steps = abs( sin( value*(strata)*pi ) * ( 0.1/(strata)*pi ) )
value =( value * (1.0-0.5) + steps*0.5 ) * 2.0
@@ -484,7 +483,7 @@ class landscape_add(bpy.types.Operator):
("7","Marble","Marble"),
("8","Shattered_hTerrain","Shattered_hTerrain"),
("9","Strata_hTerrain","Strata_hTerrain")]
-
+
NoiseType = EnumProperty(name="Type",
description="Noise type",
items=NoiseTypes)
diff --git a/add_mesh_extra_objects/__init__.py b/add_mesh_extra_objects/__init__.py
index a8c71ae5..93cb7f23 100644
--- a/add_mesh_extra_objects/__init__.py
+++ b/add_mesh_extra_objects/__init__.py
@@ -26,10 +26,9 @@ bl_info = {
"location": "View3D > Add > Mesh > Extra Objects",
"description": "Add extra object types",
"warning": "",
- "wiki_url": "http://wiki.blender.org/index.php/Extensions:2.6/Py/"\
+ "wiki_url": "http://wiki.blender.org/index.php/Extensions:2.6/Py/"
"Scripts/Add_Mesh/Add_Extra",
- "tracker_url": "http://projects.blender.org/tracker/index.php?"\
- "func=detail&aid=22457",
+ "tracker_url": "https://developer.blender.org/T22457",
"category": "Add Mesh"}
@@ -89,7 +88,7 @@ class INFO_MT_mesh_gemstones_add(bpy.types.Menu):
layout.operator("mesh.primitive_gem_add",
text="Gem")
-
+
class INFO_MT_mesh_gears_add(bpy.types.Menu):
# Define the "Gears" menu
bl_idname = "INFO_MT_mesh_gears_add"
@@ -133,7 +132,7 @@ class INFO_MT_mesh_basic_add(bpy.types.Menu):
text="Trapezohedron")
layout.operator("mesh.primitive_polysphere_add",
text="Polysphere")
-
+
class INFO_MT_mesh_torus_add(bpy.types.Menu):
# Define the "Simple Objects" menu
bl_idname = "INFO_MT_mesh_torus_add"
diff --git a/add_mesh_extra_objects/add_mesh_3d_function_surface.py b/add_mesh_extra_objects/add_mesh_3d_function_surface.py
index 2fa128a4..64fe9879 100644
--- a/add_mesh_extra_objects/add_mesh_3d_function_surface.py
+++ b/add_mesh_extra_objects/add_mesh_3d_function_surface.py
@@ -24,10 +24,9 @@ bl_info = {
"location": "View3D > Add > Mesh",
"description": "Create Objects using Math Formulas",
"warning": "",
- "wiki_url": "http://wiki.blender.org/index.php/Extensions:2.5/Py/"\
+ "wiki_url": "http://wiki.blender.org/index.php/Extensions:2.5/Py/"
"Scripts/Add_Mesh/Add_3d_Function_Surface",
- "tracker_url": "https://projects.blender.org/tracker/index.php?"\
- "func=detail&aid=21444",
+ "tracker_url": "https://developer.blender.org/T21444",
"category": "Add Mesh"}
'''
diff --git a/add_mesh_extra_objects/add_mesh_gears.py b/add_mesh_extra_objects/add_mesh_gears.py
index 38a3d0be..7dfa46b0 100644
--- a/add_mesh_extra_objects/add_mesh_gears.py
+++ b/add_mesh_extra_objects/add_mesh_gears.py
@@ -27,10 +27,9 @@ bl_info = {
"location": "View3D > Add > Mesh > Gears ",
"description": "Adds a mesh Gear to the Add Mesh menu",
"warning": "",
- "wiki_url": "http://wiki.blender.org/index.php/Extensions:2.5/Py/"\
+ "wiki_url": "http://wiki.blender.org/index.php/Extensions:2.5/Py/"
"Scripts/Add_Mesh/Add_Gear",
- "tracker_url": "https://projects.blender.org/tracker/index.php?"\
- "func=detail&aid=21732",
+ "tracker_url": "https://developer.blender.org/T21732",
"category": "Add Mesh"}
'''
@@ -638,19 +637,19 @@ class AddGear(bpy.types.Operator):
def draw(self, context):
layout = self.layout
-
+
box = layout.box()
box.prop(self, 'number_of_teeth')
-
+
box = layout.box()
box.prop(self, 'radius')
box.prop(self, 'width')
box.prop(self, 'base')
-
+
box = layout.box()
box.prop(self, 'dedendum')
box.prop(self, 'addendum')
-
+
box = layout.box()
box.prop(self, 'angle')
box.prop(self, 'skew')
diff --git a/add_mesh_extra_objects/add_mesh_gemstones.py b/add_mesh_extra_objects/add_mesh_gemstones.py
index 7d3db788..7372d2f7 100644
--- a/add_mesh_extra_objects/add_mesh_gemstones.py
+++ b/add_mesh_extra_objects/add_mesh_gemstones.py
@@ -24,10 +24,9 @@ bl_info = {
"location": "View3D > Add > Mesh > Gemstones",
"description": "Adds various gemstone (Diamond & Gem) meshes.",
"warning": "",
- "wiki_url": "http://wiki.blender.org/index.php/Extensions:2.5/Py/"\
+ "wiki_url": "http://wiki.blender.org/index.php/Extensions:2.5/Py/"
"Scripts/Add_Mesh/Add_Gemstones",
- "tracker_url": "https://projects.blender.org/tracker/index.php?"\
- "func=detail&aid=21432",
+ "tracker_url": "https://developer.blender.org/T21432",
"category": "Add Mesh"}
'''
import bpy
diff --git a/add_mesh_extra_objects/add_mesh_honeycomb.py b/add_mesh_extra_objects/add_mesh_honeycomb.py
index e810e0de..d42f66d5 100644
--- a/add_mesh_extra_objects/add_mesh_honeycomb.py
+++ b/add_mesh_extra_objects/add_mesh_honeycomb.py
@@ -21,13 +21,11 @@ bl_info = {
"author": "Kayo Phoenix <kayo@illumium.org>",
"version": (0, 1),
"blender": (2, 57, 0),
- "api": 35853,
"location": "View3D > Add > Mesh > HoneyComb",
"description": "Adds HoneyComb Mesh",
"warning": "",
"wiki_url": "http://wiki.blender.org/index.php/Extensions:2.5/Py/Scripts/Add_Mesh/HoneyComb",
- "category": "Add Mesh"
- }
+ "category": "Add Mesh"}
'''
from math import pi, sin, cos
@@ -37,37 +35,37 @@ class honeycomb_geometry():
self.cols = cols
self.D = D
self.E = E
-
+
self.hE = 0.5 * self.E
self.R = 0.5 * self.D
-
+
self.a = sin(pi / 3)
-
+
self.d = self.a * self.D
self.hd = 0.5 * self.d
self.e = self.hE / self.a
self.he = 0.5 * self.e
self.r = self.R - self.e
self.hr = 0.5 * self.r
-
-
+
+
self.H = self.R * (1.5 * self.rows + 0.5) + self.e
if self.rows > 1:
self.W = self.d * (self.cols + 0.5) + self.E
else:
self.W = self.d * self.cols + self.E
-
+
self.hH = 0.5 * self.H
self.hW = 0.5 * self.W
-
+
self.sy = -self.hH + self.he + self.R
self.sx = -self.hW + self.hE + self.hd
-
+
self.gx = self.hd
-
+
self.dy = 1.5 * self.R
self.dx = self.d
-
+
def vert(self, row, col):
# full cell
if row >= 0 and row < self.rows and col >= 0 and col < self.cols: return [0, 1, 2, 3, 4, 5]
@@ -101,20 +99,20 @@ class honeycomb_geometry():
if row % 2 or self.rows == 1: return [2, 3]
else: return [1, 2, 3, 4]
return []
-
+
def cell(self, row, col, idx):
cp = [self.sx + self.dx * col, self.sy + self.dy * row, 0] # central point
if row % 2: cp[0] += self.gx
co = [] # vertexes coords
vi = self.vert(row, col)
ap = {}
-
+
for i in vi:
a = pi / 6 + i * pi / 3 # angle
ap[i] = idx + len(co)
co.append((cp[0] + cos(a) * self.r, cp[1] + sin(a) * self.r, cp[2]))
return co, ap
-
+
def generate(self):
ar = 1
ac = 1
@@ -122,7 +120,7 @@ class honeycomb_geometry():
cells = []
verts = []
faces = []
-
+
for row in range(-ar, self.rows + ar):
level = []
for col in range(-ac, self.cols + ac):
@@ -130,14 +128,14 @@ class honeycomb_geometry():
verts += co
level.append(ap)
cells.append(level)
-
+
# bottom row
row = 0
for col in range(1, len(cells[row]) - 1):
s = cells[row][col]
l = cells[row][col - 1]
u = cells[row + 1][col]
-
+
faces.append((s[1], u[5], u[4], s[2]))
faces.append((s[2], u[4], l[0]))
@@ -151,7 +149,7 @@ class honeycomb_geometry():
d = cells[row - 1][col - cs]
faces.append((s[3], l[5], d[1]))
faces.append([s[3], d[1], d[0], s[4]])
-
+
# middle rows
for row in range(1, len(cells) - 1):
cs = 0
@@ -161,25 +159,25 @@ class honeycomb_geometry():
l = cells[row][col - 1]
u = cells[row + 1][col - cs]
d = cells[row - 1][col - cs]
-
+
faces.append((s[1], u[5], u[4], s[2]))
faces.append((s[2], u[4], l[0]))
faces.append([s[2], l[0], l[5], s[3]])
faces.append((s[3], l[5], d[1]))
faces.append([s[3], d[1], d[0], s[4]])
-
+
# right column
row = 0
col = len(cells[row]) - 1
for row in range(1, len(cells) - 1):
cs = 0
if row % 2: cs += 1
-
+
s = cells[row][col]
l = cells[row][col - 1]
u = cells[row + 1][col - cs]
d = cells[row - 1][col - cs]
-
+
if row % 2 and row < len(cells) - 2:
faces.append((s[1], u[5], u[4], s[2]))
faces.append((s[2], u[4], l[0]))
@@ -187,7 +185,7 @@ class honeycomb_geometry():
faces.append((s[3], l[5], d[1]))
if row % 2 and row > 1:
faces.append([s[3], d[1], d[0], s[4]])
-
+
# final fix
if not self.rows % 2:
row = len(cells) - 1
@@ -196,7 +194,7 @@ class honeycomb_geometry():
d = cells[row - 1][col - 1]
faces.append((s[3], l[5], d[1]))
faces.append([s[3], d[1], d[0], s[4]])
-
+
return verts, faces
import bpy
@@ -211,31 +209,31 @@ class add_mesh_honeycomb(bpy.types.Operator):
bl_idname = 'mesh.honeycomb_add'
bl_label = 'Add HoneyComb'
bl_options = {'REGISTER', 'UNDO'}
-
+
rows = IntProperty(
name = 'Num of rows', default = 2,
min = 1, max = 100,
description='Number of the rows')
-
+
cols = IntProperty(
name = 'Num of cols', default = 2,
min = 1, max = 100,
description='Number of the columns')
-
+
def fix_edge(self, context):
m = edge_max(self.diam)
if self.edge > m: self.edge = m
-
+
diam = FloatProperty(
name = 'Cell Diameter', default = 1.0,
min = 0.0, update = fix_edge,
description='Diameter of the cell')
-
+
edge = FloatProperty(
name = 'Edge Width', default = 0.1,
min = 0.0, update = fix_edge,
description='Width of the edge')
-
+
# generic transform props
view_align = BoolProperty(
name="Align to View",
@@ -246,24 +244,24 @@ class add_mesh_honeycomb(bpy.types.Operator):
rotation = FloatVectorProperty(
name="Rotation",
subtype='EULER')
-
+
##### POLL #####
@classmethod
def poll(cls, context):
return context.scene is not None
-
+
##### EXECUTE #####
def execute(self, context):
mesh = bpy.data.meshes.new(name='honeycomb')
-
+
comb = honeycomb_geometry(self.rows, self.cols, self.diam, self.edge)
verts, faces = comb.generate()
-
+
mesh.from_pydata(vertices = verts, edges = [], faces = faces)
mesh.update()
-
+
object_utils.object_data_add(context, mesh, operator=self)
-
+
return {'FINISHED'}
'''
def menu_func(self, context):
@@ -271,14 +269,14 @@ def menu_func(self, context):
def register():
bpy.utils.register_module(__name__)
-
+
bpy.types.INFO_MT_mesh_add.append(menu_func)
def unregister():
bpy.utils.unregister_module(__name__)
-
+
bpy.types.INFO_MT_mesh_add.remove(menu_func)
-
+
if __name__ == "__main__":
register()
'''
diff --git a/add_mesh_extra_objects/add_mesh_polysphere.py b/add_mesh_extra_objects/add_mesh_polysphere.py
index 3454618c..5c5b1c2e 100644
--- a/add_mesh_extra_objects/add_mesh_polysphere.py
+++ b/add_mesh_extra_objects/add_mesh_polysphere.py
@@ -18,17 +18,17 @@
'''
bl_info = {
"name": "Add PolySphere",
- "author": "Andy Davies (metalliandy)",
+ "author": "Andy Davies (metalliandy)",
"version": (0,1,6),
"blender": (2, 62, 0),
"location": "View3D > Add > Mesh > PolySphere",
"description": "Adds a PolySphere (all quads) for sculpting",
"warning": "",
- "wiki_url": "http://wiki.blender.org/index.php/Extensions:2.5/Py/"\
+ "wiki_url": "http://wiki.blender.org/index.php/Extensions:2.5/Py/"
"Scripts/Add_Mesh/Add_PolySphere",
"tracker_url": "",
"category": "Add Mesh"}
-'''
+'''
import bpy
@@ -50,7 +50,7 @@ def Add_PolySphere():
subsurf = cube.modifiers['Subsurf']
#Changes Subsurf levels
- subsurf.levels = 3
+ subsurf.levels = 3
#Applys Subsurf Modifier
bpy.ops.object.modifier_apply(apply_as='DATA', modifier="Subsurf")
@@ -69,16 +69,16 @@ def Add_PolySphere():
#Change to Objectmode
bpy.ops.object.editmode_toggle()
-
+
#Scales Object to 2.0 Units
bpy.ops.transform.resize(value=(1.15525, 1.15525, 1.15525), constraint_axis=(False, False, False), constraint_orientation='GLOBAL', mirror=False, proportional='DISABLED', proportional_edit_falloff='SMOOTH', proportional_size=1, snap=False, snap_target='CLOSEST', snap_point=(0, 0, 0), snap_align=False, snap_normal=(0, 0, 0), release_confirm=False)
-
+
#Applys location, rotation and scale data
bpy.ops.object.transform_apply(location=True, rotation=True, scale=True)
#makes PolySphere an operator
class AddPolySphere(bpy.types.Operator):
-
+
bl_idname = "mesh.primitive_polysphere_add"
bl_label = "Add PolySphere"
bl_options = {'REGISTER', 'UNDO'}
diff --git a/add_mesh_extra_objects/add_mesh_twisted_torus.py b/add_mesh_extra_objects/add_mesh_twisted_torus.py
index 4ebf7e3e..984c2655 100644
--- a/add_mesh_extra_objects/add_mesh_twisted_torus.py
+++ b/add_mesh_extra_objects/add_mesh_twisted_torus.py
@@ -28,12 +28,11 @@ bl_info = {
"location": "View3D > Add > Mesh ",
"description": "Adds a mesh Twisted Torus to the Add Mesh menu",
"warning": "",
- "wiki_url": "http://wiki.blender.org/index.php/Extensions:2.5/Py/"\
+ "wiki_url": "http://wiki.blender.org/index.php/Extensions:2.5/Py/"
"Scripts/Add_Mesh/Add_Twisted_Torus",
- "tracker_url": "https://projects.blender.org/tracker/index.php?"\
- "func=detail&aid=21622",
+ "tracker_url": "https://developer.blender.org/T21622",
"category": "Add Mesh"}
-
+
Usage:
* Launch from Add Mesh menu
diff --git a/add_mesh_pipe_joint.py b/add_mesh_pipe_joint.py
index 9d1fcc0d..adb9aea1 100644
--- a/add_mesh_pipe_joint.py
+++ b/add_mesh_pipe_joint.py
@@ -24,10 +24,9 @@ bl_info = {
"location": "View3D > Add > Mesh > Pipe Joints",
"description": "Add different types of pipe joints",
"warning": "",
- "wiki_url": "http://wiki.blender.org/index.php/Extensions:2.6/Py/"\
+ "wiki_url": "http://wiki.blender.org/index.php/Extensions:2.6/Py/"
"Scripts/Add_Mesh/Add_Pipe_Joints",
- "tracker_url": "https://projects.blender.org/tracker/index.php?"\
- "func=detail&aid=21443",
+ "tracker_url": "https://developer.blender.org/T21443",
"category": "Add Mesh"}
import bpy
diff --git a/add_mesh_solid.py b/add_mesh_solid.py
index cb874fed..42501626 100644
--- a/add_mesh_solid.py
+++ b/add_mesh_solid.py
@@ -25,10 +25,9 @@ bl_info = {
"location": "View3D > Add > Mesh > Solids",
"description": "Add a regular solid",
"warning": "",
- "wiki_url": "http://wiki.blender.org/index.php/Extensions:2.6/Py/"\
+ "wiki_url": "http://wiki.blender.org/index.php/Extensions:2.6/Py/"
"Scripts/Add_Mesh/Add_Solid",
- "tracker_url": "https://projects.blender.org/tracker/index.php?"\
- "func=detail&aid=22405",
+ "tracker_url": "https://developer.blender.org/T22405",
"category": "Add Mesh"}
import bpy
@@ -61,14 +60,14 @@ def createPolys(poly):
else:
f = [[i[x],i[x+1],i[L-2-x],i[L-1-x]] for x in range(L//2-1)]
faces.extend(f)
- if L&1 == 1:
+ if L&1 == 1:
faces.append([i[L//2-1+x] for x in [0,1,2]])
return faces
-
-# function to make the reduce function work as a workaround to sum a list of vectors
+
+# function to make the reduce function work as a workaround to sum a list of vectors
def vSum(list):
return reduce(lambda a,b: a+b, list)
-
+
# creates the 5 platonic solids as a base for the rest
# plato: should be one of {"4","6","8","12","20"}. decides what solid the
# outcome will be.
@@ -92,7 +91,7 @@ def source(plato):
elif plato == "6":
# Calculate the necessary constants
s = 1/sqrt(3)
-
+
# create the vertices and faces
v = [(-s,-s,-s),(s,-s,-s),(s,s,-s),(-s,s,-s),(-s,-s,s),(s,-s,s),(s,s,s),(-s,s,s)]
faces = [[0,3,2,1],[0,1,5,4],[0,4,7,3],[6,5,1,2],[6,2,3,7],[6,7,4,5]]
@@ -137,7 +136,7 @@ def source(plato):
verts = [Vector(i) for i in v]
return verts,faces
-
+
# processes the raw data from source
def createSolid(plato,vtrunc,etrunc,dual,snub):
# the duals from each platonic solid
@@ -177,16 +176,16 @@ def createSolid(plato,vtrunc,etrunc,dual,snub):
return vInput, fInput
vInput = [-i*supposedSize for i in vInput]
return vInput, fInput
-
+
# generate connection database
vDict = [{} for i in vInput]
- # for every face, store what vertex comes after and before the current vertex
+ # for every face, store what vertex comes after and before the current vertex
for x in range(len(fInput)):
i = fInput[x]
for j in range(len(i)):
vDict[i[j-1]][i[j]] = [i[j-2],x]
- if len(vDict[i[j-1]]) == 1: vDict[i[j-1]][-1] = i[j]
-
+ if len(vDict[i[j-1]]) == 1: vDict[i[j-1]][-1] = i[j]
+
# the actual connection database: exists out of:
# [vtrunc pos, etrunc pos, connected vert IDs, connected face IDs]
vData = [[[],[],[],[]] for i in vInput]
@@ -205,8 +204,8 @@ def createSolid(plato,vtrunc,etrunc,dual,snub):
if current == i[-1]: break # if we're back at the first: stop the loop
fvOutput.append([]) # new face from truncated vert
fOffset = x*(len(i)-1) # where to start off counting faceVerts
- # only create one vert where one is needed (v1 todo: done)
- if etrunc == 0.5:
+ # only create one vert where one is needed (v1 todo: done)
+ if etrunc == 0.5:
for j in range(len(i)-1):
vOutput.append((vData[x][0][j]+vData[x][0][j-1])*etrunc) # create vert
fvOutput[x].append(fOffset+j) # add to face
@@ -280,7 +279,7 @@ def createSolid(plato,vtrunc,etrunc,dual,snub):
if supposedSize and not dual: # this to make the vtrunc > 1 work
supposedSize *= len(fvOutput[0])/vSum(vOutput[i] for i in fvOutput[0]).length
vOutput = [-i*supposedSize for i in vOutput]
-
+
# create new faces by replacing old vert IDs by newly generated verts
ffOutput = [[] for i in fInput]
for x in range(len(fInput)):
@@ -296,16 +295,16 @@ def createSolid(plato,vtrunc,etrunc,dual,snub):
for i in fInput[x]:
ffOutput[x].append(fvOutput[i][vData[i][3].index(x)])
ffOutput[x].append(fvOutput[i][vData[i][3].index(x)-1])
-
+
if not dual:
return vOutput,fvOutput + feOutput + ffOutput
- else:
+ else:
# do the same procedure as above, only now on the generated mesh
# generate connection database
vDict = [{} for i in vOutput]
dvOutput = [0 for i in fvOutput + feOutput + ffOutput]
dfOutput = []
-
+
for x in range(len(dvOutput)): # for every face
i = (fvOutput + feOutput + ffOutput)[x] # choose face to work with
# find vertex from face
@@ -314,12 +313,12 @@ def createSolid(plato,vtrunc,etrunc,dual,snub):
for j in range(len(i)): # create vert chain
vDict[i[j-1]][i[j]] = [i[j-2],x]
if len(vDict[i[j-1]]) == 1: vDict[i[j-1]][-1] = i[j]
-
+
# calculate supposed size for continuity
supposedSize = vSum([vInput[i] for i in fInput[0]]).length/len(fInput[0])
supposedSize /= dvOutput[-1].length
dvOutput = [i*supposedSize for i in dvOutput]
-
+
# use chains to create faces
for x in range(len(vOutput)):
i = vDict[x]
@@ -330,7 +329,7 @@ def createSolid(plato,vtrunc,etrunc,dual,snub):
current = i[current][0]
if current == i[-1]: break
dfOutput.append(face)
-
+
return dvOutput,dfOutput
class Solids(bpy.types.Operator):
@@ -412,7 +411,7 @@ class Solids(bpy.types.Operator):
("ds12","Pentagonal Hexecontahedron","")),
name = "Presets",
description = "Parameters for some hard names")
-
+
# actual preset values
p = {"t4":["4",2/3,0,0,"None"],
"r4":["4",1,1,0,"None"],
@@ -440,7 +439,7 @@ class Solids(bpy.types.Operator):
"db12":["12",1.1338,1,1,"None"],
"dc12":["20",0.921,0.553,1,"None"],
"ds12":["12",1.1235,0.68,1,"Left"]}
-
+
#previous preset, for User-friendly reasons
previousSetting = ""
@@ -459,25 +458,25 @@ class Solids(bpy.types.Operator):
self.eTrunc = using[2]
self.dual = using[3]
self.snub = using[4]
- else:
+ else:
using = self.p[self.preset]
result0 = self.source == using[0]
result1 = abs(self.vTrunc - using[1]) < 0.004
result2 = abs(self.eTrunc - using[2]) < 0.0015
- result4 = using[4] == self.snub or ((using[4] == "Left") and
+ result4 = using[4] == self.snub or ((using[4] == "Left") and
self.snub in ["Left","Right"])
- if (result0 and result1 and result2 and result4):
+ if (result0 and result1 and result2 and result4):
if self.p[self.previousSetting][3] != self.dual:
- if self.preset[0] == "d":
+ if self.preset[0] == "d":
self.preset = self.preset[1:]
else:
self.preset = "d" + self.preset
- else:
+ else:
self.preset = "0"
self.previousSetting = self.preset
-
- # generate mesh
+
+ # generate mesh
verts,faces = createSolid(self.source,
self.vTrunc,
self.eTrunc,
@@ -486,7 +485,7 @@ class Solids(bpy.types.Operator):
# turn n-gons in quads and tri's
faces = createPolys(faces)
-
+
# resize to normal size, or if keepSize, make sure all verts are of length 'size'
if self.keepSize:
rad = self.size/verts[-1 if self.dual else 0].length
@@ -502,12 +501,12 @@ class Solids(bpy.types.Operator):
# Update mesh geometry after adding stuff.
mesh.update()
-
+
object_data_add(context, mesh, operator=None)
# object generation done
# turn undo back on
- bpy.context.user_preferences.edit.use_global_undo = True
+ bpy.context.user_preferences.edit.use_global_undo = True
return {'FINISHED'}
@@ -564,7 +563,7 @@ class CatalanMenu(bpy.types.Menu):
"""Defines Catalan preset menu"""
bl_idname = "Catalan_calls"
bl_label = "Catalan"
-
+
def draw(self, context):
layout = self.layout
layout.operator_context = 'INVOKE_REGION_WIN'
@@ -581,7 +580,7 @@ class CatalanMenu(bpy.types.Menu):
layout.operator(Solids.bl_idname, text = "Deltoidal Hexecontahedron").preset = "db12"
layout.operator(Solids.bl_idname, text = "Disdyakis Triacontahedron").preset = "dc12"
layout.operator(Solids.bl_idname, text = "Pentagonal Hexecontahedron").preset = "ds12"
-
+
def menu_func(self, context):
self.layout.menu(Solids_add_menu.bl_idname, icon="PLUGIN")
diff --git a/animation_add_corrective_shape_key.py b/animation_add_corrective_shape_key.py
index a576d352..cbe5435b 100644
--- a/animation_add_corrective_shape_key.py
+++ b/animation_add_corrective_shape_key.py
@@ -27,8 +27,7 @@ bl_info = {
'description': 'Creates a corrective shape key for the current pose',
"wiki_url": "http://wiki.blender.org/index.php/Extensions:2.6/Py/"
"Scripts/Animation/Corrective_Shape_Key",
- "tracker_url": "https://projects.blender.org/tracker/index.php?"
- "func=detail&aid=22129",
+ "tracker_url": "https://developer.blender.org/T22129",
'category': 'Animation'}
"""
diff --git a/animation_animall.py b/animation_animall.py
index 5373e1b5..6f3706a6 100644
--- a/animation_animall.py
+++ b/animation_animall.py
@@ -25,8 +25,7 @@ bl_info = {
'description': 'Allows animation of mesh, lattice, curve and surface data',
'warning': '',
'wiki_url': 'http://wiki.blender.org/index.php/Extensions:2.6/Py/Scripts/Animation/AnimAll',
- 'tracker_url': 'http://projects.blender.org/tracker/index.php?'\
- 'func=detail&aid=24874',
+ 'tracker_url': 'https://developer.blender.org/T24874',
'category': 'Animation'}
"""-------------------------------------------------------------------------
@@ -468,4 +467,4 @@ def unregister():
pass
if __name__ == "__main__":
- register() \ No newline at end of file
+ register()
diff --git a/curve_simplify.py b/curve_simplify.py
index 331c988e..25e6dbd5 100644
--- a/curve_simplify.py
+++ b/curve_simplify.py
@@ -24,10 +24,9 @@ bl_info = {
"location": "Search > Simplify Curves",
"description": "Simplifies 3D Curve objects and animation F-Curves",
"warning": "",
- "wiki_url": "http://wiki.blender.org/index.php/Extensions:2.6/Py/"\
+ "wiki_url": "http://wiki.blender.org/index.php/Extensions:2.6/Py/"
"Scripts/Curve/Curve_Simplify",
- "tracker_url": "https://projects.blender.org/tracker/index.php?"\
- "func=detail&aid=22327",
+ "tracker_url": "https://developer.blender.org/T22327",
"category": "Add Curve"}
"""
diff --git a/development_api_navigator.py b/development_api_navigator.py
index 2f121724..dfe670e3 100644
--- a/development_api_navigator.py
+++ b/development_api_navigator.py
@@ -29,8 +29,7 @@ bl_info = {
"warning": "",
"wiki_url": "http://wiki.blender.org/index.php/Extensions:2.6/Py/"
"Scripts/Text_Editor/API_Navigator",
- "tracker_url": "http://projects.blender.org/tracker/index.php?"\
- "func=detail&aid=24982",
+ "tracker_url": "https://developer.blender.org/T24982",
"category": "Development"}
"""
diff --git a/development_icon_get.py b/development_icon_get.py
index 927c218b..ef5f8f30 100644
--- a/development_icon_get.py
+++ b/development_icon_get.py
@@ -31,8 +31,7 @@ bl_info = {
'to the clipboard',
'wiki_url': 'http://wiki.blender.org/index.php/Extensions:2.6/'\
'Py/Scripts/System/Display_All_Icons',
- 'tracker_url': 'http://projects.blender.org/tracker/index.php?'\
- 'func=detail&aid=22011',
+ 'tracker_url': 'https://developer.blender.org/T22011',
'category': 'Development'}
diff --git a/game_engine_save_as_runtime.py b/game_engine_save_as_runtime.py
index efc8690f..2d3c6275 100644
--- a/game_engine_save_as_runtime.py
+++ b/game_engine_save_as_runtime.py
@@ -26,8 +26,7 @@ bl_info = {
'warning': '',
'wiki_url': 'http://wiki.blender.org/index.php/Extensions:2.6/Py/'\
'Scripts/Game_Engine/Save_As_Runtime',
- 'tracker_url': 'https://projects.blender.org/tracker/index.php?'\
- 'func=detail&aid=23564',
+ 'tracker_url': 'https://developer.blender.org/T23564',
'category': 'Game Engine'}
import bpy
diff --git a/io_anim_acclaim/__init__.py b/io_anim_acclaim/__init__.py
index d929a44c..b08452a5 100644
--- a/io_anim_acclaim/__init__.py
+++ b/io_anim_acclaim/__init__.py
@@ -30,9 +30,8 @@ bl_info = {
"location": "File > Import-Export",
"description": "Imports Acclaim Skeleton and Motion Capture Files",
"wiki_url": "http://wiki.blender.org/index.php/Extensions:2.6/Py/"
- "Scripts/Import-Export/Acclaim_Importer",
- "tracker_url": "http://projects.blender.org/tracker/index.php?"
- "func=detail&aid=27127&group_id=153&atid=467",
+ "Scripts/Import-Export/Acclaim_Importer",
+ "tracker_url": "https://developer.blender.org/T27127",
"category": "Import-Export"}
diff --git a/io_anim_c3d/__init__.py b/io_anim_c3d/__init__.py
index 81b41979..16878437 100644
--- a/io_anim_c3d/__init__.py
+++ b/io_anim_c3d/__init__.py
@@ -30,9 +30,8 @@ bl_info = {
"location": "File > Import",
"description": "Imports C3D Graphics Lab Motion Capture files",
"wiki_url": "http://wiki.blender.org/index.php/Extensions:2.6/Py/"
- "Scripts/Import-Export/C3D_Importer",
- "tracker_url": "http://projects.blender.org/tracker/?func=detail&atid=467"
- "&aid=29061&group_id=153",
+ "Scripts/Import-Export/C3D_Importer",
+ "tracker_url": "https://developer.blender.org/T29061",
"category": 'Import-Export'}
diff --git a/io_anim_camera.py b/io_anim_camera.py
index 376a0564..c4d75f09 100644
--- a/io_anim_camera.py
+++ b/io_anim_camera.py
@@ -26,10 +26,9 @@ bl_info = {
"location": "File > Export > Cameras & Markers (.py)",
"description": "Export Cameras & Markers (.py)",
"warning": "",
- "wiki_url": "http://wiki.blender.org/index.php/Extensions:2.6/Py/"\
+ "wiki_url": "http://wiki.blender.org/index.php/Extensions:2.6/Py/"
"Scripts/Import-Export/Camera_Animation",
- "tracker_url": "https://projects.blender.org/tracker/index.php?"\
- "func=detail&aid=22835",
+ "tracker_url": "https://developer.blender.org/T22835",
"support": 'OFFICIAL',
"category": "Import-Export"}
diff --git a/io_anim_nuke_chan/__init__.py b/io_anim_nuke_chan/__init__.py
index 54bd709d..c02deb5d 100644
--- a/io_anim_nuke_chan/__init__.py
+++ b/io_anim_nuke_chan/__init__.py
@@ -27,9 +27,8 @@ bl_info = {
"description": "Import/Export object's animation with nuke",
"warning": "",
"wiki_url": "http://wiki.blender.org/index.php/Extensions:2.6/Py/"
- "Scripts/Import-Export/Nuke",
- "tracker_url": "http://projects.blender.org/tracker/?"
- "func=detail&atid=467&aid=28368&group_id=153",
+ "Scripts/Import-Export/Nuke",
+ "tracker_url": "https://developer.blender.org/T28368",
"category": "Import-Export"}
diff --git a/io_coat3D/__init__.py b/io_coat3D/__init__.py
index 33faf0ec..e4fa915f 100644
--- a/io_coat3D/__init__.py
+++ b/io_coat3D/__init__.py
@@ -24,14 +24,12 @@ bl_info = {
"location": "Scene > 3D-Coat Applink",
"description": "Transfer data between 3D-Coat/Blender",
"warning": "",
- "wiki_url": "http://wiki.blender.org/index.php/Extensions:2.6/Py/" \
+ "wiki_url": "http://wiki.blender.org/index.php/Extensions:2.6/Py/"
"Scripts/Import-Export/3dcoat_applink",
- "tracker_url": "https://projects.blender.org/tracker/?"\
- "func=detail&aid=24446",
+ "tracker_url": "https://developer.blender.org/T24446",
"category": "Import-Export"}
-
if "bpy" in locals():
import imp
imp.reload(coat)
@@ -43,13 +41,13 @@ else:
import bpy
from bpy.props import *
-
+
def register():
bpy.coat3D = dict()
bpy.coat3D['active_coat'] = ''
bpy.coat3D['status'] = 0
bpy.coat3D['kuva'] = 1
-
+
class ObjectCoat3D(bpy.types.PropertyGroup):
objpath = StringProperty(name="Object_Path")
applink_name = StringProperty(name="Object_Applink_name")
@@ -71,7 +69,7 @@ def register():
subtype="DIR_PATH",
)
cursor_loc = FloatVectorProperty(name="Cursor_loc",description="location")
-
+
exchangedir = StringProperty(
name="FilePath",
subtype="DIR_PATH"
@@ -82,8 +80,8 @@ def register():
)
-
-
+
+
wasactive = StringProperty(
name="Pass active object",
)
@@ -248,7 +246,7 @@ def unregister():
del bpy.types.Object.coat3D
del bpy.types.Scene.coat3D
del bpy.coat3D
-
+
bpy.utils.unregister_module(__name__)
diff --git a/io_convert_image_to_mesh_img/__init__.py b/io_convert_image_to_mesh_img/__init__.py
index 08f9c2b4..1191d033 100644
--- a/io_convert_image_to_mesh_img/__init__.py
+++ b/io_convert_image_to_mesh_img/__init__.py
@@ -24,10 +24,9 @@ bl_info = {
"location": "File > Import > HiRISE DTM from PDS IMG (.IMG)",
"description": "Import a HiRISE DTM formatted as a PDS IMG file",
"warning": "May consume a lot of memory",
- "wiki_url": "http://wiki.blender.org/index.php/Extensions:2.6/Py/"\
+ "wiki_url": "http://wiki.blender.org/index.php/Extensions:2.6/Py/"
"Scripts/Import-Export/HiRISE_DTM_from_PDS_IMG",
- "tracker_url": "https://projects.blender.org/tracker/index.php?"\
- "func=detail&aid=24897&group_id=153&atid=469",
+ "tracker_url": "https://developer.blender.org/T24897",
"category": "Import-Export"}
diff --git a/io_curve_svg/__init__.py b/io_curve_svg/__init__.py
index 4a90e5ec..0c05e8da 100644
--- a/io_curve_svg/__init__.py
+++ b/io_curve_svg/__init__.py
@@ -25,13 +25,13 @@ bl_info = {
"location": "File > Import > Scalable Vector Graphics (.svg)",
"description": "Import SVG as curves",
"warning": "",
- "wiki_url": "http://wiki.blender.org/index.php/Extensions:2.6/Py/"\
+ "wiki_url": "http://wiki.blender.org/index.php/Extensions:2.6/Py/"
"Scripts/Import-Export/SVG",
- "tracker_url": "http://projects.blender.org/tracker/index.php?"\
- "func=detail&aid=26166&",
+ "tracker_url": "https://developer.blender.org/T26166",
"support": 'OFFICIAL',
"category": "Import-Export"}
+
# To support reload properly, try to access a package var,
# if it's there, reload everything
if "bpy" in locals():
diff --git a/io_export_after_effects.py b/io_export_after_effects.py
index 1594b625..8d041952 100644
--- a/io_export_after_effects.py
+++ b/io_export_after_effects.py
@@ -20,16 +20,16 @@
bl_info = {
"name": "Export: Adobe After Effects (.jsx)",
- "description": "Export cameras, selected objects & camera solution 3D Markers to Adobe After Effects CS3 and above",
+ "description": "Export cameras, selected objects & camera solution "
+ "3D Markers to Adobe After Effects CS3 and above",
"author": "Bartek Skorupa",
"version": (0, 64),
"blender": (2, 69, 0),
"location": "File > Export > Adobe After Effects (.jsx)",
"warning": "",
- "wiki_url": "http://wiki.blender.org/index.php/Extensions:2.6/Py/"\
+ "wiki_url": "http://wiki.blender.org/index.php/Extensions:2.6/Py/"
"Scripts/Import-Export/Adobe_After_Effects",
- "tracker_url": "https://projects.blender.org/tracker/index.php?"\
- "func=detail&aid=29858",
+ "tracker_url": "https://developer.blender.org/T29858",
"category": "Import-Export",
}
diff --git a/io_export_dxf/__init__.py b/io_export_dxf/__init__.py
index c79ba5ea..6ce740ef 100644
--- a/io_export_dxf/__init__.py
+++ b/io_export_dxf/__init__.py
@@ -23,10 +23,11 @@ bl_info = {
"location": "File > Export > Autodesk (.dxf)",
"description": "The script exports Blender geometry to DXF format r12 version.",
"warning": "Under construction! Visit Wiki for details.",
- "wiki_url": "http://wiki.blender.org/index.php/Extensions:2.6/Py/Scripts/Import-Export/DXF_Exporter",
- "tracker_url": "https://projects.blender.org/tracker/index.php?func=detail&aid=28469",
- "category": "Import-Export"
-}
+ "wiki_url": "http://wiki.blender.org/index.php/Extensions:2.6/Py/"
+ "Scripts/Import-Export/DXF_Exporter",
+ "tracker_url": "https://developer.blender.org/T28469",
+ "category": "Import-Export"}
+
import bpy
from .operator import DXFExporter
diff --git a/io_export_pc2.py b/io_export_pc2.py
index 37953d53..80f74dc3 100644
--- a/io_export_pc2.py
+++ b/io_export_pc2.py
@@ -24,13 +24,16 @@ bl_info = {
"location": "File > Export > Pointcache (.pc2)",
"description": "Export mesh Pointcache data (.pc2)",
"warning": "",
- "wiki_url": "http://wiki.blender.org/index.php/Extensions:2.6/Py/"\
+ "wiki_url": "http://wiki.blender.org/index.php/Extensions:2.6/Py/"
"Scripts/Import-Export/PC2_Pointcache_export",
- "tracker_url": "https://projects.blender.org/tracker/index.php?"\
- "func=detail&aid=24703",
+ "tracker_url": "",
"category": "Import-Export"}
"""
+Related links:
+https://developer.blender.org/T34456
+https://developer.blender.org/T25408
+
Usage Notes:
in Maya Mel:
@@ -62,7 +65,7 @@ def do_export(context, props, filepath):
vertCount = len(me.vertices)
sampletimes = getSampling(start, end, sampling)
sampleCount = len(sampletimes)
-
+
# Create the header
headerFormat='<12siiffi'
headerStr = struct.pack(headerFormat, b'POINTCACHE2\0',
@@ -70,11 +73,11 @@ def do_export(context, props, filepath):
file = open(filepath, "wb")
file.write(headerStr)
-
+
for frame in sampletimes:
sc.frame_set(frame)
me = ob.to_mesh(sc, apply_modifiers, 'PREVIEW')
-
+
if len(me.vertices) != vertCount:
file.close()
try:
@@ -85,18 +88,18 @@ def do_export(context, props, filepath):
empty.close()
print('Export failed. Vertexcount of Object is not constant')
return False
-
+
if props.world_space:
me.transform(ob.matrix_world)
if props.rot_x90:
me.transform(mat_x90)
-
+
for v in me.vertices:
thisVertex = struct.pack('<fff', float(v.co[0]),
float(v.co[1]),
float(v.co[2]))
file.write(thisVertex)
-
+
file.flush()
file.close()
return True
@@ -109,7 +112,7 @@ class Export_pc2(bpy.types.Operator, ExportHelper):
bl_label = "Export Pointcache (.pc2)"
filename_ext = ".pc2"
-
+
rot_x90 = BoolProperty(name="Convert to Y-up",
description="Rotate 90 degrees around X to convert to y-up",
default=True,
@@ -147,7 +150,7 @@ class Export_pc2(bpy.types.Operator, ExportHelper):
),
default='1',
)
-
+
@classmethod
def poll(cls, context):
return context.active_object.type in {'MESH', 'CURVE', 'SURFACE', 'FONT'}
@@ -160,11 +163,11 @@ class Export_pc2(bpy.types.Operator, ExportHelper):
filepath = bpy.path.ensure_ext(filepath, self.filename_ext)
exported = do_export(context, props, filepath)
-
+
if exported:
print('finished export in %s seconds' %((time.time() - start_time)))
print(filepath)
-
+
return {'FINISHED'}
def invoke(self, context, event):
@@ -202,6 +205,6 @@ def unregister():
bpy.types.INFO_MT_file_export.remove(menu_func)
#bpy.types.VIEW3D_PT_tools_objectmode.remove(menu_func)
-
+
if __name__ == "__main__":
register()
diff --git a/io_export_unreal_psk_psa.py b/io_export_unreal_psk_psa.py
index 787ca24d..d2982b34 100644
--- a/io_export_unreal_psk_psa.py
+++ b/io_export_unreal_psk_psa.py
@@ -24,14 +24,13 @@ bl_info = {
"location": "File > Export > Skeletal Mesh/Animation Data (.psk/.psa)",
"description": "Export Skeleletal Mesh/Animation Data",
"warning": "",
- "wiki_url": "http://wiki.blender.org/index.php/Extensions:2.6/Py/"\
+ "wiki_url": "http://wiki.blender.org/index.php/Extensions:2.6/Py/"
"Scripts/Import-Export/Unreal_psk_psa",
- "tracker_url": "https://projects.blender.org/tracker/index.php?"\
- "func=detail&aid=21366",
+ "tracker_url": "https://developer.blender.org/T21366",
"category": "Import-Export"}
"""
--- Unreal Skeletal Mesh and Animation Export (.psk and .psa) export script v0.0.1 --<br>
+-- Unreal Skeletal Mesh and Animation Export (.psk and .psa) export script v0.0.1 --<br>
- NOTES:
- This script Exports To Unreal's PSK and PSA file formats for Skeletal Meshes and Animations. <br>
@@ -45,11 +44,11 @@ bl_info = {
[ - Edit by: Darknet
- v0.0.3 - v0.0.12
-- This will work on UT3 and it is a stable version that work with vehicle for testing.
+- This will work on UT3 and it is a stable version that work with vehicle for testing.
- Main Bone fix no dummy needed to be there.
- Just bone issues position, rotation, and offset for psk.
- The armature bone position, rotation, and the offset of the bone is fix. It was to deal with skeleton mesh export for psk.
-- Animation is fix for position, offset, rotation bone support one rotation direction when armature build.
+- Animation is fix for position, offset, rotation bone support one rotation direction when armature build.
- It will convert your mesh into triangular when exporting to psk file.
- Did not work with psa export yet.
@@ -151,7 +150,7 @@ from bpy.props import *
from struct import pack
# REFERENCE MATERIAL JUST IN CASE:
-#
+#
# U = x / sqrt(x^2 + y^2 + z^2)
# V = y / sqrt(x^2 + y^2 + z^2)
#
@@ -196,21 +195,21 @@ def verbose( msg, iteration=-1, max_iterations=4, msg_truncated="..." ):
return
print(msg)
-
+
#===========================================================================
# Log header/separator
#===========================================================================
def header( msg, justify='LEFT', spacer='_', cols=78 ):
-
+
if justify == 'LEFT':
s = '{:{spacer}<{cols}}'.format(msg+" ", spacer=spacer, cols=cols)
-
+
elif justify == 'RIGHT':
s = '{:{spacer}>{cols}}'.format(" "+msg, spacer=spacer, cols=cols)
-
+
else:
s = '{:{spacer}^{cols}}'.format(" "+msg+" ", spacer=spacer, cols=cols)
-
+
return "\n" + s + "\n"
#===========================================================================
@@ -218,11 +217,11 @@ def header( msg, justify='LEFT', spacer='_', cols=78 ):
# the object must be usable as a dictionary key
#===========================================================================
class ObjMap:
-
+
def __init__(self):
self.dict = {}
self.next = 0
-
+
def get(self, obj):
if obj in self.dict:
return self.dict[obj]
@@ -231,37 +230,37 @@ class ObjMap:
self.next = self.next + 1
self.dict[obj] = id
return id
-
+
def items(self):
getval = operator.itemgetter(0)
getkey = operator.itemgetter(1)
return map(getval, sorted(self.dict.items(), key=getkey))
#===========================================================================
-# RG - UNREAL DATA STRUCTS - CONVERTED FROM C STRUCTS GIVEN ON UDN SITE
+# RG - UNREAL DATA STRUCTS - CONVERTED FROM C STRUCTS GIVEN ON UDN SITE
# provided here: http://udn.epicgames.com/Two/BinaryFormatSpecifications.html
# updated UDK (Unreal Engine 3): http://udn.epicgames.com/Three/BinaryFormatSpecifications.html
#===========================================================================
class FQuat:
- def __init__(self):
+ def __init__(self):
self.X = 0.0
self.Y = 0.0
self.Z = 0.0
self.W = 1.0
-
+
def dump(self):
return pack('ffff', self.X, self.Y, self.Z, self.W)
-
+
def __cmp__(self, other):
return cmp(self.X, other.X) \
or cmp(self.Y, other.Y) \
or cmp(self.Z, other.Z) \
or cmp(self.W, other.W)
-
+
def __hash__(self):
return hash(self.X) ^ hash(self.Y) ^ hash(self.Z) ^ hash(self.W)
-
+
def __str__(self):
return "[%f,%f,%f,%f](FQuat)" % (self.X, self.Y, self.Z, self.W)
@@ -271,34 +270,34 @@ class FVector(object):
self.X = X
self.Y = Y
self.Z = Z
-
+
def dump(self):
return pack('fff', self.X, self.Y, self.Z)
-
+
def __cmp__(self, other):
return cmp(self.X, other.X) \
or cmp(self.Y, other.Y) \
or cmp(self.Z, other.Z)
-
+
def _key(self):
return (type(self).__name__, self.X, self.Y, self.Z)
-
+
def __hash__(self):
return hash(self._key())
-
+
def __eq__(self, other):
if not hasattr(other, '_key'):
return False
- return self._key() == other._key()
-
+ return self._key() == other._key()
+
def dot(self, other):
return self.X * other.X + self.Y * other.Y + self.Z * other.Z
-
+
def cross(self, other):
return FVector(self.Y * other.Z - self.Z * other.Y,
self.Z * other.X - self.X * other.Z,
self.X * other.Y - self.Y * other.X)
-
+
def sub(self, other):
return FVector(self.X - other.X,
self.Y - other.Y,
@@ -313,7 +312,7 @@ class VJointPos:
self.XSize = 0.0
self.YSize = 0.0
self.ZSize = 0.0
-
+
def dump(self):
return self.Orientation.dump() + self.Position.dump() + pack('4f', self.Length, self.XSize, self.YSize, self.ZSize)
@@ -332,7 +331,7 @@ class AnimInfoBinary:
self.StartBone = 0
self.FirstRawFrame = 0
self.NumRawFrames = 0
-
+
def dump(self):
return pack('64s64siiiifffiii', str.encode(self.Name), str.encode(self.Group), self.TotalBones, self.RootInclude, self.KeyCompressionStyle, self.KeyQuotum, self.KeyPrediction, self.TrackTime, self.AnimRate, self.StartBone, self.FirstRawFrame, self.NumRawFrames)
@@ -343,7 +342,7 @@ class VChunkHeader:
self.TypeFlag = 1999801 # special value
self.DataSize = type_size
self.DataCount = 0
-
+
def dump(self):
return pack('20siii', self.ChunkID, self.TypeFlag, self.DataSize, self.DataCount)
@@ -357,7 +356,7 @@ class VMaterial:
self.AuxFlags = 0 # DWORD
self.LodBias = 0
self.LodStyle = 0
-
+
def dump(self):
#print("DATA MATERIAL:",self.MaterialName)
return pack('64siLiLii', str.encode(self.MaterialName), self.TextureIndex, self.PolyFlags, self.AuxMaterial, self.AuxFlags, self.LodBias, self.LodStyle)
@@ -370,11 +369,11 @@ class VBone:
self.NumChildren = 0
self.ParentIndex = 0
self.BonePos = VJointPos()
-
+
def dump(self):
return pack('64sLii', str.encode(self.Name), self.Flags, self.NumChildren, self.ParentIndex) + self.BonePos.dump()
-#same as above - whatever - this is how Epic does it...
+#same as above - whatever - this is how Epic does it...
class FNamedBoneBinary:
def __init__(self):
@@ -384,7 +383,7 @@ class FNamedBoneBinary:
self.ParentIndex = 0
self.BonePos = VJointPos()
self.IsRealBone = 0 # this is set to 1 when the bone is actually a bone in the mesh and not a dummy
-
+
def dump(self):
return pack('64sLii', str.encode(self.Name), self.Flags, self.NumChildren, self.ParentIndex) + self.BonePos.dump()
@@ -394,7 +393,7 @@ class VRawBoneInfluence:
self.Weight = 0.0
self.PointIndex = 0
self.BoneIndex = 0
-
+
def dump(self):
return pack('fii', self.Weight, self.PointIndex, self.BoneIndex)
@@ -404,7 +403,7 @@ class VQuatAnimKey:
self.Position = FVector()
self.Orientation = FQuat()
self.Time = 0.0
-
+
def dump(self):
return self.Position.dump() + self.Orientation.dump() + pack('f', self.Time)
@@ -416,25 +415,25 @@ class VVertex(object):
self.V = 0.0
self.MatIndex = 0 # BYTE
self.Reserved = 0 # BYTE
- self.SmoothGroup = 0
-
+ self.SmoothGroup = 0
+
def dump(self):
return pack('HHffBBH', self.PointIndex, 0, self.U, self.V, self.MatIndex, self.Reserved, 0)
-
+
def __cmp__(self, other):
return cmp(self.PointIndex, other.PointIndex) \
or cmp(self.U, other.U) \
or cmp(self.V, other.V) \
or cmp(self.MatIndex, other.MatIndex) \
or cmp(self.Reserved, other.Reserved) \
- or cmp(self.SmoothGroup, other.SmoothGroup )
-
+ or cmp(self.SmoothGroup, other.SmoothGroup )
+
def _key(self):
return (type(self).__name__, self.PointIndex, self.U, self.V, self.MatIndex, self.Reserved)
-
+
def __hash__(self):
return hash(self._key())
-
+
def __eq__(self, other):
if not hasattr(other, '_key'):
return False
@@ -447,7 +446,7 @@ class VPointSimple:
def __cmp__(self, other):
return cmp(self.Point, other.Point)
-
+
def __hash__(self):
return hash(self._key())
@@ -463,26 +462,26 @@ class VPoint(object):
def __init__(self):
self.Point = FVector()
- self.SmoothGroup = 0
-
+ self.SmoothGroup = 0
+
def dump(self):
return self.Point.dump()
-
+
def __cmp__(self, other):
return cmp(self.Point, other.Point) \
- or cmp(self.SmoothGroup, other.SmoothGroup)
-
+ or cmp(self.SmoothGroup, other.SmoothGroup)
+
def _key(self):
return (type(self).__name__, self.Point, self.SmoothGroup)
-
+
def __hash__(self):
return hash(self._key()) \
- ^ hash(self.SmoothGroup)
-
+ ^ hash(self.SmoothGroup)
+
def __eq__(self, other):
if not hasattr(other, '_key'):
return False
- return self._key() == other._key()
+ return self._key() == other._key()
class VTriangle:
@@ -493,7 +492,7 @@ class VTriangle:
self.MatIndex = 0 # BYTE
self.AuxMatIndex = 0 # BYTE
self.SmoothingGroups = 0 # DWORD
-
+
def dump(self):
return pack('HHHBBL', self.WedgeIndex0, self.WedgeIndex1, self.WedgeIndex2, self.MatIndex, self.AuxMatIndex, self.SmoothingGroups)
#print("smooth",self.SmoothingGroups)
@@ -503,21 +502,21 @@ class VTriangle:
#===========================================================================
#===========================================================================
-# RG - helper class to handle the normal way the UT files are stored
+# RG - helper class to handle the normal way the UT files are stored
# as sections consisting of a header and then a list of data structures
#===========================================================================
class FileSection:
-
+
def __init__(self, name, type_size):
self.Header = VChunkHeader(name, type_size)
self.Data = [] # list of datatypes
-
+
def dump(self):
data = self.Header.dump()
for i in range(len(self.Data)):
data = data + self.Data[i].dump()
return data
-
+
def UpdateHeader(self):
self.Header.DataCount = len(self.Data)
@@ -525,7 +524,7 @@ class FileSection:
# PSK
#===========================================================================
class PSKFile:
-
+
def __init__(self):
self.GeneralHeader = VChunkHeader("ACTRHEAD", 0)
self.Points = FileSection("PNTS0000", SIZE_VPOINT) # VPoint
@@ -534,8 +533,8 @@ class PSKFile:
self.Materials = FileSection("MATT0000", SIZE_VMATERIAL) # VMaterial
self.Bones = FileSection("REFSKELT", SIZE_VBONE) # VBone
self.Influences = FileSection("RAWWEIGHTS", SIZE_VRAWBONEINFLUENCE) # VRawBoneInfluence
-
- #RG - this mapping is not dumped, but is used internally to store the new point indices
+
+ #RG - this mapping is not dumped, but is used internally to store the new point indices
# for vertex groups calculated during the mesh dump, so they can be used again
# to dump bone influences during the armature dump
#
@@ -545,29 +544,29 @@ class PSKFile:
# Layout:
# { groupname : [ (index, weight), ... ], ... }
#
- # example:
+ # example:
# { 'MyVertexGroup' : [ (0, 1.0), (5, 1.0), (3, 0.5) ] , 'OtherGroup' : [(2, 1.0)] }
-
- self.VertexGroups = {}
-
+
+ self.VertexGroups = {}
+
def AddPoint(self, p):
self.Points.Data.append(p)
-
+
def AddWedge(self, w):
self.Wedges.Data.append(w)
-
+
def AddFace(self, f):
self.Faces.Data.append(f)
-
+
def AddMaterial(self, m):
self.Materials.Data.append(m)
-
+
def AddBone(self, b):
self.Bones.Data.append(b)
-
+
def AddInfluence(self, i):
self.Influences.Data.append(i)
-
+
def UpdateHeaders(self):
self.Points.UpdateHeader()
self.Wedges.UpdateHeader()
@@ -575,12 +574,12 @@ class PSKFile:
self.Materials.UpdateHeader()
self.Bones.UpdateHeader()
self.Influences.UpdateHeader()
-
+
def dump(self):
self.UpdateHeaders()
data = self.GeneralHeader.dump() + self.Points.dump() + self.Wedges.dump() + self.Faces.dump() + self.Materials.dump() + self.Bones.dump() + self.Influences.dump()
return data
-
+
def GetMatByIndex(self, mat_index):
if mat_index >= 0 and len(self.Materials.Data) > mat_index:
return self.Materials.Data[mat_index]
@@ -590,7 +589,7 @@ class PSKFile:
m.MaterialName = MaterialName[mat_index]
self.AddMaterial(m)
return m
-
+
def PrintOut(self):
print( "{:>16} {:}".format( "Points", len(self.Points.Data) ) )
print( "{:>16} {:}".format( "Wedges", len(self.Wedges.Data) ) )
@@ -603,16 +602,16 @@ class PSKFile:
# PSA
#
# Notes from UDN:
-# The raw key array holds all the keys for all the bones in all the specified sequences,
+# The raw key array holds all the keys for all the bones in all the specified sequences,
# organized as follows:
-# For each AnimInfoBinary's sequence there are [Number of bones] times [Number of frames keys]
-# in the VQuatAnimKeys, laid out as tracks of [numframes] keys for each bone in the order of
-# the bones as defined in the array of FnamedBoneBinary in the PSA.
+# For each AnimInfoBinary's sequence there are [Number of bones] times [Number of frames keys]
+# in the VQuatAnimKeys, laid out as tracks of [numframes] keys for each bone in the order of
+# the bones as defined in the array of FnamedBoneBinary in the PSA.
#
-# Once the data from the PSK (now digested into native skeletal mesh) and PSA (digested into
-# a native animation object containing one or more sequences) are associated together at runtime,
-# bones are linked up by name. Any bone in a skeleton (from the PSK) that finds no partner in
-# the animation sequence (from the PSA) will assume its reference pose stance ( as defined in
+# Once the data from the PSK (now digested into native skeletal mesh) and PSA (digested into
+# a native animation object containing one or more sequences) are associated together at runtime,
+# bones are linked up by name. Any bone in a skeleton (from the PSK) that finds no partner in
+# the animation sequence (from the PSA) will assume its reference pose stance ( as defined in
# the offsets & rotations that are in the VBones making up the reference skeleton from the PSK)
#===========================================================================
class PSAFile:
@@ -624,57 +623,57 @@ class PSAFile:
self.RawKeys = FileSection("ANIMKEYS", SIZE_VQUATANIMKEY) #VQuatAnimKey
# this will take the format of key=Bone Name, value = (BoneIndex, Bone Object)
# THIS IS NOT DUMPED
- self.BoneLookup = {}
+ self.BoneLookup = {}
def AddBone(self, b):
self.Bones.Data.append(b)
-
+
def AddAnimation(self, a):
self.Animations.Data.append(a)
-
+
def AddRawKey(self, k):
self.RawKeys.Data.append(k)
-
+
def UpdateHeaders(self):
self.Bones.UpdateHeader()
self.Animations.UpdateHeader()
self.RawKeys.UpdateHeader()
-
+
def GetBoneByIndex(self, bone_index):
if bone_index >= 0 and len(self.Bones.Data) > bone_index:
return self.Bones.Data[bone_index]
-
+
def IsEmpty(self):
return (len(self.Bones.Data) == 0 or len(self.Animations.Data) == 0)
-
+
def StoreBone(self, b):
self.BoneLookup[b.Name] = [-1, b]
-
+
def UseBone(self, bone_name):
if bone_name in self.BoneLookup:
bone_data = self.BoneLookup[bone_name]
-
+
if bone_data[0] == -1:
bone_data[0] = len(self.Bones.Data)
self.AddBone(bone_data[1])
#self.Bones.Data.append(bone_data[1])
-
+
return bone_data[0]
-
+
def GetBoneByName(self, bone_name):
if bone_name in self.BoneLookup:
bone_data = self.BoneLookup[bone_name]
return bone_data[1]
-
+
def GetBoneIndex(self, bone_name):
if bone_name in self.BoneLookup:
bone_data = self.BoneLookup[bone_name]
return bone_data[0]
-
+
def dump(self):
self.UpdateHeaders()
return self.GeneralHeader.dump() + self.Bones.dump() + self.Animations.dump() + self.RawKeys.dump()
-
+
def PrintOut(self):
print( "{:>16} {:}".format( "Bones", len(self.Bones.Data) ) )
print( "{:>16} {:}".format( "Animations", len(self.Animations.Data) ) )
@@ -709,7 +708,7 @@ def make_namedbonebinary( name, parent_index, child_count, orientation_quat, pos
bone.BonePos.Position.Y = position_vect.y
bone.BonePos.Position.Z = position_vect.z
bone.IsRealBone = is_real
- return bone
+ return bone
def make_fquat( bquat ):
quat = FQuat()
@@ -719,7 +718,7 @@ def make_fquat( bquat ):
quat.Z = -bquat.z
quat.W = bquat.w
return quat
-
+
def make_fquat_default( bquat ):
quat = FQuat()
#print(dir(bquat))
@@ -737,7 +736,7 @@ def is_1d_face( face, mesh ):
v0 = face.vertices[0]
v1 = face.vertices[1]
v2 = face.vertices[2]
-
+
return (mesh.vertices[v0].co == mesh.vertices[v1].co \
or mesh.vertices[v1].co == mesh.vertices[v2].co \
or mesh.vertices[v2].co == mesh.vertices[v0].co)
@@ -748,9 +747,9 @@ def is_1d_face( face, mesh ):
 # (renamed to separate it from VVertex.SmoothGroup)
#===========================================================================
class SmoothingGroup:
-
+
static_id = 1
-
+
def __init__(self):
self.faces = []
self.neighboring_faces = []
@@ -758,12 +757,12 @@ class SmoothingGroup:
self.id = -1
self.local_id = SmoothingGroup.static_id
SmoothingGroup.static_id += 1
-
+
def __cmp__(self, other):
if isinstance(other, SmoothingGroup):
return cmp( self.local_id, other.local_id )
return -1
-
+
def __hash__(self):
return hash(self.local_id)
@@ -776,37 +775,37 @@ class SmoothingGroup:
temp_id = temp_id << 1
else:
raise Error("Smoothing Group ID Overflowed, Smoothing Group evidently has more than 31 neighboring groups")
-
+
self.id = temp_id
return self.id
-
+
def make_neighbor(self, new_neighbor):
if new_neighbor not in self.neighboring_groups:
self.neighboring_groups.append( new_neighbor )
def contains_face(self, face):
return (face in self.faces)
-
+
def add_neighbor_face(self, face):
if not face in self.neighboring_faces:
self.neighboring_faces.append( face )
-
+
def add_face(self, face):
if not face in self.faces:
self.faces.append( face )
def determine_edge_sharing( mesh ):
-
+
edge_sharing_list = dict()
-
+
for edge in mesh.edges:
edge_sharing_list[edge.key] = []
-
+
for face in mesh.tessfaces:
for key in face.edge_keys:
if not face in edge_sharing_list[key]:
edge_sharing_list[key].append(face) # mark this face as sharing this edge
-
+
return edge_sharing_list
def find_edges( mesh, key ):
@@ -819,21 +818,21 @@ def find_edges( mesh, key ):
return edge.index
def add_face_to_smoothgroup( mesh, face, edge_sharing_list, smoothgroup ):
-
+
if face in smoothgroup.faces:
return
smoothgroup.add_face(face)
-
+
for key in face.edge_keys:
-
+
edge_id = find_edges(mesh, key)
-
+
if edge_id != None:
-
+
# not sharp
if not( mesh.edges[edge_id].use_edge_sharp):
-
+
for shared_face in edge_sharing_list[key]:
if shared_face != face:
# recursive
@@ -845,14 +844,14 @@ def add_face_to_smoothgroup( mesh, face, edge_sharing_list, smoothgroup ):
smoothgroup.add_neighbor_face( shared_face )
def determine_smoothgroup_for_face( mesh, face, edge_sharing_list, smoothgroup_list ):
-
+
for group in smoothgroup_list:
if (face in group.faces):
return
-
+
smoothgroup = SmoothingGroup();
add_face_to_smoothgroup( mesh, face, edge_sharing_list, smoothgroup )
-
+
if not smoothgroup in smoothgroup_list:
smoothgroup_list.append( smoothgroup )
@@ -869,16 +868,16 @@ def build_neighbors_tree( smoothgroup_list ):
# parse_smooth_groups
#===========================================================================
def parse_smooth_groups( mesh ):
-
+
print("Parsing smooth groups...")
-
+
t = time.clock()
smoothgroup_list = []
edge_sharing_list = determine_edge_sharing(mesh)
#print("faces:",len(mesh.tessfaces))
interval = math.floor(len(mesh.tessfaces) / 100)
if interval == 0: #if the faces are few do this
- interval = math.floor(len(mesh.tessfaces) / 10)
+ interval = math.floor(len(mesh.tessfaces) / 10)
#print("FACES:",len(mesh.tessfaces),"//100 =" "interval:",interval)
for face in mesh.tessfaces:
#print(dir(face))
@@ -888,14 +887,14 @@ def parse_smooth_groups( mesh ):
print("Processing... {}%\r".format( int(face.index / len(mesh.tessfaces) * 100) ), end='')
sys.stdout.flush()
print("Completed" , ' '*20)
-
+
verbose("len(smoothgroup_list)={}".format(len(smoothgroup_list)))
-
+
build_neighbors_tree(smoothgroup_list)
-
+
for group in smoothgroup_list:
group.get_valid_smoothgroup_id()
-
+
print("Smooth group parsing completed in {:.2f}s".format(time.clock() - t))
return smoothgroup_list
@@ -904,11 +903,11 @@ def parse_smooth_groups( mesh ):
# blender 2.50 format using the Operators/command convert the mesh to tri mesh
#===========================================================================
def triangulate_mesh( object ):
-
+
verbose(header("triangulateNMesh"))
#print(type(object))
scene = bpy.context.scene
-
+
me_ob = object.copy()
me_ob.data = object.to_mesh(bpy.context.scene, True, 'PREVIEW') #write data object
bpy.context.scene.objects.link(me_ob)
@@ -916,24 +915,24 @@ def triangulate_mesh( object ):
bpy.ops.object.mode_set(mode='OBJECT')
for i in scene.objects:
i.select = False # deselect all objects
-
+
me_ob.select = True
scene.objects.active = me_ob
-
+
print("Copy and Convert mesh just incase any way...")
-
+
bpy.ops.object.mode_set(mode='EDIT')
bpy.ops.mesh.select_all(action='SELECT')# select all the face/vertex/edge
bpy.ops.object.mode_set(mode='EDIT')
bpy.ops.mesh.quads_convert_to_tris()
bpy.context.scene.update()
-
+
bpy.ops.object.mode_set(mode='OBJECT')
-
+
bpy.context.scene.udk_option_triangulate = True
-
+
verbose("Triangulated mesh")
-
+
me_ob.data = me_ob.to_mesh(bpy.context.scene, True, 'PREVIEW') #write data object
bpy.context.scene.update()
return me_ob
@@ -945,7 +944,7 @@ def meshmerge(selectedobjects):
if len(selectedobjects) > 1:
print("selectedobjects:",len(selectedobjects)) #print select object
count = 0 #reset count
- for count in range(len( selectedobjects)):
+ for count in range(len( selectedobjects)):
#print("Index:",count)
if selectedobjects[count] != None:
me_da = selectedobjects[count].data.copy() #copy data
@@ -967,7 +966,7 @@ def meshmerge(selectedobjects):
if len(cloneobjects) > 1:
bpy.types.Scene.udk_copy_merge = True
return cloneobjects[0]
-
+
 #sort the meshes: centered ones at the top of the list, non-centered ones at the end. Based on selection order when merging, so the merged mesh stays centered.
def sortmesh(selectmesh):
print("MESH SORTING...")
@@ -980,7 +979,7 @@ def sortmesh(selectmesh):
else:#if not add here for not center
notcentermesh.append(selectmesh[countm])
selectmesh = []
- #add mesh object in order for merge object
+ #add mesh object in order for merge object
for countm in range(len(centermesh)):
selectmesh.append(centermesh[countm])
for countm in range(len(notcentermesh)):
@@ -1007,10 +1006,10 @@ def parse_mesh( mesh, psk ):
bpy.context.scene.objects.unlink(setmesh)
#print("FACES----:",len(mesh.data.tessfaces))
verbose("Working mesh object: {}".format(mesh.name))
-
+
#collect a list of the material names
print("Materials...")
-
+
mat_slot_index = 0
for slot in mesh.material_slots:
@@ -1019,7 +1018,7 @@ def parse_mesh( mesh, psk ):
MaterialName.append(slot.name)
#if slot.material.texture_slots[0] != None:
#if slot.material.texture_slots[0].texture.image.filepath != None:
- #print(" Texture path {}".format(slot.material.texture_slots[0].texture.image.filepath))
+ #print(" Texture path {}".format(slot.material.texture_slots[0].texture.image.filepath))
#create the current material
v_material = psk.GetMatByIndex(mat_slot_index)
v_material.MaterialName = slot.name
@@ -1029,7 +1028,7 @@ def parse_mesh( mesh, psk ):
verbose(" PSK index {}".format(v_material.TextureIndex))
#END slot in mesh.material_slots
-
+
# object_mat = mesh.materials[0]
#object_material_index = mesh.active_material_index
#FIXME ^ this is redundant due to "= face.material_index" in face loop
@@ -1037,19 +1036,19 @@ def parse_mesh( mesh, psk ):
wedges = ObjMap()
points = ObjMap() #vertex
points_linked = {}
-
+
discarded_face_count = 0
sys.setrecursionlimit(1000000)
smoothgroup_list = parse_smooth_groups(mesh.data)
-
+
print("{} faces".format(len(mesh.data.tessfaces)))
-
+
print("Smooth groups active:", bpy.context.scene.udk_option_smoothing_groups)
-
+
for face in mesh.data.tessfaces:
-
+
smoothgroup_id = 0x80000000
-
+
for smooth_group in smoothgroup_list:
if smooth_group.contains_face(face):
smoothgroup_id = smooth_group.id
@@ -1059,38 +1058,38 @@ def parse_mesh( mesh, psk ):
#print current_face.uv_textures
# modified by VendorX
object_material_index = face.material_index
-
+
if len(face.vertices) != 3:
raise Error("Non-triangular face (%i)" % len(face.vertices))
-
- #RG - apparently blender sometimes has problems when you do quad to triangle
+
+ #RG - apparently blender sometimes has problems when you do quad to triangle
# conversion, and ends up creating faces that have only TWO points -
- # one of the points is simply in the vertex list for the face twice.
- # This is bad, since we can't get a real face normal for a LINE, we need
- # a plane for this. So, before we add the face to the list of real faces,
- # ensure that the face is actually a plane, and not a line. If it is not
+ # one of the points is simply in the vertex list for the face twice.
+ # This is bad, since we can't get a real face normal for a LINE, we need
+ # a plane for this. So, before we add the face to the list of real faces,
+ # ensure that the face is actually a plane, and not a line. If it is not
# planar, just discard it and notify the user in the console after we're
# done dumping the rest of the faces
-
+
if not is_1d_face(face, mesh.data):
-
+
wedge_list = []
vect_list = []
-
+
#get or create the current material
psk.GetMatByIndex(object_material_index)
face_index = face.index
has_uv = False
face_uv = None
-
+
if len(mesh.data.uv_textures) > 0:
- has_uv = True
+ has_uv = True
uv_layer = mesh.data.tessface_uv_textures.active
face_uv = uv_layer.data[face_index]
#size(data) is number of texture faces. Each face has UVs
#print("DATA face uv: ",len(faceUV.uv), " >> ",(faceUV.uv[0][0]))
-
+
for i in range(3):
vert_index = face.vertices[i]
vert = mesh.data.vertices[vert_index]
@@ -1105,11 +1104,11 @@ def parse_mesh( mesh, psk ):
else:
#print ("No UVs?")
uv = [0.0, 0.0]
-
+
#flip V coordinate because UEd requires it and DOESN'T flip it on its own like it
#does with the mesh Y coordinates. this is otherwise known as MAGIC-2
uv[1] = 1.0 - uv[1]
-
+
# clamp UV coords if udk_option_clamp_uv is True
if bpy.context.scene.udk_option_clamp_uv:
if (uv[0] > 1):
@@ -1120,11 +1119,11 @@ def parse_mesh( mesh, psk ):
uv[1] = 1
if (uv[1] < 0):
uv[1] = 0
-
+
# RE - Append untransformed vector (for normal calc below)
# TODO: convert to Blender.Mathutils
vect_list.append( FVector(vert.co.x, vert.co.y, vert.co.z) )
-
+
# Transform position for export
#vpos = vert.co * object_material_index
@@ -1148,13 +1147,13 @@ def parse_mesh( mesh, psk ):
lPoint.Point.X = vpos.x
lPoint.Point.Y = vpos.y
lPoint.Point.Z = vpos.z
-
+
if lPoint in points_linked:
if not(p in points_linked[lPoint]):
points_linked[lPoint].append(p)
else:
points_linked[lPoint] = [p]
-
+
# Create the wedge
w = VVertex()
w.MatIndex = object_material_index
@@ -1165,18 +1164,18 @@ def parse_mesh( mesh, psk ):
w.SmoothGroup = smoothgroup_id
index_wedge = wedges.get(w)
wedge_list.append(index_wedge)
-
+
#print results
#print("result PointIndex={}, U={:.6f}, V={:.6f}, wedge_index={}".format(
# w.PointIndex,
# w.U,
# w.V,
# index_wedge))
-
+
#END for i in range(3)
# Determine face vertex order
-
+
# TODO: convert to Blender.Mathutils
# get normal from blender
no = face.normal
@@ -1202,13 +1201,13 @@ def parse_mesh( mesh, psk ):
dindex0 = face.vertices[0];
dindex1 = face.vertices[1];
dindex2 = face.vertices[2];
-
+
mesh.data.vertices[dindex0].select = True
mesh.data.vertices[dindex1].select = True
mesh.data.vertices[dindex2].select = True
-
+
raise Error("Normal coplanar with face! points:", mesh.data.vertices[dindex0].co, mesh.data.vertices[dindex1].co, mesh.data.vertices[dindex2].co)
-
+
face.select = True
if face.use_smooth == True:
tri.SmoothingGroups = 1
@@ -1219,53 +1218,53 @@ def parse_mesh( mesh, psk ):
if bpy.context.scene.udk_option_smoothing_groups:
tri.SmoothingGroups = smoothgroup_id
print("Bool Smooth")
-
+
psk.AddFace(tri)
- #END if not is_1d_face(current_face, mesh.data)
+ #END if not is_1d_face(current_face, mesh.data)
else:
discarded_face_count += 1
-
+
#END face in mesh.data.faces
-
+
print("{} points".format(len(points.dict)))
-
+
for point in points.items():
psk.AddPoint(point)
-
+
if len(points.dict) > 32767:
raise Error("Mesh vertex limit exceeded! {} > 32767".format(len(points.dict)))
-
+
print("{} wedges".format(len(wedges.dict)))
-
+
for wedge in wedges.items():
psk.AddWedge(wedge)
-
+
# alert the user to degenerate face issues
if discarded_face_count > 0:
print("WARNING: Mesh contained degenerate faces (non-planar)")
print(" Discarded {} faces".format(discarded_face_count))
-
- #RG - walk through the vertex groups and find the indexes into the PSK points array
- #for them, then store that index and the weight as a tuple in a new list of
+
+ #RG - walk through the vertex groups and find the indexes into the PSK points array
+ #for them, then store that index and the weight as a tuple in a new list of
#verts for the group that we can look up later by bone name, since Blender matches
#verts to bones for influences by having the VertexGroup named the same thing as
#the bone
-
- #[print(x, len(points_linked[x])) for x in points_linked]
+
+ #[print(x, len(points_linked[x])) for x in points_linked]
#print("pointsindex length ",len(points_linked))
#vertex group
-
+
# all vertex groups of the mesh (obj)...
for obj_vertex_group in mesh.vertex_groups:
-
+
#print(" bone group build:",obj_vertex_group.name)#print bone name
#print(dir(obj_vertex_group))
verbose("obj_vertex_group.name={}".format(obj_vertex_group.name))
-
+
vertex_list = []
-
+
# all vertices in the mesh...
for vertex in mesh.data.vertices:
#print(dir(vertex))
@@ -1280,7 +1279,7 @@ def parse_mesh( mesh, psk ):
vpos.y = vpos.y * bpy.context.scene.udk_option_scale
vpos.z = vpos.z * bpy.context.scene.udk_option_scale
p.Point.X = vpos.x
- p.Point.Y = vpos.y
+ p.Point.Y = vpos.y
p.Point.Z = vpos.z
#print(p)
#print(len(points_linked[p]))
@@ -1292,11 +1291,11 @@ def parse_mesh( mesh, psk ):
except Exception:#if get error ignore them #not safe I think
print("Error link points!")
pass
-
+
            #bone name, [point id and weight]
#print("Add Vertex Group:",obj_vertex_group.name, " No. Points:",len(vertex_list))
psk.VertexGroups[obj_vertex_group.name] = vertex_list
-
+
# remove the temporary triangulated mesh
if bpy.context.scene.udk_option_triangulate == True:
verbose("Removing temporary triangle mesh: {}".format(mesh.name))
@@ -1308,34 +1307,34 @@ def parse_mesh( mesh, psk ):
# Collate bones that belong to the UDK skeletal mesh
#===========================================================================
def parse_armature( armature, psk, psa ):
-
+
print(header("ARMATURE", 'RIGHT'))
verbose("Armature object: {} Armature data: {}".format(armature.name, armature.data.name))
-
+
# generate a list of root bone candidates
root_candidates = [b for b in armature.data.bones if b.parent == None and b.use_deform == True]
-
+
# should be a single, unambiguous result
if len(root_candidates) == 0:
raise Error("Cannot find root for UDK bones. The root bone must use deform.")
-
+
if len(root_candidates) > 1:
raise Error("Ambiguous root for UDK. More than one root bone is using deform.")
-
+
# prep for bone collection
udk_root_bone = root_candidates[0]
udk_bones = []
BoneUtil.static_bone_id = 0 # replaces global
-
+
# traverse bone chain
print("{: <3} {: <48} {: <20}".format("ID", "Bone", "Status"))
print()
recurse_bone(udk_root_bone, udk_bones, psk, psa, 0, armature.matrix_local)
-
+
# final validation
if len(udk_bones) < 3:
raise Error("Less than three bones may crash UDK (legacy issue?)")
-
+
# return a list of bones making up the entire udk skel
# this is passed to parse_animation instead of working from keyed bones in the action
return udk_bones
@@ -1350,14 +1349,14 @@ def parse_armature( armature, psk, psa ):
# indent text indent for recursive log
#===========================================================================
def recurse_bone( bone, bones, psk, psa, parent_id, parent_matrix, indent="" ):
-
+
status = "Ok"
-
+
bones.append(bone);
if not bone.use_deform:
status = "No effect"
-
+
# calc parented bone transform
if bone.parent != None:
quat = make_fquat(bone.matrix.to_quaternion())
@@ -1378,12 +1377,12 @@ def recurse_bone( bone, bones, psk, psa, parent_id, parent_matrix, indent="" ):
translation.z = translation.z * bpy.context.scene.udk_option_scale
bone_id = BoneUtil.static_bone_id # ALT VERS
BoneUtil.static_bone_id += 1 # ALT VERS
-
+
child_count = len(bone.children)
-
+
psk.AddBone( make_vbone(bone.name, parent_id, child_count, quat, translation) )
psa.StoreBone( make_namedbonebinary(bone.name, parent_id, child_count, quat, translation, 1) )
-
+
#RG - dump influences for this bone - use the data we collected in the mesh dump phase to map our bones to vertex groups
if bone.name in psk.VertexGroups:
vertex_list = psk.VertexGroups[bone.name]
@@ -1400,12 +1399,12 @@ def recurse_bone( bone, bones, psk, psa, parent_id, parent_matrix, indent="" ):
else:
status = "No vertex group"
#FIXME overwriting previous status error?
-
+
print("{:<3} {:<48} {:<20}".format(bone_id, indent+bone.name, status))
-
+
#bone.matrix_local
#recursively dump child bones
-
+
for child_bone in bone.children:
recurse_bone(child_bone, bones, psk, psa, bone_id, parent_matrix, " "+indent)
@@ -1420,12 +1419,12 @@ class BoneUtil:
# psa the PSA file object
#===========================================================================
def parse_animation( armature, udk_bones, actions_to_export, psa ):
-
+
print(header("ANIMATION", 'RIGHT'))
-
+
context = bpy.context
anim_rate = context.scene.render.fps
-
+
verbose("Armature object: {}".format(armature.name))
print("Scene: {} FPS: {} Frames: {} to {}".format(context.scene.name, anim_rate, context.scene.frame_start, context.scene.frame_end))
print("Processing {} action(s)".format(len(actions_to_export)))
@@ -1434,13 +1433,13 @@ def parse_animation( armature, udk_bones, actions_to_export, psa ):
print("None Actions Set! skipping...")
return
restoreAction = armature.animation_data.action # Q: is animation_data always valid?
-
+
restoreFrame = context.scene.frame_current # we already do this in export_proxy, but we'll do it here too for now
    raw_frame_index = 0 # used to set FirstRawFrame, separating actions in the raw keyframe array
-
+
# action loop...
for action in actions_to_export:
-
+
# removed: check for armature with no animation; all it did was force you to add one
if not len(action.fcurves):
@@ -1451,14 +1450,14 @@ def parse_animation( armature, udk_bones, actions_to_export, psa ):
# note if loop all actions that is not armature it will override and will break armature animation.
armature.animation_data.action = action
context.scene.update()
-
+
# min/max frames define range
framemin, framemax = action.frame_range
start_frame = int(framemin)
end_frame = int(framemax)
scene_range = range(start_frame, end_frame + 1)
frame_count = len(scene_range)
-
+
# create the AnimInfoBinary
anim = AnimInfoBinary()
anim.Name = action.name
@@ -1466,11 +1465,11 @@ def parse_animation( armature, udk_bones, actions_to_export, psa ):
anim.NumRawFrames = frame_count
anim.AnimRate = anim_rate
anim.FirstRawFrame = raw_frame_index
-
+
print("{}, frames {} to {} ({} frames)".format(action.name, start_frame, end_frame, frame_count))
-
+
# removed: bone lookup table
-
+
# build a list of pose bones relevant to the collated udk_bones
# fixme: could be done once, prior to loop?
udk_pose_bones = []
@@ -1483,39 +1482,39 @@ def parse_animation( armature, udk_bones, actions_to_export, psa ):
# sort in the order the bones appear in the PSA file
ordered_bones = {}
ordered_bones = sorted([(psa.UseBone(b.name), b) for b in udk_pose_bones], key=operator.itemgetter(0))
-
+
# NOTE: posebone.bone references the obj/edit bone
# REMOVED: unique_bone_indexes is redundant?
-
+
# frame loop...
for i in range(frame_count):
-
+
frame = scene_range[i]
-
+
#verbose("FRAME {}".format(i), i) # test loop sampling
-
+
# advance to frame (automatically updates the pose)
context.scene.frame_set(frame)
-
+
# compute the key for each bone
for bone_data in ordered_bones:
-
+
bone_index = bone_data[0]
pose_bone = bone_data[1]
pose_bone_matrix = mathutils.Matrix(pose_bone.matrix)
-
+
if pose_bone.parent != None:
pose_bone_parent_matrix = mathutils.Matrix(pose_bone.parent.matrix)
pose_bone_matrix = pose_bone_parent_matrix.inverted() * pose_bone_matrix
-
+
head = pose_bone_matrix.to_translation()
quat = pose_bone_matrix.to_quaternion().normalized()
-
+
if pose_bone.parent != None:
quat = make_fquat(quat)
else:
quat = make_fquat_default(quat)
-
+
#scale animation position here?
if bpy.context.scene.udk_option_scale < 0 or bpy.context.scene.udk_option_scale > 1:
head.x = head.x * bpy.context.scene.udk_option_scale
@@ -1527,25 +1526,25 @@ def parse_animation( armature, udk_bones, actions_to_export, psa ):
vkey.Position.Y = head.y
vkey.Position.Z = head.z
vkey.Orientation = quat
-
+
# frame delta = 1.0 / fps
vkey.Time = 1.0 / float(anim_rate) # according to C++ header this is "disregarded"
-
+
psa.AddRawKey(vkey)
-
+
# END for bone_data in ordered_bones
raw_frame_index += 1
-
+
# END for i in range(frame_count)
-
+
anim.TotalBones = len(ordered_bones) # REMOVED len(unique_bone_indexes)
anim.TrackTime = float(frame_count) # frame_count/anim.AnimRate makes more sense, but this is what actually works in UDK
verbose("anim.TotalBones={}, anim.TrackTime={}".format(anim.TotalBones, anim.TrackTime))
-
+
psa.AddAnimation(anim)
-
+
# END for action in actions
# restore
@@ -1560,7 +1559,7 @@ def parse_animation( armature, udk_bones, actions_to_export, psa ):
def collate_actions():
verbose(header("collate_actions"))
actions_to_export = []
-
+
for action in bpy.data.actions:
if bpy.context.scene.udk_option_selectanimations: # check if needed to select actions set for exporting it
print("Action Set is selected!")
@@ -1575,7 +1574,7 @@ def collate_actions():
continue
verbose(" + {}".format(action.name)) #action set name
actions_to_export.append(action) #add to the action array
-
+
return actions_to_export
#===========================================================================
@@ -1584,16 +1583,16 @@ def collate_actions():
#===========================================================================
def find_armature_and_mesh():
verbose(header("find_armature_and_mesh", 'LEFT', '<', 60))
-
+
context = bpy.context
active_object = context.active_object
armature = None
mesh = None
-
+
# TODO:
# this could be more intuitive
#bpy.ops.object.mode_set(mode='OBJECT')
-
+
if bpy.context.scene.udk_option_selectobjects: #if checked select object true do list object on export
print("select mode:")
if len(bpy.context.scene.udkArm_list) > 0:
@@ -1626,14 +1625,14 @@ def find_armature_and_mesh():
mesh = active_object
else:
raise Error("The selected mesh is not parented to the armature")
-
+
# otherwise, expect a single mesh parented to the armature (other object types are ignored)
else:
print("Number of meshes:",len(meshes))
print("Number of meshes (selected):",len(meshes))
if len(meshes) == 1:
mesh = meshes[0]
-
+
elif len(meshes) > 1:
if len(meshselected) >= 1:
mesh = sortmesh(meshselected)
@@ -1651,7 +1650,7 @@ def find_armature_and_mesh():
else:
#bpy.ops.object.mode_set(mode='OBJECT')
all_armatures = [obj for obj in bpy.context.scene.objects if obj.type == 'ARMATURE']
-
+
if len(all_armatures) == 1:#if armature has one scene just assign it
armature = all_armatures[0]
elif len(all_armatures) > 1:#if there more armature then find the select armature
@@ -1665,15 +1664,15 @@ def find_armature_and_mesh():
raise Error("Please select an armatures in the scene")
else:
raise Error("No armatures in scene")
-
+
verbose("Found armature: {}".format(armature.name))
-
+
meshselected = []
parented_meshes = [obj for obj in armature.children if obj.type == 'MESH']
-
+
if len(armature.children) == 0:
raise Error("The selected Armature has no mesh parented to the Armature Object!")
-
+
for obj in armature.children:
#print(dir(obj))
if obj.type == 'MESH' and obj.select == True:
@@ -1684,14 +1683,14 @@ def find_armature_and_mesh():
mesh = active_object
else:
raise Error("The selected mesh is not parented to the armature")
-
+
# otherwise, expect a single mesh parented to the armature (other object types are ignored)
else:
print("Number of meshes:",len(parented_meshes))
print("Number of meshes (selected):",len(meshselected))
if len(parented_meshes) == 1:
mesh = parented_meshes[0]
-
+
elif len(parented_meshes) > 1:
if len(meshselected) >= 1:
mesh = sortmesh(meshselected)
@@ -1699,7 +1698,7 @@ def find_armature_and_mesh():
raise Error("More than one mesh(s) parented to armature. Select object(s)!")
else:
raise Error("No mesh parented to armature")
-
+
verbose("Found mesh: {}".format(mesh.name))
if mesh == None or armature == None:
raise Error("Check Mesh and Armature are list!")
@@ -1707,7 +1706,7 @@ def find_armature_and_mesh():
#print("Armature and Mesh Vertex Groups matches Ok!")
#else:
#raise Error("Armature bones:" + str(len(armature.pose.bones)) + " Mesh Vertex Groups:" + str(len(mesh.vertex_groups)) +" doesn't match!")
-
+
    #this will check if objects need to be rebuilt.
if bpy.context.scene.udk_option_rebuildobjects:
#print("INIT... REBUILDING...")
@@ -1727,14 +1726,14 @@ def find_armature_and_mesh():
def collate_vertex_groups( mesh ):
verbose("collate_vertex_groups")
groups = []
-
+
for group in mesh.vertex_groups:
-
+
groups.append(group)
verbose(" " + group.name)
-
+
return groups
-
+
#===========================================================================
# Main
#===========================================================================
@@ -1743,72 +1742,72 @@ def export(filepath):
bpy.types.Scene.udk_copy_merge = False #in case fail to export set this to default
t = time.clock()
context = bpy.context
-
+
print("Blender Version {}.{}.{}".format(bpy.app.version[0], bpy.app.version[1], bpy.app.version[2]))
print("Filepath: {}".format(filepath))
-
+
verbose("PSK={}, PSA={}".format(context.scene.udk_option_export_psk, context.scene.udk_option_export_psa))
-
+
# find armature and mesh
# [change this to implement alternative methods; raise Error() if not found]
udk_armature, udk_mesh = find_armature_and_mesh()
-
+
# check misc conditions
if not (udk_armature.scale.x == udk_armature.scale.y == udk_armature.scale.z == 1):
raise Error("bad armature scale: armature object should have uniform scale of 1 (ALT-S)")
-
+
if not (udk_mesh.scale.x == udk_mesh.scale.y == udk_mesh.scale.z == 1):
raise Error("bad mesh scale: mesh object should have uniform scale of 1 (ALT-S)")
-
+
if not (udk_armature.location.x == udk_armature.location.y == udk_armature.location.z == 0):
raise Error("bad armature location: armature should be located at origin (ALT-G)")
-
+
if not (udk_mesh.location.x == udk_mesh.location.y == udk_mesh.location.z == 0):
raise Error("bad mesh location: mesh should be located at origin (ALT-G)")
-
+
# prep
psk = PSKFile()
psa = PSAFile()
-
+
# step 1
parse_mesh(udk_mesh, psk)
-
+
# step 2
udk_bones = parse_armature(udk_armature, psk, psa)
-
+
# step 3
if context.scene.udk_option_export_psa == True:
actions = collate_actions()
parse_animation(udk_armature, udk_bones, actions, psa)
-
+
# write files
print(header("Exporting", 'CENTER'))
-
+
psk_filename = filepath + '.psk'
psa_filename = filepath + '.psa'
-
+
if context.scene.udk_option_export_psk == True:
print("Skeletal mesh data...")
psk.PrintOut()
- file = open(psk_filename, "wb")
+ file = open(psk_filename, "wb")
file.write(psk.dump())
- file.close()
+ file.close()
print("Exported: " + psk_filename)
print()
-
+
if context.scene.udk_option_export_psa == True:
print("Animation data...")
if not psa.IsEmpty():
psa.PrintOut()
- file = open(psa_filename, "wb")
+ file = open(psa_filename, "wb")
file.write(psa.dump())
- file.close()
+ file.close()
print("Exported: " + psa_filename)
else:
print("No Animation (.psa file) to export")
print()
-
+
#if objects are rebuild do the unlink
if bpy.context.scene.udk_option_rebuildobjects:
print("Unlinking Objects")
@@ -1826,20 +1825,20 @@ class Operator_UDKExport( bpy.types.Operator ):
"""Export to UDK"""
bl_idname = "object.udk_export"
bl_label = "Export now"
-
+
def execute(self, context):
print( "\n"*8 )
-
+
scene = bpy.context.scene
-
+
scene.udk_option_export_psk = (scene.udk_option_export == '0' or scene.udk_option_export == '2')
scene.udk_option_export_psa = (scene.udk_option_export == '1' or scene.udk_option_export == '2')
-
+
filepath = get_dst_path()
-
+
# cache settings
restore_frame = scene.frame_current
-
+
message = "Finish Export!"
try:
export(filepath)
@@ -1847,15 +1846,15 @@ class Operator_UDKExport( bpy.types.Operator ):
except Error as err:
print(err.message)
message = err.message
-
+
# restore settings
scene.frame_set(restore_frame)
-
+
self.report({'ERROR'}, message)
-
+
# restore settings
scene.frame_set(restore_frame)
-
+
return {'FINISHED'}
#===========================================================================
@@ -1865,7 +1864,7 @@ class Operator_ToggleConsole( bpy.types.Operator ):
"""Show or hide the console"""
bl_idname = "object.toggle_console"
bl_label = "Toggle console"
-
+
#def invoke(self, context, event):
# bpy.ops.wm.console_toggle()
# return{'FINISHED'}
@@ -1894,7 +1893,7 @@ bpy.types.Scene.udk_option_filename_src = EnumProperty(
items = [ ('0', "From object", "Name will be taken from object name"),
('1', "From Blend", "Name will be taken from .blend file name") ],
default = '0')
-
+
bpy.types.Scene.udk_option_export_psk = BoolProperty(
name = "bool export psa",
description = "Boolean for exporting psk format (Skeleton Mesh)",
@@ -1909,7 +1908,7 @@ bpy.types.Scene.udk_option_clamp_uv = BoolProperty(
name = "Clamp UV",
description = "True is to limit Clamp UV co-ordinates to [0-1]. False is unrestricted (x,y). ",
default = False)
-
+
bpy.types.Scene.udk_copy_merge = BoolProperty(
name = "Merge Mesh",
description = "This will copy the mesh(s) and merge the object together and unlink the mesh to be remove while exporting the object.",
@@ -1937,22 +1936,22 @@ bpy.types.Scene.udk_option_triangulate = BoolProperty(
name = "Triangulate Mesh",
description = "Convert Quads to Triangles",
default = False)
-
+
bpy.types.Scene.udk_option_selectanimations = BoolProperty(
name = "Select Animation(s)",
description = "Select animation(s) for export to psa file.",
default = False)
-
+
bpy.types.Scene.udk_option_selectobjects = BoolProperty(
name = "Select Object(s)",
description = "Select Armature and Mesh(s). Just make sure mesh(s) is parent to armature.",
default = False)
-
+
bpy.types.Scene.udk_option_rebuildobjects = BoolProperty(
name = "Rebuild Objects",
description = "In case of deform skeleton mesh and animations data. This will rebuild objects from raw format on export when checked.",
default = False)
-
+
bpy.types.Scene.udk_option_ignoreactiongroupnames = BoolProperty(
name = "Ignore Action Group Names",
description = "This will Ignore Action Set Group Names Check With Armature Bones. It will override armature to set action set.",
@@ -1970,7 +1969,7 @@ class OBJECT_OT_UTSelectedFaceSmooth(bpy.types.Operator):
"""It will only select smooth faces that is select mesh"""
bl_idname = "object.utselectfacesmooth" # XXX, name???
bl_label = "Select Smooth Faces"#"Select Smooth faces"
-
+
def invoke(self, context, event):
print("----------------------------------------")
print("Init Select Face(s):")
@@ -2006,21 +2005,21 @@ class OBJECT_OT_UTSelectedFaceSmooth(bpy.types.Operator):
else:
print("Didn't select Mesh Object!")
self.report({'INFO'}, "Didn't Select Mesh Object!")
- print("----------------------------------------")
+ print("----------------------------------------")
return{'FINISHED'}
-
+
class OBJECT_OT_MeshClearWeights(bpy.types.Operator):
"""Remove all mesh vertex groups weights for the bones."""
bl_idname = "object.meshclearweights" # XXX, name???
bl_label = "Remove Vertex Weights"#"Remove Mesh vertex weights"
-
+
def invoke(self, context, event):
for obj in bpy.data.objects:
if obj.type == 'MESH' and obj.select == True:
for vg in obj.vertex_groups:
obj.vertex_groups.remove(vg)
self.report({'INFO'}, "Mesh Vertex Groups Remove!")
- break
+ break
return{'FINISHED'}
def unpack_list(list_of_tuples):
@@ -2036,7 +2035,7 @@ def rebuildmesh(obj):
for i in bpy.context.scene.objects: i.select = False #deselect all objects
obj.select = True
bpy.context.scene.objects.active = obj
-
+
me_ob = bpy.data.meshes.new(("Re_"+obj.name))
mesh = obj.data
faces = []
@@ -2061,7 +2060,7 @@ def rebuildmesh(obj):
faces.extend([(face.vertices[0],face.vertices[1],face.vertices[2],face.vertices[3])])
#vertex positions
for vertex in mesh.vertices:
- verts.append(vertex.co.to_tuple())
+ verts.append(vertex.co.to_tuple())
#vertices weight groups into array
vertGroups = {} #array in strings
for vgroup in obj.vertex_groups:
@@ -2072,15 +2071,15 @@ def rebuildmesh(obj):
vlist.append((v.index,vg.weight))
#print((v.index,vg.weight))
vertGroups[vgroup.name] = vlist
-
+
#print("creating mesh object...")
#me_ob.from_pydata(verts, [], faces)
me_ob.vertices.add(len(verts))
me_ob.tessfaces.add(len(faces))
- me_ob.vertices.foreach_set("co", unpack_list(verts))
+ me_ob.vertices.foreach_set("co", unpack_list(verts))
me_ob.tessfaces.foreach_set("vertices_raw",unpack_list( faces))
me_ob.tessfaces.foreach_set("use_smooth", smoothings)#smooth array from face
-
+
#check if there is uv faces
if len(uvfaces) > 0:
uvtex = me_ob.tessface_uv_textures.new(name="retex")
@@ -2096,7 +2095,7 @@ def rebuildmesh(obj):
blender_tface.uv2 = mfaceuv[1];
blender_tface.uv3 = mfaceuv[2];
blender_tface.uv4 = mfaceuv[3];
-
+
    me_ob.update()#need to update the information to be able to see it in the scene
obmesh = bpy.data.objects.new(("Re_"+obj.name),me_ob)
bpy.context.scene.update()
@@ -2129,7 +2128,7 @@ class OBJECT_OT_UTRebuildMesh(bpy.types.Operator):
"""Note the scale will be 1:1 for object mode. To keep from deforming"""
bl_idname = "object.utrebuildmesh" # XXX, name???
bl_label = "Rebuild Mesh"#"Rebuild Mesh"
-
+
def invoke(self, context, event):
print("----------------------------------------")
print("Init Mesh Bebuild...")
@@ -2155,7 +2154,7 @@ def rebuildarmature(obj):
for i in bpy.context.scene.objects: i.select = False #deselect all objects
ob_new.select = True
bpy.context.scene.objects.active = obj
-
+
bpy.ops.object.mode_set(mode='EDIT')
for bone in obj.data.edit_bones:
if bone.parent != None:
@@ -2166,7 +2165,7 @@ def rebuildarmature(obj):
for i in bpy.context.scene.objects: i.select = False #deselect all objects
bpy.context.scene.objects.active = ob_new
bpy.ops.object.mode_set(mode='EDIT')
-
+
for bone in obj.data.bones:
bpy.ops.object.mode_set(mode='EDIT')
newbone = ob_new.data.edit_bones.new(bone.name)
@@ -2185,14 +2184,14 @@ def rebuildarmature(obj):
ob_new.animation_data.action = obj.animation_data.action #just make sure it here to do the animations if exist
print("Armature Object Name:",ob_new.name)
return ob_new
-
+
class OBJECT_OT_UTRebuildArmature(bpy.types.Operator):
"""If mesh is deform when importing to unreal engine try this. """ \
"""It rebuild the bones one at the time by select one armature object scrape to raw setup build. """ \
"""Note the scale will be 1:1 for object mode. To keep from deforming"""
bl_idname = "object.utrebuildarmature" # XXX, name???
bl_label = "Rebuild Armature" #Rebuild Armature
-
+
def invoke(self, context, event):
print("----------------------------------------")
print("Init Rebuild Armature...")
@@ -2280,12 +2279,12 @@ class Panel_UDKExport( bpy.types.Panel ):
#bl_context = "object"
bl_space_type = "VIEW_3D"
bl_region_type = "TOOLS"
-
+
#def draw_header(self, context):
# layout = self.layout
#obj = context.object
#layout.prop(obj, "select", text="")
-
+
#@classmethod
#def poll(cls, context):
# return context.active_object
@@ -2312,7 +2311,7 @@ class Panel_UDKExport( bpy.types.Panel ):
#layout.separator()
layout.prop(context.scene, "udk_option_export")
layout.prop(context.scene, "udk_option_selectobjects")
-
+
if context.scene.udk_option_selectobjects:
layout.operator("object.selobjectpdate")
layout.label(text="ARMATURE - Index")
@@ -2335,7 +2334,7 @@ class Panel_UDKExport( bpy.types.Panel ):
row11.operator("object.udk_export")
row11.operator("object.toggle_console")
layout.operator(OBJECT_OT_UTRebuildArmature.bl_idname)
- layout.label(text="Mesh")
+ layout.label(text="Mesh")
layout.operator(OBJECT_OT_MeshClearWeights.bl_idname)
layout.operator(OBJECT_OT_UTSelectedFaceSmooth.bl_idname)
layout.operator(OBJECT_OT_UTRebuildMesh.bl_idname)
@@ -2377,7 +2376,7 @@ def udkupdateobjects():
my_objlist.remove(count)
break
count += 1
-
+
my_objlist = bpy.context.scene.udkmesh_list
objectl = []
for objarm in bpy.context.scene.objects:#list and filter only mesh and armature
@@ -2412,14 +2411,14 @@ def udkupdateobjects():
my_objlist.remove(count)
break
count += 1
-
+
class OBJECT_OT_UDKObjUpdate(bpy.types.Operator):
"""This will update the filter of the mesh and armature."""
bl_idname = "object.selobjectpdate"
bl_label = "Update Object(s)"
-
+
actionname = bpy.props.StringProperty()
-
+
def execute(self, context):
udkupdateobjects()
return{'FINISHED'}
@@ -2439,7 +2438,7 @@ def udkcheckmeshline():
bpy.ops.object.mode_set(mode='EDIT') #set in edit mode
bpy.ops.mesh.select_all(action='DESELECT')
bpy.context.tool_settings.mesh_select_mode = (True, False, False) #select vertices
-
+
if objmesh != None:
print("found mesh")
print(objmesh)
@@ -2465,7 +2464,7 @@ def udkcheckmeshline():
norm = FVector(no[0], no[1], no[2])
tnorm = vect_list[1].sub(vect_list[0]).cross(vect_list[2].sub(vect_list[1]))
dot = norm.dot(tnorm)
-
+
tri = VTriangle()
if dot > 0:
(tri.WedgeIndex2, tri.WedgeIndex1, tri.WedgeIndex0) = wedge_list
@@ -2478,7 +2477,7 @@ def udkcheckmeshline():
vertex_list.append(dindex0)
vertex_list.append(dindex1)
vertex_list.append(dindex2)
-
+
bpy.ops.object.mode_set(mode='OBJECT')
for vertex in objmesh.data.vertices: #loop all vertex in the mesh list
for vl in vertex_list: #loop for error vertex
@@ -2498,7 +2497,7 @@ class OBJECT_OT_UDKCheckMeshLines(bpy.types.Operator):
"""If the vertices share the same position it will causes an bug."""
bl_idname = "object.udkcheckmeshline"
bl_label = "Check Mesh Vertices"
-
+
def execute(self, context):
message = udkcheckmeshline()
self.report({'ERROR'}, message)
@@ -2511,10 +2510,10 @@ class OBJECT_OT_ActionSetAnimUpdate(bpy.types.Operator):
bl_label = "Update Action Set(s)"
actionname = bpy.props.StringProperty()
-
+
def execute(self, context):
my_sett = bpy.context.scene.udkas_list
-
+
bones = []
armature = None
armatures = []
@@ -2525,14 +2524,14 @@ class OBJECT_OT_ActionSetAnimUpdate(bpy.types.Operator):
armatures.append(objarm)
if objarm.select == True:
armatureselected.append(objarm)
-
+
if len(armatureselected) == len(armatures) == 1:
armature = armatures[0]
if len(armatures) == 1:
armature = armatures[0]
- if len(armatureselected) == 1:
+ if len(armatureselected) == 1:
armature = armatureselected[0]
-
+
if armature != None:
for bone in armature.pose.bones:
bones.append(bone.name)
@@ -2565,7 +2564,7 @@ class OBJECT_OT_ActionSetAnimUpdate(bpy.types.Operator):
my_item.bmatch = True
else:
my_item.bmatch = False
- removeactions = []
+ removeactions = []
#check action list and data actions
for actionlist in bpy.context.scene.udkas_list:
bfind = False
@@ -2578,7 +2577,7 @@ class OBJECT_OT_ActionSetAnimUpdate(bpy.types.Operator):
#print("ACT NAME:",actionlist.name," COUNT",notfound)
if notfound == len(bpy.data.actions):
#print("remove :",actionlist.name)
- removeactions.append(actionlist.name)
+ removeactions.append(actionlist.name)
#print("Not in the action data list:",len(removeactions))
            #remove entries from the template list when their actions were removed or renamed
for actname in removeactions:
@@ -2589,8 +2588,8 @@ class OBJECT_OT_ActionSetAnimUpdate(bpy.types.Operator):
my_sett.remove(actioncount);
break
actioncount += 1
- return{'FINISHED'}
-
+ return{'FINISHED'}
+
class ExportUDKAnimData(bpy.types.Operator):
"""Export Skeleton Mesh / Animation Data file(s). """ \
"""One mesh and one armature else select one mesh or armature to be exported"""
@@ -2614,7 +2613,7 @@ class ExportUDKAnimData(bpy.types.Operator):
udk_option_export = bpy.types.Scene.udk_option_export
udk_option_scale = bpy.types.Scene.udk_option_scale
udk_option_rebuildobjects = bpy.types.Scene.udk_option_rebuildobjects
-
+
@classmethod
def poll(cls, context):
return context.active_object != None
@@ -2625,12 +2624,12 @@ class ExportUDKAnimData(bpy.types.Operator):
scene.udk_option_export_psa = (scene.udk_option_export == '1' or scene.udk_option_export == '2')
bpy.context.scene.udk_option_scale = self.udk_option_scale
bpy.context.scene.udk_option_rebuildobjects = self.udk_option_rebuildobjects
-
+
filepath = get_dst_path()
-
+
# cache settings
restore_frame = scene.frame_current
-
+
message = "Finish Export!"
try:
export(filepath)
@@ -2638,18 +2637,18 @@ class ExportUDKAnimData(bpy.types.Operator):
except Error as err:
print(err.message)
message = err.message
-
+
# restore settings
scene.frame_set(restore_frame)
-
+
self.report({'WARNING', 'INFO'}, message)
return {'FINISHED'}
-
+
def invoke(self, context, event):
wm = context.window_manager
wm.fileselect_add(self)
return {'RUNNING_MODAL'}
-
+
def menu_func(self, context):
default_path = os.path.splitext(bpy.data.filepath)[0] + ".psk"
self.layout.operator(ExportUDKAnimData.bl_idname, text="Skeleton Mesh / Animation Data (.psk/.psa)").filepath = default_path
@@ -2661,17 +2660,17 @@ def register():
#print("REGISTER")
bpy.utils.register_module(__name__)
bpy.types.INFO_MT_file_export.append(menu_func)
-
+
def unregister():
#print("UNREGISTER")
bpy.utils.unregister_module(__name__)
bpy.types.INFO_MT_file_export.remove(menu_func)
-
+
if __name__ == "__main__":
#print("\n"*4)
print(header("UDK Export PSK/PSA 2.6", 'CENTER'))
register()
-
+
#loader
#filename = "D:/Projects/BlenderScripts/io_export_udk_psa_psk_alpha.py"
#exec(compile(open(filename).read(), filename, 'exec'))
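
Note: every section written by the exporter above uses the same chunk convention, a 20-byte chunk ID packed as '20siii' together with the TypeFlag, per-record size and record count, followed by the packed records (see VChunkHeader.dump and FileSection.dump in the diff). A minimal standalone sketch of that convention, with assumed example values and not part of this commit:

    # Illustrative sketch of the PSK/PSA chunk layout used above.
    # 1999801 is the TypeFlag constant from the exporter; 12 bytes is assumed
    # here as the record size of a point (three packed floats).
    from struct import pack

    def dump_section(chunk_id, record_size, records):
        # 20-byte ID, type flag, per-record size, record count, then the raw records
        header = pack('20siii', chunk_id.encode(), 1999801, record_size, len(records))
        return header + b''.join(records)

    # an empty "PNTS0000" section is just its header
    print(len(dump_section("PNTS0000", 12, [])))  # -> 32 on typical platforms
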
diff --git a/io_import_gimp_image_to_scene.py b/io_import_gimp_image_to_scene.py
index 71e8d3ed..971770a5 100644
--- a/io_import_gimp_image_to_scene.py
+++ b/io_import_gimp_image_to_scene.py
@@ -26,8 +26,7 @@ bl_info = {
"warning": "XCF import requires xcftools installed",
"wiki_url": "http://wiki.blender.org/index.php/Extensions:2.6/Py/"
"Scripts/Import-Export/GIMPImageToScene",
- "tracker_url": "http://projects.blender.org/tracker/index.php?"
- "func=detail&aid=25136",
+ "tracker_url": "https://developer.blender.org/T25136",
"category": "Import-Export"}
"""
diff --git a/io_import_images_as_planes.py b/io_import_images_as_planes.py
index 860ae00e..b3dcb65a 100644
--- a/io_import_images_as_planes.py
+++ b/io_import_images_as_planes.py
@@ -26,7 +26,7 @@ bl_info = {
"The images are mapped to the planes.",
"warning": "",
"wiki_url": "http://wiki.blender.org/index.php/Extensions:2.6/Py/Scripts/Add_Mesh/Planes_from_Images",
- "tracker_url": "https://projects.blender.org/tracker/index.php?func=detail&aid=21751",
+ "tracker_url": "https://developer.blender.org/T21751",
"category": "Import-Export"}
import bpy
diff --git a/io_import_scene_dxf.py b/io_import_scene_dxf.py
index f45d964d..432e9cda 100644
--- a/io_import_scene_dxf.py
+++ b/io_import_scene_dxf.py
@@ -26,8 +26,7 @@ bl_info = {
"warning": "Under construction! Visit Wiki for details.",
"wiki_url": "http://wiki.blender.org/index.php/Extensions:2.6/Py/"
"Scripts/Import-Export/DXF_Importer",
- "tracker_url": "https://projects.blender.org/tracker/index.php?"
- "func=detail&aid=23480",
+ "tracker_url": "https://developer.blender.org/T23480",
"support": "OFFICIAL",
"category": "Import-Export",
}
diff --git a/io_import_scene_lwo.py b/io_import_scene_lwo.py
index e711718d..b01763f3 100644
--- a/io_import_scene_lwo.py
+++ b/io_import_scene_lwo.py
@@ -22,13 +22,12 @@ bl_info= {
"version": (1, 2),
"blender": (2, 57, 0),
"location": "File > Import > LightWave Object (.lwo)",
- "description": "Imports a LWO file including any UV, Morph and Color maps. "\
+ "description": "Imports a LWO file including any UV, Morph and Color maps. "
"Can convert Skelegons to an Armature.",
"warning": "",
- "wiki_url": "http://wiki.blender.org/index.php/Extensions:2.6/Py/"\
+ "wiki_url": "http://wiki.blender.org/index.php/Extensions:2.6/Py/"
"Scripts/Import-Export/LightWave_Object",
- "tracker_url": "https://projects.blender.org/tracker/index.php?"\
- "func=detail&aid=23623",
+ "tracker_url": "https://developer.blender.org/T23623",
"category": "Import-Export"}
# Copyright (c) Ken Nign 2010
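The backslashes removed in the bl_info hunk above were never needed: adjacent string literals inside a bracketed expression are concatenated by the Python parser, so the explicit line continuation is redundant. A small illustration of that rule:

    info = {
        "wiki_url": "http://wiki.blender.org/index.php/Extensions:2.6/Py/"
                    "Scripts/Import-Export/LightWave_Object",
    }

    # The two literals are joined at compile time into a single string.
    assert info["wiki_url"].endswith("Py/Scripts/Import-Export/LightWave_Object")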
@@ -621,22 +620,22 @@ def read_weight_vmad(ew_bytes, object_layers):
offset+= pol_id_len
weight,= struct.unpack(">f", ew_bytes[offset:offset+4])
offset+= 4
-
+
face_pnts= object_layers[-1].pols[pol_id]
try:
# Find the point's location in the polygon's point list
first_idx= face_pnts.index(pnt_id)
except:
continue
-
+
# Then get the next point in the list, or wrap around to the first
if first_idx == len(face_pnts) - 1:
second_pnt= face_pnts[0]
else:
second_pnt= face_pnts[first_idx + 1]
-
+
object_layers[-1].edge_weights["{0} {1}".format(second_pnt, pnt_id)]= weight
-
+
def read_pols(pol_bytes, object_layers):
"""Read the layer's polygons, each one is just a list of point indexes."""
diff --git a/io_import_scene_mhx.py b/io_import_scene_mhx.py
index bfb68956..0aa99f56 100644
--- a/io_import_scene_mhx.py
+++ b/io_import_scene_mhx.py
@@ -44,8 +44,7 @@ bl_info = {
'description': 'Import files in the MakeHuman eXchange format (.mhx)',
'warning': '',
'wiki_url': 'http://www.makehuman.org/documentation',
- 'tracker_url': 'https://projects.blender.org/tracker/index.php?'\
- 'func=detail&aid=21872',
+ 'tracker_url': 'https://developer.blender.org/T21872',
'category': 'Import-Export'}
MAJOR_VERSION = 1
diff --git a/io_import_scene_unreal_psa_psk.py b/io_import_scene_unreal_psa_psk.py
index 64f0e6d0..ec4bc27a 100644
--- a/io_import_scene_unreal_psa_psk.py
+++ b/io_import_scene_unreal_psa_psk.py
@@ -26,8 +26,7 @@ bl_info = {
"warning": "",
"wiki_url": "http://wiki.blender.org/index.php/Extensions:2.5/Py/"
"Scripts/Import-Export/Unreal_psk_psa",
- "tracker_url": "https://projects.blender.org/tracker/index.php?"\
- "func=detail&aid=21366",
+ "tracker_url": "https://developer.blender.org/T21366",
"category": "Import-Export"}
"""
diff --git a/io_mesh_pdb/__init__.py b/io_mesh_pdb/__init__.py
index 1d2a7cec..a3d31fb5 100644
--- a/io_mesh_pdb/__init__.py
+++ b/io_mesh_pdb/__init__.py
@@ -26,9 +26,9 @@
# First publication in Blender : 2011-11-11
# Last modified : 2013-04-04
#
-# Acknowledgements
+# Acknowledgements
# ================
-# Blender: ideasman, meta_androcto, truman, kilon, CoDEmanX, dairin0d, PKHG,
+# Blender: ideasman, meta_androcto, truman, kilon, CoDEmanX, dairin0d, PKHG,
# Valter, ...
# Other : Frank Palmino
#
@@ -44,12 +44,11 @@ bl_info = {
"blender": (2, 60, 0),
"location": "File -> Import -> PDB (.pdb)",
"warning": "",
- "wiki_url": "http://wiki.blender.org/index.php/Extensions:2.6/"
- "Py/Scripts/Import-Export/PDB",
- "tracker_url": "http://projects.blender.org/tracker/"
- "index.php?func=detail&aid=29226",
- "category": "Import-Export"
-}
+ "wiki_url": "http://wiki.blender.org/index.php/Extensions:2.6/Py/"
+ "Scripts/Import-Export/PDB",
+ "tracker_url": "https://developer.blender.org/T29226",
+ "category": "Import-Export"}
+
import bpy
from bpy.types import Operator
@@ -77,7 +76,7 @@ class ImportPDB(Operator, ImportHelper):
use_center = BoolProperty(
name = "Object to origin", default=True,
- description = "Put the object into the global origin")
+ description = "Put the object into the global origin")
use_camera = BoolProperty(
name="Camera", default=False,
description="Do you need a camera?")
@@ -90,7 +89,7 @@ class ImportPDB(Operator, ImportHelper):
items=(('0', "NURBS", "NURBS balls"),
('1', "Mesh" , "Mesh balls"),
('2', "Meta" , "Metaballs")),
- default='0',)
+ default='0',)
mesh_azimuth = IntProperty(
name = "Azimuth", default=32, min=1,
description = "Number of sectors (azimuth)")
@@ -109,23 +108,23 @@ class ImportPDB(Operator, ImportHelper):
items=(('0', "Pre-defined", "Use pre-defined radius"),
('1', "Atomic", "Use atomic radius"),
('2', "van der Waals", "Use van der Waals radius")),
- default='0',)
+ default='0',)
use_sticks = BoolProperty(
name="Use sticks", default=True,
- description="Do you want to display the sticks?")
+ description="Do you want to display the sticks?")
use_sticks_type = EnumProperty(
name="Type",
description="Choose type of stick",
items=(('0', "Dupliverts", "Use dupliverts structures"),
('1', "Skin", "Use skin and subdivision modifier"),
('2', "Normal", "Use simple cylinders")),
- default='0',)
+ default='0',)
sticks_subdiv_view = IntProperty(
name = "SubDivV", default=2, min=1,
- description="Number of subdivisions (view)")
+ description="Number of subdivisions (view)")
sticks_subdiv_render = IntProperty(
name = "SubDivR", default=2, min=1,
- description="Number of subdivisions (render)")
+ description="Number of subdivisions (render)")
sticks_sectors = IntProperty(
name = "Sector", default=20, min=1,
description="Number of sectors of a stick")
@@ -134,25 +133,25 @@ class ImportPDB(Operator, ImportHelper):
description ="Radius of a stick")
sticks_unit_length = FloatProperty(
name = "Unit", default=0.05, min=0.0001,
- description = "Length of the unit of a stick in Angstrom")
+ description = "Length of the unit of a stick in Angstrom")
use_sticks_color = BoolProperty(
name="Color", default=True,
description="The sticks appear in the color of the atoms")
use_sticks_smooth = BoolProperty(
name="Smooth", default=True,
- description="The sticks are round (sectors are not visible)")
+ description="The sticks are round (sectors are not visible)")
use_sticks_bonds = BoolProperty(
name="Bonds", default=False,
description="Show double and tripple bonds.")
sticks_dist = FloatProperty(
name="", default = 1.1, min=1.0, max=3.0,
- description="Distance between sticks measured in stick diameter")
+ description="Distance between sticks measured in stick diameter")
use_sticks_one_object = BoolProperty(
name="One object", default=True,
- description="All sticks are one object.")
+ description="All sticks are one object.")
use_sticks_one_object_nr = IntProperty(
name = "No.", default=200, min=10,
- description="Number of sticks to be grouped at once")
+ description="Number of sticks to be grouped at once")
datafile = StringProperty(
name = "", description="Path to your custom data file",
maxlen = 256, default = "", subtype='FILE_PATH')
@@ -163,17 +162,17 @@ class ImportPDB(Operator, ImportHelper):
row.prop(self, "use_camera")
row.prop(self, "use_lamp")
row = layout.row()
- row.prop(self, "use_center")
+ row.prop(self, "use_center")
# Balls
- box = layout.box()
- row = box.row()
- row.label(text="Balls / atoms")
+ box = layout.box()
+ row = box.row()
+ row.label(text="Balls / atoms")
row = box.row()
col = row.column()
col.prop(self, "ball")
row = box.row()
row.active = (self.ball == "1")
- col = row.column(align=True)
+ col = row.column(align=True)
col.prop(self, "mesh_azimuth")
col.prop(self, "mesh_zenith")
row = box.row()
@@ -184,55 +183,55 @@ class ImportPDB(Operator, ImportHelper):
col.prop(self, "scale_distances")
row = box.row()
row.prop(self, "atomradius")
- # Sticks
- box = layout.box()
- row = box.row()
+ # Sticks
+ box = layout.box()
+ row = box.row()
row.label(text="Sticks / bonds")
- row = box.row()
+ row = box.row()
row.prop(self, "use_sticks")
row = box.row()
- row.active = self.use_sticks
+ row.active = self.use_sticks
row.prop(self, "use_sticks_type")
- row = box.row()
+ row = box.row()
row.active = self.use_sticks
col = row.column()
- if self.use_sticks_type == '0' or self.use_sticks_type == '2':
+ if self.use_sticks_type == '0' or self.use_sticks_type == '2':
col.prop(self, "sticks_sectors")
col.prop(self, "sticks_radius")
- if self.use_sticks_type == '1':
- row = box.row()
+ if self.use_sticks_type == '1':
+ row = box.row()
row.active = self.use_sticks
row.prop(self, "sticks_subdiv_view")
row.prop(self, "sticks_subdiv_render")
- row = box.row()
- row.active = self.use_sticks
- if self.use_sticks_type == '0':
+ row = box.row()
+ row.active = self.use_sticks
+ if self.use_sticks_type == '0':
col.prop(self, "sticks_unit_length")
- col = row.column(align=True)
- if self.use_sticks_type == '0':
- col.prop(self, "use_sticks_color")
+ col = row.column(align=True)
+ if self.use_sticks_type == '0':
+ col.prop(self, "use_sticks_color")
col.prop(self, "use_sticks_smooth")
if self.use_sticks_type == '0' or self.use_sticks_type == '2':
col.prop(self, "use_sticks_bonds")
- row = box.row()
- if self.use_sticks_type == '0':
+ row = box.row()
+ if self.use_sticks_type == '0':
row.active = self.use_sticks and self.use_sticks_bonds
row.label(text="Distance")
row.prop(self, "sticks_dist")
if self.use_sticks_type == '2':
- row.active = self.use_sticks
- col = row.column()
+ row.active = self.use_sticks
+ col = row.column()
col.prop(self, "use_sticks_one_object")
col = row.column()
col.active = self.use_sticks_one_object
col.prop(self, "use_sticks_one_object_nr")
-
-
+
+
def execute(self, context):
# This is in order to solve this strange 'relative path' thing.
filepath_pdb = bpy.path.abspath(self.filepath)
- # Execute main routine
+ # Execute main routine
import_pdb.import_pdb(
self.ball,
self.mesh_azimuth,
@@ -256,7 +255,7 @@ class ImportPDB(Operator, ImportHelper):
self.use_center,
self.use_camera,
self.use_lamp,
- filepath_pdb)
+ filepath_pdb)
return {'FINISHED'}
@@ -276,7 +275,7 @@ class ExportPDB(Operator, ExportHelper):
items=(('0', "All", "Export all active objects"),
('1', "Elements", "Export only those active objects which have"
" a proper element name")),
- default='1',)
+ default='1',)
def draw(self, context):
layout = self.layout
@@ -302,7 +301,7 @@ def register():
bpy.utils.register_module(__name__)
bpy.types.INFO_MT_file_import.append(menu_func_import)
bpy.types.INFO_MT_file_export.append(menu_func_export)
-
+
def unregister():
bpy.utils.unregister_module(__name__)
bpy.types.INFO_MT_file_import.remove(menu_func_import)
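The ImportPDB hunks above are indentation clean-up only, but the pattern they touch is the standard one for importer add-ons: operator properties drawn in the file browser sidebar, with row.active used to grey out options that do not apply. A minimal, hypothetical sketch of that pattern (not the PDB importer's actual options):

    import bpy
    from bpy.props import BoolProperty, EnumProperty, IntProperty
    from bpy_extras.io_utils import ImportHelper

    class ImportSimpleDemo(bpy.types.Operator, ImportHelper):
        """Illustrative importer showing property-driven sidebar UI"""
        bl_idname = "import_scene.simple_demo"
        bl_label = "Import Simple Demo"

        ball = EnumProperty(
            name="Type of ball",
            items=(('0', "NURBS", "NURBS balls"),
                   ('1', "Mesh", "Mesh balls")),
            default='0')
        use_sticks = BoolProperty(name="Use sticks", default=True)
        sticks_sectors = IntProperty(name="Sectors", default=20, min=1)

        def draw(self, context):
            layout = self.layout
            box = layout.box()
            box.row().prop(self, "ball")
            box.row().prop(self, "use_sticks")
            row = box.row()
            row.active = self.use_sticks      # greyed out while sticks are disabled
            row.prop(self, "sticks_sectors")

        def execute(self, context):
            filepath = bpy.path.abspath(self.filepath)   # resolve '//' relative paths
            print("would import", filepath)
            return {'FINISHED'}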
diff --git a/io_mesh_raw/__init__.py b/io_mesh_raw/__init__.py
index 3e8c7123..a3f087ef 100644
--- a/io_mesh_raw/__init__.py
+++ b/io_mesh_raw/__init__.py
@@ -28,9 +28,8 @@ bl_info = {
"description": "Import-Export Raw Faces",
"warning": "",
"wiki_url": "http://wiki.blender.org/index.php/Extensions:2.6/Py/"
- "Scripts/Import-Export/Raw_Mesh_IO",
- "tracker_url": "https://projects.blender.org/tracker/index.php?"
- "func=detail&aid=25692",
+ "Scripts/Import-Export/Raw_Mesh_IO",
+ "tracker_url": "https://developer.blender.org/T25692",
"category": "Import-Export"}
if "bpy" in locals():
diff --git a/io_mesh_stl/__init__.py b/io_mesh_stl/__init__.py
index 5260b5c0..4074aec5 100644
--- a/io_mesh_stl/__init__.py
+++ b/io_mesh_stl/__init__.py
@@ -27,12 +27,12 @@ bl_info = {
"description": "Import-Export STL files",
"warning": "",
"wiki_url": "http://wiki.blender.org/index.php/Extensions:2.6/Py/"
- "Scripts/Import-Export/STL",
- "tracker_url": "https://projects.blender.org/tracker/index.php?"
- "func=detail&aid=22837",
+ "Scripts/Import-Export/STL",
+ "tracker_url": "https://developer.blender.org/T22837",
"support": 'OFFICIAL',
"category": "Import-Export"}
+
# @todo write the wiki page
"""
diff --git a/io_mesh_uv_layout/__init__.py b/io_mesh_uv_layout/__init__.py
index d467dcf5..9131654b 100644
--- a/io_mesh_uv_layout/__init__.py
+++ b/io_mesh_uv_layout/__init__.py
@@ -27,12 +27,12 @@ bl_info = {
"description": "Export the UV layout as a 2D graphic",
"warning": "",
"wiki_url": "http://wiki.blender.org/index.php/Extensions:2.6/Py/"
- "Scripts/Import-Export/UV_Layout",
- "tracker_url": "https://projects.blender.org/tracker/index.php?"
- "func=detail&aid=22837",
+ "Scripts/Import-Export/UV_Layout",
+ "tracker_url": "https://developer.blender.org/T22837",
"support": 'OFFICIAL',
"category": "Import-Export"}
+
# @todo write the wiki page
if "bpy" in locals():
diff --git a/io_scene_ms3d/__init__.py b/io_scene_ms3d/__init__.py
index fbb31626..c1b54359 100644
--- a/io_scene_ms3d/__init__.py
+++ b/io_scene_ms3d/__init__.py
@@ -20,19 +20,18 @@
bl_info = {
'name': "MilkShape3D MS3D format (.ms3d)",
- 'description': "Import / Export MilkShape3D MS3D files"\
- " (conform with MilkShape3D v1.8.4)",
+ 'description': "Import / Export MilkShape3D MS3D files "
+ "(conform with MilkShape3D v1.8.4)",
'author': "Alexander Nussbaumer",
'version': (1, 0, 0),
'blender': (2, 69, 2),
'location': "File > Import & File > Export",
'warning': "",
- 'wiki_url': "http://wiki.blender.org/index.php/Extensions:2.6/Py/"\
+ 'wiki_url': "http://wiki.blender.org/index.php/Extensions:2.6/Py/"
"Scripts/Import-Export/MilkShape3D_MS3D",
- 'tracker_url': "http://projects.blender.org/tracker/index.php"\
- "?func=detail&aid=34084",
- 'category': "Import-Export",
- }
+ 'tracker_url': "https://developer.blender.org/T34084",
+ 'category': "Import-Export"}
+
###############################################################################
#234567890123456789012345678901234567890123456789012345678901234567890123456789
diff --git a/io_scene_x/__init__.py b/io_scene_x/__init__.py
index 61f8d5f7..d320927c 100644
--- a/io_scene_x/__init__.py
+++ b/io_scene_x/__init__.py
@@ -24,14 +24,14 @@ bl_info = {
"version": (3, 1, 0),
"blender": (2, 69, 0),
"location": "File > Export > DirectX (.x)",
- "description": "Export mesh vertices, UV's, materials, textures, "\
+ "description": "Export mesh vertices, UV's, materials, textures, "
"vertex colors, armatures, empties, and actions.",
- "wiki_url": "http://wiki.blender.org/index.php/Extensions:2.6/Py/"\
+ "wiki_url": "http://wiki.blender.org/index.php/Extensions:2.6/Py/"
"Scripts/Import-Export/DirectX_Exporter",
- "tracker_url": "https://projects.blender.org/tracker/index.php?"\
- "func=detail&aid=22795",
+ "tracker_url": "https://developer.blender.org/T22795",
"category": "Import-Export"}
+
import bpy
from bpy.props import BoolProperty
from bpy.props import EnumProperty
@@ -45,43 +45,43 @@ class ExportDirectX(bpy.types.Operator):
bl_label = "Export DirectX"
filepath = StringProperty(subtype='FILE_PATH')
-
+
# Export options
SelectedOnly = BoolProperty(
name="Export Selected Objects Only",
description="Export only selected objects",
default=True)
-
+
CoordinateSystem = EnumProperty(
name="Coordinate System",
description="Use the selected coordinate system for export",
items=(('LEFT_HANDED', "Left-Handed", "Use a Y up, Z forward system or a Z up, -Y forward system"),
('RIGHT_HANDED', "Right-Handed", "Use a Y up, -Z forward system or a Z up, Y forward system")),
default='LEFT_HANDED')
-
+
UpAxis = EnumProperty(
name="Up Axis",
description="The selected axis points upward",
items=(('Y', "Y", "The Y axis points up"),
('Z', "Z", "The Z axis points up")),
default='Y')
-
+
ExportMeshes = BoolProperty(
name="Export Meshes",
description="Export mesh objects",
default=True)
-
+
ExportNormals = BoolProperty(
name=" Export Normals",
description="Export mesh normals",
default=True)
-
+
FlipNormals = BoolProperty(
name=" Flip Normals",
description="Flip mesh normals before export",
default=False)
-
+
ExportUVCoordinates = BoolProperty(
name=" Export UV Coordinates",
description="Export mesh UV coordinates, if any",
@@ -91,33 +91,33 @@ class ExportDirectX(bpy.types.Operator):
name=" Export Materials",
description="Export material properties and reference image textures",
default=True)
-
+
ExportActiveImageMaterials = BoolProperty(
name=" Reference Active Images as Textures",
description="Reference the active image of each face as a texture, "\
"as opposed to the image assigned to the material",
default=False)
-
+
ExportVertexColors = BoolProperty(
name=" Export Vertex Colors",
description="Export mesh vertex colors, if any",
default=False)
-
+
ExportSkinWeights = BoolProperty(
name=" Export Skin Weights",
description="Bind mesh vertices to armature bones",
default=False)
-
+
ApplyModifiers = BoolProperty(
name=" Apply Modifiers",
description="Apply the effects of object modifiers before export",
default=False)
-
+
ExportArmatureBones = BoolProperty(
name="Export Armature Bones",
description="Export armatures bones",
default=False)
-
+
ExportRestBone = BoolProperty(
name=" Export Rest Position",
description="Export bones in their rest position (recommended for "\
@@ -135,14 +135,14 @@ class ExportDirectX(bpy.types.Operator):
description="Include the AnimTicksPerSecond template which is "\
"used by some engines to control animation speed",
default=False)
-
+
ExportActionsAsSets = BoolProperty(
name=" Export Actions as AnimationSets",
description="Export each action of each object as a separate "\
"AnimationSet. Otherwise all current actions are lumped "\
"together into a single set",
default=False)
-
+
AttachToFirstArmature = BoolProperty(
name=" Attach Unused Actions to First Armature",
description="Export each unused action as if used by the first "\
diff --git a/light_field_tools/__init__.py b/light_field_tools/__init__.py
index 08ff8c37..2bb769c0 100644
--- a/light_field_tools/__init__.py
+++ b/light_field_tools/__init__.py
@@ -25,8 +25,9 @@ bl_info = {
"blender": (2, 64, 0),
"location": "View3D > Tool Shelf > Light Field Tools",
"url": "http://www.jku.at/cg/",
- "wiki_url": "http://wiki.blender.org/index.php/Extensions:2.6/Py/Scripts/Render/Light_Field_Tools",
- "tracker_url": "http://projects.blender.org/tracker/index.php?func=detail&aid=25719",
+ "wiki_url": "http://wiki.blender.org/index.php/Extensions:2.6/Py/"
+ "Scripts/Render/Light_Field_Tools",
+ "tracker_url": "https://developer.blender.org/T25719",
"category": "Render"
}
diff --git a/mesh_bsurfaces.py b/mesh_bsurfaces.py
index 983e426b..f3c1a0c3 100644
--- a/mesh_bsurfaces.py
+++ b/mesh_bsurfaces.py
@@ -20,16 +20,14 @@
bl_info = {
"name": "Bsurfaces GPL Edition",
"author": "Eclectiel",
- "version": (1,5),
+ "version": (1, 5),
"blender": (2, 63, 0),
- "api": 45996,
"location": "View3D > EditMode > ToolShelf",
"description": "Modeling and retopology tool.",
"wiki_url": "http://wiki.blender.org/index.php/Dev:Ref/Release_Notes/2.64/Bsurfaces_1.5",
- "tracker_url": "http://projects.blender.org/tracker/index.php?"\
- "func=detail&aid=26642",
+ "tracker_url": "https://developer.blender.org/T26642",
"category": "Mesh"}
-
+
import bpy
import bmesh
@@ -48,18 +46,18 @@ class VIEW3D_PT_tools_SURFSK_mesh(bpy.types.Panel):
bl_category = 'Tools'
bl_context = "mesh_edit"
bl_label = "Bsurfaces"
-
+
@classmethod
def poll(cls, context):
return context.active_object
-
-
+
+
def draw(self, context):
layout = self.layout
-
+
scn = context.scene
ob = context.object
-
+
col = layout.column(align=True)
row = layout.row()
row.separator()
@@ -70,33 +68,33 @@ class VIEW3D_PT_tools_SURFSK_mesh(bpy.types.Panel):
col.prop(scn, "SURFSK_loops_on_strokes")
col.prop(scn, "SURFSK_automatic_join")
col.prop(scn, "SURFSK_keep_strokes")
-
-
-
+
+
+
class VIEW3D_PT_tools_SURFSK_curve(bpy.types.Panel):
bl_space_type = 'VIEW_3D'
bl_region_type = 'TOOLS'
bl_context = "curve_edit"
bl_label = "Bsurfaces"
-
+
@classmethod
def poll(cls, context):
return context.active_object
-
-
+
+
def draw(self, context):
layout = self.layout
-
+
scn = context.scene
ob = context.object
-
+
col = layout.column(align=True)
row = layout.row()
row.separator()
col.operator("curve.surfsk_first_points", text="Set First Points")
col.operator("curve.switch_direction", text="Switch Direction")
col.operator("curve.surfsk_reorder_splines", text="Reorder Splines")
-
+
@@ -104,18 +102,18 @@ class VIEW3D_PT_tools_SURFSK_curve(bpy.types.Panel):
def get_strokes_type(main_object):
strokes_type = ""
strokes_num = 0
-
+
# Check if they are grease pencil
try:
#### Get the active grease pencil layer.
strokes_num = len(main_object.grease_pencil.layers.active.active_frame.strokes)
-
+
if strokes_num > 0:
strokes_type = "GP_STROKES"
except:
pass
-
-
+
+
# Check if they are curves, if there aren't grease pencil strokes.
if strokes_type == "":
if len(bpy.context.selected_objects) == 2:
@@ -123,35 +121,35 @@ def get_strokes_type(main_object):
if ob != bpy.context.scene.objects.active and ob.type == "CURVE":
strokes_type = "EXTERNAL_CURVE"
strokes_num = len(ob.data.splines)
-
+
# Check if there is any non-bezier spline.
for i in range(len(ob.data.splines)):
if ob.data.splines[i].type != "BEZIER":
strokes_type = "CURVE_WITH_NON_BEZIER_SPLINES"
break
-
+
elif ob != bpy.context.scene.objects.active and ob.type != "CURVE":
strokes_type = "EXTERNAL_NO_CURVE"
elif len(bpy.context.selected_objects) > 2:
strokes_type = "MORE_THAN_ONE_EXTERNAL"
-
-
+
+
# Check if there is a single stroke without any selection in the object.
if strokes_num == 1 and main_object.data.total_vert_sel == 0:
if strokes_type == "EXTERNAL_CURVE":
strokes_type = "SINGLE_CURVE_STROKE_NO_SELECTION"
elif strokes_type == "GP_STROKES":
strokes_type = "SINGLE_GP_STROKE_NO_SELECTION"
-
+
if strokes_num == 0 and main_object.data.total_vert_sel > 0:
strokes_type = "SELECTION_ALONE"
-
-
+
+
if strokes_type == "":
strokes_type = "NO_STROKES"
-
-
-
+
+
+
return strokes_type
@@ -163,96 +161,96 @@ class GPENCIL_OT_SURFSK_add_surface(bpy.types.Operator):
bl_label = "Bsurfaces add surface"
bl_description = "Generates surfaces from grease pencil strokes, bezier curves or loose edges."
bl_options = {'REGISTER', 'UNDO'}
-
-
+
+
edges_U = bpy.props.IntProperty(name = "Cross",
description = "Number of face-loops crossing the strokes.",
default = 1,
min = 1,
max = 200)
-
+
edges_V = bpy.props.IntProperty(name = "Follow",
description = "Number of face-loops following the strokes.",
default = 1,
min = 1,
max = 200)
-
+
cyclic_cross = bpy.props.BoolProperty(name = "Cyclic Cross",
description = "Make cyclic the face-loops crossing the strokes.",
default = False)
-
+
cyclic_follow = bpy.props.BoolProperty(name = "Cyclic Follow",
description = "Make cyclic the face-loops following the strokes.",
default = False)
-
+
loops_on_strokes = bpy.props.BoolProperty(name = "Loops on strokes",
description = "Make the loops match the paths of the strokes.",
default = False)
-
+
automatic_join = bpy.props.BoolProperty(name = "Automatic join",
description = "Join automatically vertices of either surfaces generated by crosshatching, or from the borders of closed shapes.",
default = False)
-
+
join_stretch_factor = bpy.props.FloatProperty(name = "Stretch",
description = "Amount of stretching or shrinking allowed for edges when joining vertices automatically.",
default = 1,
min = 0,
max = 3,
subtype = 'FACTOR')
-
-
-
-
+
+
+
+
def draw(self, context):
layout = self.layout
-
+
scn = context.scene
ob = context.object
-
+
col = layout.column(align=True)
row = layout.row()
-
+
if not self.is_fill_faces:
row.separator()
if not self.is_crosshatch:
if not self.selection_U_exists:
col.prop(self, "edges_U")
row.separator()
-
+
if not self.selection_V_exists:
col.prop(self, "edges_V")
row.separator()
-
+
row.separator()
-
+
if not self.selection_U_exists:
if not ((self.selection_V_exists and not self.selection_V_is_closed) or (self.selection_V2_exists and not self.selection_V2_is_closed)):
col.prop(self, "cyclic_cross")
-
+
if not self.selection_V_exists:
if not ((self.selection_U_exists and not self.selection_U_is_closed) or (self.selection_U2_exists and not self.selection_U2_is_closed)):
col.prop(self, "cyclic_follow")
-
-
+
+
col.prop(self, "loops_on_strokes")
-
- col.prop(self, "automatic_join")
-
+
+ col.prop(self, "automatic_join")
+
if self.automatic_join:
row.separator()
col.separator()
row.separator()
col.prop(self, "join_stretch_factor")
-
-
-
+
+
+
#### Get an ordered list of a chain of vertices.
def get_ordered_verts(self, ob, all_selected_edges_idx, all_selected_verts_idx, first_vert_idx, middle_vertex_idx, closing_vert_idx):
# Order selected vertices.
verts_ordered = []
if closing_vert_idx != None:
verts_ordered.append(ob.data.vertices[closing_vert_idx])
-
+
verts_ordered.append(ob.data.vertices[first_vert_idx])
prev_v = first_vert_idx
prev_ed = None
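get_ordered_verts in the hunk above walks outward from a tip vertex, repeatedly looking for the selected edge that continues the chain and has not been used yet. Stripped of the bpy data access, the same chain walk looks like this (hypothetical helper on plain index pairs):

    def order_chain(edges, first_vert):
        """Order the vertices of an open chain given its (v1, v2) edge pairs."""
        remaining = list(edges)
        ordered = [first_vert]
        while remaining:
            for ed in remaining:
                if ordered[-1] in ed:
                    # Step to the other vertex of the matching edge.
                    ordered.append(ed[1] if ed[0] == ordered[-1] else ed[0])
                    remaining.remove(ed)
                    break
            else:
                break      # no edge continues the chain: stop
        return ordered

    assert order_chain([(5, 2), (9, 5), (2, 7)], 9) == [9, 5, 2, 7]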
@@ -270,27 +268,27 @@ class GPENCIL_OT_SURFSK_add_surface(bpy.types.Operator):
prev_ed = ob.data.edges[i]
else:
edges_non_matched += 1
-
+
if edges_non_matched == len(all_selected_edges_idx):
finish_while = True
-
+
if finish_while:
break
-
+
if closing_vert_idx != None:
verts_ordered.append(ob.data.vertices[closing_vert_idx])
-
+
if middle_vertex_idx != None:
verts_ordered.append(ob.data.vertices[middle_vertex_idx])
verts_ordered.reverse()
-
+
return tuple(verts_ordered)
-
-
+
+
#### Calculates length of a chain of points.
def get_chain_length(self, object, verts_ordered):
matrix = object.matrix_world
-
+
edges_lengths = []
edges_lengths_sum = 0
for i in range(0, len(verts_ordered)):
@@ -298,18 +296,18 @@ class GPENCIL_OT_SURFSK_add_surface(bpy.types.Operator):
prev_v_co = matrix * verts_ordered[i].co
else:
v_co = matrix * verts_ordered[i].co
-
+
v_difs = [prev_v_co[0] - v_co[0], prev_v_co[1] - v_co[1], prev_v_co[2] - v_co[2]]
edge_length = abs(sqrt(v_difs[0] * v_difs[0] + v_difs[1] * v_difs[1] + v_difs[2] * v_difs[2]))
-
+
edges_lengths.append(edge_length)
edges_lengths_sum += edge_length
-
+
prev_v_co = v_co
-
+
return edges_lengths, edges_lengths_sum
-
-
+
+
#### Calculates the proportions of the edges of a chain, relative to the full chain length.
def get_edges_proportions(self, edges_lengths, edges_lengths_sum, use_boundaries, fixed_edges_num):
edges_proportions = []
@@ -323,43 +321,43 @@ class GPENCIL_OT_SURFSK_add_surface(bpy.types.Operator):
for n in range(0, fixed_edges_num):
edges_proportions.append(1 / fixed_edges_num)
verts_count += 1
-
+
return edges_proportions
-
-
+
+
#### Calculates the angle between two pairs of points in space.
def orientation_difference(self, points_A_co, points_B_co): # each parameter should be a list with two elements, and each element should be an x,y,z coordinate.
vec_A = points_A_co[0] - points_A_co[1]
vec_B = points_B_co[0] - points_B_co[1]
-
+
angle = vec_A.angle(vec_B)
-
+
if angle > 0.5 * math.pi:
angle = abs(angle - math.pi)
-
+
return angle
-
-
-
+
+
+
#### Calculate which vert of the verts_idx list is nearest to the point_co coordinates, and the distance.
def shortest_distance(self, object, point_co, verts_idx):
matrix = object.matrix_world
-
+
for i in range(0, len(verts_idx)):
dist = (point_co - matrix * object.data.vertices[verts_idx[i]].co).length
if i == 0:
prev_dist = dist
nearest_vert_idx = verts_idx[i]
shortest_dist = dist
-
+
if dist < prev_dist:
prev_dist = dist
nearest_vert_idx = verts_idx[i]
shortest_dist = dist
-
+
return nearest_vert_idx, shortest_dist
-
-
+
+
#### Returns the index of the opposite vert tip in a chain, given a vert tip index and a multidimensional list with all pairs of tips.
def opposite_tip(self, vert_tip_idx, all_chains_tips_idx):
opposite_vert_tip_idx = None
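orientation_difference in the hunk above treats the two segments as undirected, so any angle larger than 90 degrees is folded back (an angle and pi minus that angle describe the same orientation difference). A standalone sketch with plain tuples instead of mathutils vectors:

    import math

    def orientation_difference(points_a, points_b):
        """Smallest angle (radians) between two undirected segments, each given as two 3D points."""
        vec_a = [points_a[0][k] - points_a[1][k] for k in range(3)]
        vec_b = [points_b[0][k] - points_b[1][k] for k in range(3)]
        dot = sum(a * b for a, b in zip(vec_a, vec_b))
        norms = math.sqrt(sum(a * a for a in vec_a)) * math.sqrt(sum(b * b for b in vec_b))
        angle = math.acos(max(-1.0, min(1.0, dot / norms)))
        return math.pi - angle if angle > 0.5 * math.pi else angle

    # Opposite directions still count as parallel (difference 0).
    assert orientation_difference(((0, 0, 0), (1, 0, 0)), ((1, 0, 0), (0, 0, 0))) < 1e-9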
@@ -368,26 +366,26 @@ class GPENCIL_OT_SURFSK_add_surface(bpy.types.Operator):
opposite_vert_tip_idx = all_chains_tips_idx[i][1]
if vert_tip_idx == all_chains_tips_idx[i][1]:
opposite_vert_tip_idx = all_chains_tips_idx[i][0]
-
+
return opposite_vert_tip_idx
-
-
-
+
+
+
#### Simplifies a spline and returns the new points coordinates.
def simplify_spline(self, spline_coords, segments_num):
simplified_spline = []
points_between_segments = round(len(spline_coords) / segments_num)
-
+
simplified_spline.append(spline_coords[0])
for i in range(1, segments_num):
simplified_spline.append(spline_coords[i * points_between_segments])
-
+
simplified_spline.append(spline_coords[len(spline_coords) - 1])
-
+
return simplified_spline
-
-
-
+
+
+
#### Cleans up the scene and restores it to its initial state, in case the script is interrupted in the middle of execution.
def cleanup_on_interruption(self):
# If the original strokes curve comes from conversion from grease pencil and wasn't made by hand, delete it.
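simplify_spline in the hunk above decimates a dense list of spline coordinates down to roughly segments_num spans: it keeps the first point, every round(len/segments_num)-th point, and the last point. The same decimation on a plain coordinate list (illustrative sketch only):

    def simplify_polyline(coords, segments_num):
        step = round(len(coords) / segments_num)
        simplified = [coords[0]]
        for i in range(1, segments_num):
            simplified.append(coords[i * step])
        simplified.append(coords[-1])
        return simplified

    pts = [(float(i), 0.0, 0.0) for i in range(21)]        # 21 samples along a line
    assert simplify_polyline(pts, 4) == [pts[0], pts[5], pts[10], pts[15], pts[20]]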
@@ -396,11 +394,11 @@ class GPENCIL_OT_SURFSK_add_surface(bpy.types.Operator):
bpy.ops.object.select_all('INVOKE_REGION_WIN', action='DESELECT')
bpy.data.objects[self.original_curve.name].select = True
bpy.context.scene.objects.active = bpy.data.objects[self.original_curve.name]
-
+
bpy.ops.object.delete()
except:
pass
-
+
bpy.ops.object.select_all('INVOKE_REGION_WIN', action='DESELECT')
bpy.data.objects[self.main_object.name].select = True
bpy.context.scene.objects.active = bpy.data.objects[self.main_object.name]
@@ -409,11 +407,11 @@ class GPENCIL_OT_SURFSK_add_surface(bpy.types.Operator):
bpy.data.objects[self.original_curve.name].select = True
bpy.data.objects[self.main_object.name].select = True
bpy.context.scene.objects.active = bpy.data.objects[self.main_object.name]
-
+
bpy.ops.object.editmode_toggle('INVOKE_REGION_WIN')
-
-
-
+
+
+
#### Returns a list with the coords of the points distributed over the splines passed to this method according to the proportions parameter.
def distribute_pts(self, surface_splines, proportions):
# Calculate the length of each final surface spline.
@@ -427,112 +425,112 @@ class GPENCIL_OT_SURFSK_add_surface(bpy.types.Operator):
prev_p = surface_splines[sp_idx].bezier_points[i]
else:
p = surface_splines[sp_idx].bezier_points[i]
-
+
edge_length = (prev_p.co - p.co).length
-
+
surface_splines_lengths[sp_idx] += edge_length
-
+
prev_p = p
-
-
+
+
# Calculate vertex positions with appropriate edge proportions, and ordered, for each spline.
for sp_idx in range(0, len(surface_splines)):
surface_splines_parsed.append([])
surface_splines_parsed[sp_idx].append(surface_splines[sp_idx].bezier_points[0].co)
-
+
prev_p_co = surface_splines[sp_idx].bezier_points[0].co
p_idx = 0
for prop_idx in range(len(proportions) - 1):
target_length = surface_splines_lengths[sp_idx] * proportions[prop_idx]
-
+
partial_segment_length = 0
-
-
+
+
finish_while = False
while True:
p_co = surface_splines[sp_idx].bezier_points[p_idx].co
-
+
new_dist = (prev_p_co - p_co).length
-
+
potential_segment_length = partial_segment_length + new_dist # The length the partial segment would have if it is still shorter than the target length.
-
-
+
+
if potential_segment_length < target_length: # If the potential is still shorter, keep adding.
partial_segment_length = potential_segment_length
-
+
p_idx += 1
prev_p_co = p_co
-
+
elif potential_segment_length > target_length: # If the potential is longer than the target, calculate the target (a point between the last two points), and assign.
remaining_dist = target_length - partial_segment_length
vec = p_co - prev_p_co
vec.normalize()
intermediate_co = prev_p_co + (vec * remaining_dist)
-
+
surface_splines_parsed[sp_idx].append(intermediate_co)
-
+
partial_segment_length += remaining_dist
prev_p_co = intermediate_co
-
+
finish_while = True
-
+
elif potential_segment_length == target_length: # If the potential is equal to the target, assign.
surface_splines_parsed[sp_idx].append(p_co)
-
+
prev_p_co = p_co
-
+
finish_while = True
-
+
if finish_while:
break
-
+
# last point of the spline
surface_splines_parsed[sp_idx].append(surface_splines[sp_idx].bezier_points[len(surface_splines[sp_idx].bezier_points) - 1].co)
-
-
+
+
return surface_splines_parsed
-
-
-
+
+
+
#### Counts the number of faces that belong to each edge.
def edge_face_count(self, ob):
ed_keys_count_dict = {}
-
+
for face in ob.data.polygons:
for ed_keys in face.edge_keys:
if not ed_keys in ed_keys_count_dict:
ed_keys_count_dict[ed_keys] = 1
else:
ed_keys_count_dict[ed_keys] += 1
-
-
+
+
edge_face_count = []
for i in range(len(ob.data.edges)):
edge_face_count.append(0)
-
+
for i in range(len(ob.data.edges)):
ed = ob.data.edges[i]
-
+
v1 = ed.vertices[0]
v2 = ed.vertices[1]
-
+
if (v1, v2) in ed_keys_count_dict:
edge_face_count[i] = ed_keys_count_dict[(v1, v2)]
elif (v2, v1) in ed_keys_count_dict:
edge_face_count[i] = ed_keys_count_dict[(v2, v1)]
-
-
+
+
return edge_face_count
-
-
-
+
+
+
#### Fills with faces all the selected vertices which form empty triangles or quads.
def fill_with_faces(self, object):
all_selected_verts_count = self.main_object_selected_verts_count
-
-
+
+
bpy.ops.object.mode_set('INVOKE_REGION_WIN', mode='OBJECT')
-
+
#### Calculate average length of selected edges.
all_selected_verts = []
original_sel_edges_count = 0
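distribute_pts in the hunk above resamples each spline at the requested edge proportions: it accumulates segment lengths until the running total reaches the next proportional target and, when the target falls inside a segment, interpolates a new point between the two surrounding points. A rough standalone version of that walk on a plain 3D polyline (not the add-on's exact code):

    import math

    def resample(points, proportions):
        """Return points along a 3D polyline at the given cumulative length fractions."""
        def dist(a, b):
            return math.sqrt(sum((a[k] - b[k]) ** 2 for k in range(3)))

        seg = [dist(points[i], points[i + 1]) for i in range(len(points) - 1)]
        total = sum(seg)
        out = [points[0]]
        walked = 0.0                                 # cumulative proportion handled so far
        for prop in proportions[:-1]:
            walked += prop
            target = walked * total                  # arc length where the next point belongs
            run = 0.0
            for i, length in enumerate(seg):
                if run + length >= target:
                    t = (target - run) / length      # interpolate inside this segment
                    a, b = points[i], points[i + 1]
                    out.append(tuple(a[k] + t * (b[k] - a[k]) for k in range(3)))
                    break
                run += length
        out.append(points[-1])
        return out

    line = [(0.0, 0.0, 0.0), (4.0, 0.0, 0.0)]
    assert resample(line, [0.25, 0.25, 0.5]) == [(0.0, 0.0, 0.0), (1.0, 0.0, 0.0),
                                                 (2.0, 0.0, 0.0), (4.0, 0.0, 0.0)]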
@@ -541,25 +539,25 @@ class GPENCIL_OT_SURFSK_add_surface(bpy.types.Operator):
coords = []
coords.append(object.data.vertices[ed.vertices[0]].co)
coords.append(object.data.vertices[ed.vertices[1]].co)
-
+
original_sel_edges_count += 1
-
+
if not ed.vertices[0] in all_selected_verts:
all_selected_verts.append(ed.vertices[0])
-
+
if not ed.vertices[1] in all_selected_verts:
all_selected_verts.append(ed.vertices[1])
-
-
+
+
tuple(all_selected_verts)
-
-
+
+
#### Check if there is any edge selected. If not, interrupt the script.
if original_sel_edges_count == 0 and all_selected_verts_count > 0:
return 0
-
-
-
+
+
+
#### Get all edges connected to selected verts.
all_edges_around_sel_verts = []
edges_connected_to_sel_verts = {}
@@ -567,140 +565,140 @@ class GPENCIL_OT_SURFSK_add_surface(bpy.types.Operator):
for ed_idx in range(len(object.data.edges)):
ed = object.data.edges[ed_idx]
include_edge = False
-
+
if ed.vertices[0] in all_selected_verts:
if not ed.vertices[0] in edges_connected_to_sel_verts:
edges_connected_to_sel_verts[ed.vertices[0]] = []
-
+
edges_connected_to_sel_verts[ed.vertices[0]].append(ed_idx)
include_edge = True
-
+
if ed.vertices[1] in all_selected_verts:
if not ed.vertices[1] in edges_connected_to_sel_verts:
edges_connected_to_sel_verts[ed.vertices[1]] = []
-
+
edges_connected_to_sel_verts[ed.vertices[1]].append(ed_idx)
include_edge = True
-
-
+
+
if include_edge == True:
all_edges_around_sel_verts.append(ed_idx)
-
-
+
+
# Get all connected verts to each vert.
if not ed.vertices[0] in verts_connected_to_every_vert:
verts_connected_to_every_vert[ed.vertices[0]] = []
-
+
if not ed.vertices[1] in verts_connected_to_every_vert:
verts_connected_to_every_vert[ed.vertices[1]] = []
-
+
verts_connected_to_every_vert[ed.vertices[0]].append(ed.vertices[1])
verts_connected_to_every_vert[ed.vertices[1]].append(ed.vertices[0])
-
-
-
-
+
+
+
+
#### Get all verts connected to faces.
all_verts_part_of_faces = []
all_edges_faces_count = []
all_edges_faces_count += self.edge_face_count(object)
-
+
# Get only the selected edges that have faces attached.
count_faces_of_edges_around_sel_verts = {}
selected_verts_with_faces = []
for ed_idx in all_edges_around_sel_verts:
count_faces_of_edges_around_sel_verts[ed_idx] = all_edges_faces_count[ed_idx]
-
+
if all_edges_faces_count[ed_idx] > 0:
ed = object.data.edges[ed_idx]
-
+
if not ed.vertices[0] in selected_verts_with_faces:
selected_verts_with_faces.append(ed.vertices[0])
-
+
if not ed.vertices[1] in selected_verts_with_faces:
selected_verts_with_faces.append(ed.vertices[1])
-
+
all_verts_part_of_faces.append(ed.vertices[0])
all_verts_part_of_faces.append(ed.vertices[1])
-
+
tuple(selected_verts_with_faces)
-
-
-
+
+
+
#### Discard unneeded verts from calculations.
participating_verts = []
movable_verts = []
for v_idx in all_selected_verts:
vert_has_edges_with_one_face = False
-
+
for ed_idx in edges_connected_to_sel_verts[v_idx]: # Check if the actual vert has at least one edge connected to only one face.
if count_faces_of_edges_around_sel_verts[ed_idx] == 1:
vert_has_edges_with_one_face = True
-
+
# If the vert has two or fewer edges connected and is not part of any face, or the vert is part of a face and at least one of its connected edges has only one face attached to it.
if (len(edges_connected_to_sel_verts[v_idx]) == 2 and not v_idx in all_verts_part_of_faces) or len(edges_connected_to_sel_verts[v_idx]) == 1 or (v_idx in all_verts_part_of_faces and vert_has_edges_with_one_face):
participating_verts.append(v_idx)
-
+
if not v_idx in all_verts_part_of_faces:
movable_verts.append(v_idx)
-
-
-
+
+
+
#### Remove from movable verts list those that are part of closed geometry (ie: triangles, quads)
for mv_idx in movable_verts:
freeze_vert = False
mv_connected_verts = verts_connected_to_every_vert[mv_idx]
-
+
for actual_v_idx in all_selected_verts:
count_shared_neighbors = 0
checked_verts = []
-
+
for mv_conn_v_idx in mv_connected_verts:
if mv_idx != actual_v_idx:
if mv_conn_v_idx in verts_connected_to_every_vert[actual_v_idx] and not mv_conn_v_idx in checked_verts:
count_shared_neighbors += 1
checked_verts.append(mv_conn_v_idx)
-
-
+
+
if actual_v_idx in mv_connected_verts:
freeze_vert = True
break
-
+
if count_shared_neighbors == 2:
freeze_vert = True
break
-
+
if freeze_vert:
break
-
+
if freeze_vert:
movable_verts.remove(mv_idx)
-
-
-
+
+
+
#### Calculate merge distance for participating verts.
shortest_edge_length = None
for ed in object.data.edges:
if ed.vertices[0] in movable_verts and ed.vertices[1] in movable_verts:
v1 = object.data.vertices[ed.vertices[0]]
v2 = object.data.vertices[ed.vertices[1]]
-
+
length = (v1.co - v2.co).length
-
+
if shortest_edge_length == None:
shortest_edge_length = length
else:
if length < shortest_edge_length:
shortest_edge_length = length
-
+
if shortest_edge_length != None:
edges_merge_distance = shortest_edge_length * 0.5
else:
edges_merge_distance = 0
-
-
-
-
+
+
+
+
#### Get together the verts near enough. They will be merged later.
remaining_verts = []
remaining_verts += participating_verts
@@ -708,29 +706,29 @@ class GPENCIL_OT_SURFSK_add_surface(bpy.types.Operator):
if v1_idx in remaining_verts and v1_idx in movable_verts:
verts_to_merge = []
coords_verts_to_merge = {}
-
+
verts_to_merge.append(v1_idx)
-
+
v1_co = object.data.vertices[v1_idx].co
coords_verts_to_merge[v1_idx] = (v1_co[0], v1_co[1], v1_co[2])
-
-
+
+
for v2_idx in remaining_verts:
if v1_idx != v2_idx:
v2_co = object.data.vertices[v2_idx].co
-
+
dist = (v1_co - v2_co).length
-
+
if dist <= edges_merge_distance: # Add the verts which are near enough.
verts_to_merge.append(v2_idx)
-
+
coords_verts_to_merge[v2_idx] = (v2_co[0], v2_co[1], v2_co[2])
-
-
+
+
for vm_idx in verts_to_merge:
remaining_verts.remove(vm_idx)
-
-
+
+
if len(verts_to_merge) > 1:
# Calculate middle point of the verts to merge.
sum_x_co = 0
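The loop above greedily collects, for each seed vert, every other movable vert lying within edges_merge_distance, so the whole group can be snapped to one position and welded by the later Remove Doubles call. The grouping and centroid snap, reduced to plain coordinates (an assumed simplification of the add-on's flow):

    import math

    def merge_close_points(coords, merge_distance):
        """Group points within merge_distance of a seed and snap each group to its centroid."""
        remaining = list(range(len(coords)))
        snapped = list(coords)
        while remaining:
            seed = remaining.pop(0)
            group = [seed]
            for j in list(remaining):
                d = math.sqrt(sum((coords[seed][k] - coords[j][k]) ** 2 for k in range(3)))
                if d <= merge_distance:
                    group.append(j)
                    remaining.remove(j)
            centre = tuple(sum(coords[i][k] for i in group) / len(group) for k in range(3))
            for i in group:
                snapped[i] = centre
        return snapped

    pts = [(0.0, 0.0, 0.0), (0.1, 0.0, 0.0), (5.0, 0.0, 0.0)]
    out = merge_close_points(pts, 0.5)
    assert out[0] == out[1] and out[2] == (5.0, 0.0, 0.0)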
@@ -740,16 +738,16 @@ class GPENCIL_OT_SURFSK_add_surface(bpy.types.Operator):
for i in range(len(verts_to_merge)):
if verts_to_merge[i] in movable_verts:
v_co = object.data.vertices[verts_to_merge[i]].co
-
+
sum_x_co += v_co[0]
sum_y_co += v_co[1]
sum_z_co += v_co[2]
-
+
movable_verts_to_merge_count += 1
-
+
middle_point_co = [sum_x_co / movable_verts_to_merge_count, sum_y_co / movable_verts_to_merge_count, sum_z_co / movable_verts_to_merge_count]
-
-
+
+
# Check if any vert to be merged is not movable.
shortest_dist = None
are_verts_not_movable = False
@@ -758,13 +756,13 @@ class GPENCIL_OT_SURFSK_add_surface(bpy.types.Operator):
if v_merge_idx in participating_verts and not v_merge_idx in movable_verts:
are_verts_not_movable = True
verts_not_movable.append(v_merge_idx)
-
+
if are_verts_not_movable:
# Get the vert connected to faces, that is nearest to the middle point of the movable verts.
shortest_dist = None
for vcf_idx in verts_not_movable:
dist = abs((object.data.vertices[vcf_idx].co - mathutils.Vector(middle_point_co)).length)
-
+
if shortest_dist == None:
shortest_dist = dist
nearest_vert_idx = vcf_idx
@@ -772,46 +770,46 @@ class GPENCIL_OT_SURFSK_add_surface(bpy.types.Operator):
if dist < shortest_dist:
shortest_dist = dist
nearest_vert_idx = vcf_idx
-
+
coords = object.data.vertices[nearest_vert_idx].co
- target_point_co = [coords[0], coords[1], coords[2]]
+ target_point_co = [coords[0], coords[1], coords[2]]
else:
target_point_co = middle_point_co
-
-
+
+
# Move verts to merge to the middle position.
for v_merge_idx in verts_to_merge:
if v_merge_idx in movable_verts: # Only move the verts that are not part of faces.
object.data.vertices[v_merge_idx].co[0] = target_point_co[0]
object.data.vertices[v_merge_idx].co[1] = target_point_co[1]
object.data.vertices[v_merge_idx].co[2] = target_point_co[2]
-
-
-
+
+
+
#### Perform "Remove Doubles" to weld all the disconnected verts
bpy.ops.object.mode_set('INVOKE_REGION_WIN', mode='EDIT')
bpy.ops.mesh.remove_doubles(threshold=0.0001)
-
+
bpy.ops.object.mode_set('INVOKE_REGION_WIN', mode='OBJECT')
-
-
+
+
#### Get all the definitive selected edges, after welding.
selected_edges = []
edges_per_vert = {} # Number of faces of each selected edge.
for ed in object.data.edges:
if object.data.vertices[ed.vertices[0]].select and object.data.vertices[ed.vertices[1]].select:
selected_edges.append(ed.index)
-
+
# Save all the edges that belong to each vertex.
if not ed.vertices[0] in edges_per_vert:
edges_per_vert[ed.vertices[0]] = []
-
+
if not ed.vertices[1] in edges_per_vert:
edges_per_vert[ed.vertices[1]] = []
-
+
edges_per_vert[ed.vertices[0]].append(ed.index)
edges_per_vert[ed.vertices[1]].append(ed.index)
-
+
# Check if all the edges connected to each vert have two faces attached to them. To discard them later and make calculations faster.
a = []
a += self.edge_face_count(object)
@@ -819,25 +817,25 @@ class GPENCIL_OT_SURFSK_add_surface(bpy.types.Operator):
verts_surrounded_by_faces = {}
for v_idx in edges_per_vert:
edges = edges_per_vert[v_idx]
-
+
edges_with_two_faces_count = 0
for ed_idx in edges_per_vert[v_idx]:
if a[ed_idx] == 2:
edges_with_two_faces_count += 1
-
+
if edges_with_two_faces_count == len(edges_per_vert[v_idx]):
verts_surrounded_by_faces[v_idx] = True
else:
verts_surrounded_by_faces[v_idx] = False
-
-
+
+
#### Get all the selected vertices.
selected_verts_idx = []
for v in object.data.vertices:
if v.select:
selected_verts_idx.append(v.index)
-
-
+
+
#### Get all the faces of the object.
all_object_faces_verts_idx = []
for face in object.data.polygons:
@@ -845,44 +843,44 @@ class GPENCIL_OT_SURFSK_add_surface(bpy.types.Operator):
face_verts.append(face.vertices[0])
face_verts.append(face.vertices[1])
face_verts.append(face.vertices[2])
-
+
if len(face.vertices) == 4:
face_verts.append(face.vertices[3])
-
+
all_object_faces_verts_idx.append(face_verts)
-
-
+
+
#### Deselect all vertices.
bpy.ops.object.mode_set('INVOKE_REGION_WIN', mode='EDIT')
bpy.ops.mesh.select_all('INVOKE_REGION_WIN', action='DESELECT')
bpy.ops.object.mode_set('INVOKE_REGION_WIN', mode='OBJECT')
-
-
-
+
+
+
#### Make a dictionary with the verts related to each vert.
related_key_verts = {}
for ed_idx in selected_edges:
ed = object.data.edges[ed_idx]
-
+
if not verts_surrounded_by_faces[ed.vertices[0]]:
if not ed.vertices[0] in related_key_verts:
related_key_verts[ed.vertices[0]] = []
-
+
if not ed.vertices[1] in related_key_verts[ed.vertices[0]]:
related_key_verts[ed.vertices[0]].append(ed.vertices[1])
-
+
if not verts_surrounded_by_faces[ed.vertices[1]]:
if not ed.vertices[1] in related_key_verts:
related_key_verts[ed.vertices[1]] = []
-
+
if not ed.vertices[0] in related_key_verts[ed.vertices[1]]:
related_key_verts[ed.vertices[1]].append(ed.vertices[0])
-
-
-
+
+
+
#### Get groups of verts forming each face.
- faces_verts_idx = []
- for v1 in related_key_verts: # verts-1 ....
+ faces_verts_idx = []
+ for v1 in related_key_verts: # verts-1 ....
for v2 in related_key_verts: # verts-2
if v1 != v2:
related_verts_in_common = []
@@ -891,57 +889,57 @@ class GPENCIL_OT_SURFSK_add_surface(bpy.types.Operator):
for rel_v1 in related_key_verts[v1]:
if rel_v1 in related_key_verts[v2]: # Check if related verts of verts-1 are related verts of verts-2.
related_verts_in_common.append(rel_v1)
-
+
if v2 in related_key_verts[v1]:
v2_in_rel_v1 = True
-
+
if v1 in related_key_verts[v2]:
v1_in_rel_v2 = True
-
-
+
+
repeated_face = False
# If two verts have two related verts in common, they form a quad.
if len(related_verts_in_common) == 2:
# Check if the face is already saved.
all_faces_to_check_idx = faces_verts_idx + all_object_faces_verts_idx
-
-
+
+
for f_verts in all_faces_to_check_idx:
repeated_verts = 0
-
+
if len(f_verts) == 4:
if v1 in f_verts: repeated_verts += 1
if v2 in f_verts: repeated_verts += 1
if related_verts_in_common[0] in f_verts: repeated_verts += 1
if related_verts_in_common[1] in f_verts: repeated_verts += 1
-
+
if repeated_verts == len(f_verts):
repeated_face = True
break
-
+
if not repeated_face:
faces_verts_idx.append([v1, related_verts_in_common[0], v2, related_verts_in_common[1]])
-
+
elif v2_in_rel_v1 and v1_in_rel_v2 and len(related_verts_in_common) == 1: # If Two verts have one related vert in common and they are related to each other, they form a triangle.
# Check if the face is already saved.
all_faces_to_check_idx = faces_verts_idx + all_object_faces_verts_idx
-
+
for f_verts in all_faces_to_check_idx:
repeated_verts = 0
-
+
if len(f_verts) == 3:
if v1 in f_verts: repeated_verts += 1
if v2 in f_verts: repeated_verts += 1
if related_verts_in_common[0] in f_verts: repeated_verts += 1
-
+
if repeated_verts == len(f_verts):
repeated_face = True
break
-
+
if not repeated_face:
faces_verts_idx.append([v1, related_verts_in_common[0], v2])
-
-
+
+
#### Keep only the faces that don't overlap by ignoring quads that overlap with two adjacent triangles.
faces_to_not_include_idx = [] # Indices of faces_verts_idx to eliminate.
all_faces_to_check_idx = faces_verts_idx + all_object_faces_verts_idx
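The nested loops above derive faces from the related_key_verts adjacency map: two verts that share exactly two neighbours span a quad, one shared neighbour plus mutual adjacency gives a triangle, and candidates whose vertex set already exists are discarded. The quad rule in isolation, on a plain adjacency dict (hypothetical helper):

    def find_quads(adjacency):
        """Return quads [v1, shared0, v2, shared1] for vert pairs sharing exactly two neighbours."""
        quads = []
        seen = set()
        for v1 in adjacency:
            for v2 in adjacency:
                if v1 == v2:
                    continue
                common = [n for n in adjacency[v1] if n in adjacency[v2]]
                if len(common) != 2:
                    continue
                key = frozenset((v1, v2) + tuple(common))    # skip faces already found
                if key not in seen:
                    seen.add(key)
                    quads.append([v1, common[0], v2, common[1]])
        return quads

    # A single square 0-1-2-3: opposite corners share exactly the two other corners.
    adjacency = {0: [1, 3], 1: [0, 2], 2: [1, 3], 3: [0, 2]}
    quads = find_quads(adjacency)
    assert len(quads) == 1 and sorted(quads[0]) == [0, 1, 2, 3]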
@@ -949,75 +947,75 @@ class GPENCIL_OT_SURFSK_add_surface(bpy.types.Operator):
for t in range(len(all_faces_to_check_idx)):
if i != t:
verts_in_common = 0
-
+
if len(faces_verts_idx[i]) == 4 and len(all_faces_to_check_idx[t]) == 3:
for v_idx in all_faces_to_check_idx[t]:
if v_idx in faces_verts_idx[i]:
verts_in_common += 1
-
+
if verts_in_common == 3: # If it doesn't have all its vertices repeated in the other face.
if not i in faces_to_not_include_idx:
faces_to_not_include_idx.append(i)
-
-
+
+
#### Build faces discarding the ones in faces_to_not_include.
me = object.data
bm = bmesh.new()
bm.from_mesh(me)
-
+
num_faces_created = 0
for i in range(len(faces_verts_idx)):
if not i in faces_to_not_include_idx:
bm.faces.new([ bm.verts[v] for v in faces_verts_idx[i] ])
-
+
num_faces_created += 1
-
+
bm.to_mesh(me)
bm.free()
-
-
-
+
+
+
for v_idx in selected_verts_idx:
self.main_object.data.vertices[v_idx].select = True
-
-
+
+
bpy.ops.object.mode_set('INVOKE_REGION_WIN', mode='EDIT')
bpy.ops.mesh.normals_make_consistent(inside=False)
bpy.ops.object.mode_set('INVOKE_REGION_WIN', mode='OBJECT')
-
-
+
+
return num_faces_created
-
-
-
+
+
+
#### Crosshatch skinning.
def crosshatch_surface_invoke(self, ob_original_splines):
self.is_crosshatch = False
self.crosshatch_merge_distance = 0
-
-
+
+
objects_to_delete = [] # duplicated strokes to be deleted.
-
+
# If the main object uses modifiers, deactivate them temporarily until the surface is joined (without this, merging the surface verts with the main object doesn't work well).
self.modifiers_prev_viewport_state = []
if len(self.main_object.modifiers) > 0:
for m_idx in range(len(self.main_object.modifiers)):
self.modifiers_prev_viewport_state.append(self.main_object.modifiers[m_idx].show_viewport)
-
+
self.main_object.modifiers[m_idx].show_viewport = False
-
-
+
+
bpy.ops.object.select_all('INVOKE_REGION_WIN', action='DESELECT')
bpy.data.objects[ob_original_splines.name].select = True
bpy.context.scene.objects.active = bpy.data.objects[ob_original_splines.name]
-
-
+
+
if len(ob_original_splines.data.splines) >= 2:
bpy.ops.object.duplicate('INVOKE_REGION_WIN')
ob_splines = bpy.context.object
ob_splines.name = "SURFSKIO_NE_STR"
-
-
+
+
#### Get an estimated merge distance (sum up the distances from the first point to all other points, then average them and then divide them).
first_point_dist_sum = 0
first_dist = 0
@@ -1025,312 +1023,312 @@ class GPENCIL_OT_SURFSK_add_surface(bpy.types.Operator):
coords_first_pt = ob_splines.data.splines[0].bezier_points[0].co
for i in range(len(ob_splines.data.splines)):
sp = ob_splines.data.splines[i]
-
+
if coords_first_pt != sp.bezier_points[0].co:
first_dist = (coords_first_pt - sp.bezier_points[0].co).length
-
+
if coords_first_pt != sp.bezier_points[len(sp.bezier_points) - 1].co:
second_dist = (coords_first_pt - sp.bezier_points[len(sp.bezier_points) - 1].co).length
-
+
first_point_dist_sum += first_dist + second_dist
-
-
+
+
if i == 0:
if first_dist != 0:
shortest_dist = first_dist
elif second_dist != 0:
shortest_dist = second_dist
-
-
+
+
if shortest_dist > first_dist and first_dist != 0:
shortest_dist = first_dist
-
+
if shortest_dist > second_dist and second_dist != 0:
shortest_dist = second_dist
-
-
+
+
self.crosshatch_merge_distance = shortest_dist / 20
-
-
-
+
+
+
#### Recalculation of merge distance.
-
+
bpy.ops.object.duplicate('INVOKE_REGION_WIN')
-
+
ob_calc_merge_dist = bpy.context.object
ob_calc_merge_dist.name = "SURFSKIO_CALC_TMP"
-
+
objects_to_delete.append(ob_calc_merge_dist)
-
-
-
+
+
+
#### Smooth out strokes a little to improve crosshatch detection.
bpy.ops.object.editmode_toggle('INVOKE_REGION_WIN')
bpy.ops.curve.select_all('INVOKE_REGION_WIN', action='SELECT')
-
+
for i in range(4):
bpy.ops.curve.smooth('INVOKE_REGION_WIN')
-
+
bpy.ops.curve.select_all('INVOKE_REGION_WIN', action='DESELECT')
bpy.ops.object.editmode_toggle('INVOKE_REGION_WIN')
-
-
-
+
+
+
#### Convert curves into mesh.
ob_calc_merge_dist.data.resolution_u = 12
bpy.ops.object.convert(target='MESH', keep_original=False)
-
+
# Find "intersection-nodes".
bpy.ops.object.editmode_toggle('INVOKE_REGION_WIN')
bpy.ops.mesh.select_all('INVOKE_REGION_WIN', action='SELECT')
bpy.ops.mesh.remove_doubles('INVOKE_REGION_WIN', threshold=self.crosshatch_merge_distance)
bpy.ops.mesh.select_all('INVOKE_REGION_WIN', action='DESELECT')
bpy.ops.object.editmode_toggle('INVOKE_REGION_WIN')
-
+
# Remove verts with less than three edges.
verts_edges_count = {}
for ed in ob_calc_merge_dist.data.edges:
v = ed.vertices
-
+
if v[0] not in verts_edges_count:
verts_edges_count[v[0]] = 0
-
+
if v[1] not in verts_edges_count:
verts_edges_count[v[1]] = 0
-
+
verts_edges_count[v[0]] += 1
verts_edges_count[v[1]] += 1
-
+
nodes_verts_coords = []
for v_idx in verts_edges_count:
v = ob_calc_merge_dist.data.vertices[v_idx]
-
+
if verts_edges_count[v_idx] < 3:
v.select = True
-
-
+
+
# Remove them.
bpy.ops.object.editmode_toggle('INVOKE_REGION_WIN')
bpy.ops.mesh.delete('INVOKE_REGION_WIN', type='VERT')
bpy.ops.mesh.select_all('INVOKE_REGION_WIN', action='SELECT')
-
+
# Remove doubles to discard very near verts from calculations of distance.
bpy.ops.mesh.remove_doubles('INVOKE_REGION_WIN', threshold=self.crosshatch_merge_distance * 4.0)
bpy.ops.object.editmode_toggle('INVOKE_REGION_WIN')
-
+
# Get all coords of the resulting nodes.
nodes_verts_coords = [(v.co[0], v.co[1], v.co[2]) for v in ob_calc_merge_dist.data.vertices]
-
+
#### Check if the strokes are a crosshatch.
if len(nodes_verts_coords) >= 3:
self.is_crosshatch = True
-
+
shortest_dist = None
for co_1 in nodes_verts_coords:
for co_2 in nodes_verts_coords:
if co_1 != co_2:
dist = (mathutils.Vector(co_1) - mathutils.Vector(co_2)).length
-
+
if shortest_dist != None:
if dist < shortest_dist:
shortest_dist = dist
else:
shortest_dist = dist
-
+
self.crosshatch_merge_distance = shortest_dist / 3
-
-
+
+
bpy.ops.object.select_all('INVOKE_REGION_WIN', action='DESELECT')
bpy.data.objects[ob_splines.name].select = True
bpy.context.scene.objects.active = bpy.data.objects[ob_splines.name]
-
+
#### Deselect all points.
bpy.ops.object.editmode_toggle('INVOKE_REGION_WIN')
bpy.ops.curve.select_all('INVOKE_REGION_WIN', action='DESELECT')
bpy.ops.object.editmode_toggle('INVOKE_REGION_WIN')
-
-
-
+
+
+
#### Smooth splines in a localized way, to eliminate saw-tooth-like shapes when there are many points.
for sp in ob_splines.data.splines:
angle_sum = 0
-
+
angle_limit = 2 # Degrees
for t in range(len(sp.bezier_points)):
if t <= len(sp.bezier_points) - 3: # Because each iteration checks the "next two points" of the current one; this way it doesn't go out of range.
p1 = sp.bezier_points[t]
p2 = sp.bezier_points[t + 1]
p3 = sp.bezier_points[t + 2]
-
+
vec_1 = p1.co - p2.co
vec_2 = p2.co - p3.co
-
+
if p2.co != p1.co and p2.co != p3.co:
angle = vec_1.angle(vec_2)
angle_sum += degrees(angle)
-
+
if angle_sum >= angle_limit: # If the sum of angles is greater than the limit.
if (p1.co - p2.co).length <= self.crosshatch_merge_distance:
p1.select_control_point = True; p1.select_left_handle = True; p1.select_right_handle = True
p2.select_control_point = True; p2.select_left_handle = True; p2.select_right_handle = True
-
+
if (p1.co - p2.co).length <= self.crosshatch_merge_distance:
p3.select_control_point = True; p3.select_left_handle = True; p3.select_right_handle = True
-
+
angle_sum = 0
-
+
sp.bezier_points[0].select_control_point = False
sp.bezier_points[0].select_left_handle = False
sp.bezier_points[0].select_right_handle = False
-
+
sp.bezier_points[len(sp.bezier_points) - 1].select_control_point = False
sp.bezier_points[len(sp.bezier_points) - 1].select_left_handle = False
sp.bezier_points[len(sp.bezier_points) - 1].select_right_handle = False
-
-
-
+
+
+
#### Smooth out strokes a little to improve crosshatch detection.
bpy.ops.object.editmode_toggle('INVOKE_REGION_WIN')
-
+
for i in range(15):
bpy.ops.curve.smooth('INVOKE_REGION_WIN')
-
+
bpy.ops.curve.select_all('INVOKE_REGION_WIN', action='DESELECT')
bpy.ops.object.editmode_toggle('INVOKE_REGION_WIN')
-
-
-
-
+
+
+
+
#### Simplify the splines.
for sp in ob_splines.data.splines:
angle_sum = 0
-
+
sp.bezier_points[0].select_control_point = True
sp.bezier_points[0].select_left_handle = True
sp.bezier_points[0].select_right_handle = True
-
+
sp.bezier_points[len(sp.bezier_points) - 1].select_control_point = True
sp.bezier_points[len(sp.bezier_points) - 1].select_left_handle = True
sp.bezier_points[len(sp.bezier_points) - 1].select_right_handle = True
-
-
+
+
angle_limit = 15 # Degrees
for t in range(len(sp.bezier_points)):
if t <= len(sp.bezier_points) - 3: # Because each iteration checks the "next two points" of the current one; this way it doesn't go out of range.
p1 = sp.bezier_points[t]
p2 = sp.bezier_points[t + 1]
p3 = sp.bezier_points[t + 2]
-
+
vec_1 = p1.co - p2.co
vec_2 = p2.co - p3.co
-
+
if p2.co != p1.co and p2.co != p3.co:
angle = vec_1.angle(vec_2)
angle_sum += degrees(angle)
-
+
if angle_sum >= angle_limit: # If the sum of angles is greater than the limit.
p1.select_control_point = True; p1.select_left_handle = True; p1.select_right_handle = True
p2.select_control_point = True; p2.select_left_handle = True; p2.select_right_handle = True
p3.select_control_point = True; p3.select_left_handle = True; p3.select_right_handle = True
-
+
angle_sum = 0
-
-
-
+
+
+
bpy.ops.object.editmode_toggle('INVOKE_REGION_WIN')
-
+
bpy.ops.curve.select_all(action = 'INVERT')
-
+
bpy.ops.curve.delete(type='VERT')
bpy.ops.object.editmode_toggle('INVOKE_REGION_WIN')
-
-
-
+
+
+
objects_to_delete.append(ob_splines)
-
-
+
+
bpy.ops.object.editmode_toggle('INVOKE_REGION_WIN')
bpy.ops.curve.select_all('INVOKE_REGION_WIN', action='DESELECT')
bpy.ops.object.editmode_toggle('INVOKE_REGION_WIN')
-
-
+
+
#### Check if the strokes are a crosshatch.
if self.is_crosshatch:
all_points_coords = []
for i in range(len(ob_splines.data.splines)):
all_points_coords.append([])
-
+
all_points_coords[i] = [mathutils.Vector((x, y, z)) for x, y, z in [bp.co for bp in ob_splines.data.splines[i].bezier_points]]
-
-
+
+
all_intersections = []
checked_splines = []
for i in range(len(all_points_coords)):
-
+
for t in range(len(all_points_coords[i]) - 1):
bp1_co = all_points_coords[i][t]
bp2_co = all_points_coords[i][t + 1]
-
+
for i2 in range(len(all_points_coords)):
if i != i2 and not i2 in checked_splines:
for t2 in range(len(all_points_coords[i2]) - 1):
bp3_co = all_points_coords[i2][t2]
bp4_co = all_points_coords[i2][t2 + 1]
-
-
+
+
intersec_coords = mathutils.geometry.intersect_line_line(bp1_co, bp2_co, bp3_co, bp4_co)
-
+
if intersec_coords != None:
dist = (intersec_coords[0] - intersec_coords[1]).length
-
+
if dist <= self.crosshatch_merge_distance * 1.5:
temp_co, percent1 = mathutils.geometry.intersect_point_line(intersec_coords[0], bp1_co, bp2_co)
-
+
if (percent1 >= -0.02 and percent1 <= 1.02):
temp_co, percent2 = mathutils.geometry.intersect_point_line(intersec_coords[1], bp3_co, bp4_co)
if (percent2 >= -0.02 and percent2 <= 1.02):
all_intersections.append((i, t, percent1, ob_splines.matrix_world * intersec_coords[0])) # Format: spline index, first point index from corresponding segment, percentage from first point of actual segment, coords of intersection point.
all_intersections.append((i2, t2, percent2, ob_splines.matrix_world * intersec_coords[1]))
-
-
-
- checked_splines.append(i)
-
-
+
+
+
+ checked_splines.append(i)
+
+
            all_intersections.sort(key = operator.itemgetter(0,1,2)) # Sort the list by spline, then by first point index of the segment, and then by percentage from the first point of the segment: elements 0, 1 and 2 respectively.
-
-
-
+
+
+
self.crosshatch_strokes_coords = {}
for i in range(len(all_intersections)):
if not all_intersections[i][0] in self.crosshatch_strokes_coords:
self.crosshatch_strokes_coords[all_intersections[i][0]] = []
-
+
self.crosshatch_strokes_coords[all_intersections[i][0]].append(all_intersections[i][3]) # Save intersection coords.
-
+
else:
self.is_crosshatch = False
-
-
+
+
#### Delete all duplicates.
for o in objects_to_delete:
bpy.ops.object.select_all('INVOKE_REGION_WIN', action='DESELECT')
bpy.data.objects[o.name].select = True
bpy.context.scene.objects.active = bpy.data.objects[o.name]
bpy.ops.object.delete()
-
-
+
+
        #### If the main object has modifiers, restore their viewport visibility to what it was before the forced deactivation above.
if len(self.main_object.modifiers) > 0:
for m_idx in range(len(self.main_object.modifiers)):
self.main_object.modifiers[m_idx].show_viewport = self.modifiers_prev_viewport_state[m_idx]
-
-
-
+
+
+
return
-
-
-
+
+
+
#### Part of the Crosshatch process that is repeated when the operator is tweaked.
def crosshatch_surface_execute(self):
        # If the main object uses modifiers, deactivate them temporarily until the surface is joined (without this, merging the surface verts with the main object doesn't work well).
@@ -1338,54 +1336,54 @@ class GPENCIL_OT_SURFSK_add_surface(bpy.types.Operator):
if len(self.main_object.modifiers) > 0:
for m_idx in range(len(self.main_object.modifiers)):
self.modifiers_prev_viewport_state.append(self.main_object.modifiers[m_idx].show_viewport)
-
+
self.main_object.modifiers[m_idx].show_viewport = False
-
-
+
+
bpy.ops.object.editmode_toggle('INVOKE_REGION_WIN')
-
-
-
+
+
+
me_name = "SURFSKIO_STK_TMP"
me = bpy.data.meshes.new(me_name)
-
+
all_verts_coords = []
all_edges = []
for st_idx in self.crosshatch_strokes_coords:
for co_idx in range(len(self.crosshatch_strokes_coords[st_idx])):
coords = self.crosshatch_strokes_coords[st_idx][co_idx]
-
+
all_verts_coords.append(coords)
-
+
if co_idx > 0:
all_edges.append((len(all_verts_coords) - 2, len(all_verts_coords) - 1))
-
-
+
+
me.from_pydata(all_verts_coords, all_edges, [])
-
+
me.update()
-
+
ob = bpy.data.objects.new(me_name, me)
ob.data = me
bpy.context.scene.objects.link(ob)
-
-
+
+
bpy.ops.object.select_all('INVOKE_REGION_WIN', action='DESELECT')
bpy.data.objects[ob.name].select = True
bpy.context.scene.objects.active = bpy.data.objects[ob.name]
-
-
+
+
        #### Snap each vert and its nearest vert together at their middle position.
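        # Each crossing was recorded once per spline involved, so it exists here as two
        # nearly coincident verts; snapping each such pair to its midpoint lets the
        # remove_doubles below weld them into a single vert per crossing.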
verts = ob.data.vertices
checked_verts = []
for i in range(len(verts)):
shortest_dist = None
-
+
if not i in checked_verts:
for t in range(len(verts)):
if i != t and not t in checked_verts:
dist = (verts[i].co - verts[t].co).length
-
+
if shortest_dist != None:
if dist < shortest_dist:
shortest_dist = dist
@@ -1393,61 +1391,61 @@ class GPENCIL_OT_SURFSK_add_surface(bpy.types.Operator):
else:
shortest_dist = dist
nearest_vert = t
-
+
middle_location = (verts[i].co + verts[nearest_vert].co) / 2
-
+
verts[i].co = middle_location
verts[nearest_vert].co = middle_location
-
+
checked_verts.append(i)
checked_verts.append(nearest_vert)
-
-
-
-
+
+
+
+
#### Calculate average length between all the generated edges.
ob = bpy.context.object
lengths_sum = 0
for ed in ob.data.edges:
v1 = ob.data.vertices[ed.vertices[0]]
v2 = ob.data.vertices[ed.vertices[1]]
-
+
lengths_sum += (v1.co - v2.co).length
-
+
edges_count = len(ob.data.edges)
-
+
average_edge_length = lengths_sum / edges_count
-
-
+
+
bpy.ops.object.editmode_toggle('INVOKE_REGION_WIN')
bpy.ops.mesh.select_all('INVOKE_REGION_WIN', action='SELECT')
bpy.ops.mesh.remove_doubles('INVOKE_REGION_WIN', threshold=average_edge_length / 15.0)
bpy.ops.object.editmode_toggle('INVOKE_REGION_WIN')
-
+
final_points_ob = bpy.context.scene.objects.active
-
-
+
+
#### Make a dictionary with the verts related to each vert.
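        # related_key_verts maps every vert index to the verts it shares an edge with.
        # The face detection below uses it as follows: two verts that have exactly two
        # related verts in common define a quad (v1, common1, v2, common2), while two
        # verts that are related to each other and share exactly one related vert define
        # a triangle.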
related_key_verts = {}
for ed in final_points_ob.data.edges:
if not ed.vertices[0] in related_key_verts:
related_key_verts[ed.vertices[0]] = []
-
+
if not ed.vertices[1] in related_key_verts:
related_key_verts[ed.vertices[1]] = []
-
-
+
+
if not ed.vertices[1] in related_key_verts[ed.vertices[0]]:
related_key_verts[ed.vertices[0]].append(ed.vertices[1])
-
+
if not ed.vertices[0] in related_key_verts[ed.vertices[1]]:
related_key_verts[ed.vertices[1]].append(ed.vertices[0])
-
-
-
+
+
+
#### Get groups of verts forming each face.
- faces_verts_idx = []
- for v1 in related_key_verts: # verts-1 ....
+ faces_verts_idx = []
+ for v1 in related_key_verts: # verts-1 ....
for v2 in related_key_verts: # verts-2
if v1 != v2:
related_verts_in_common = []
@@ -1456,76 +1454,76 @@ class GPENCIL_OT_SURFSK_add_surface(bpy.types.Operator):
for rel_v1 in related_key_verts[v1]:
if rel_v1 in related_key_verts[v2]: # Check if related verts of verts-1 are related verts of verts-2.
related_verts_in_common.append(rel_v1)
-
+
if v2 in related_key_verts[v1]:
v2_in_rel_v1 = True
-
+
if v1 in related_key_verts[v2]:
v1_in_rel_v2 = True
-
-
+
+
repeated_face = False
# If two verts have two related verts in common, they form a quad.
if len(related_verts_in_common) == 2:
# Check if the face is already saved.
for f_verts in faces_verts_idx:
repeated_verts = 0
-
+
if len(f_verts) == 4:
if v1 in f_verts: repeated_verts += 1
if v2 in f_verts: repeated_verts += 1
if related_verts_in_common[0] in f_verts: repeated_verts += 1
if related_verts_in_common[1] in f_verts: repeated_verts += 1
-
+
if repeated_verts == len(f_verts):
repeated_face = True
break
-
+
if not repeated_face:
faces_verts_idx.append([v1, related_verts_in_common[0], v2, related_verts_in_common[1]])
-
+
                    elif v2_in_rel_v1 and v1_in_rel_v2 and len(related_verts_in_common) == 1: # If two verts have one related vert in common and they are related to each other, they form a triangle.
# Check if the face is already saved.
for f_verts in faces_verts_idx:
repeated_verts = 0
-
+
if len(f_verts) == 3:
if v1 in f_verts: repeated_verts += 1
if v2 in f_verts: repeated_verts += 1
if related_verts_in_common[0] in f_verts: repeated_verts += 1
-
+
if repeated_verts == len(f_verts):
repeated_face = True
break
-
+
if not repeated_face:
faces_verts_idx.append([v1, related_verts_in_common[0], v2])
-
-
+
+
#### Keep only the faces that don't overlap by ignoring quads that overlap with two adjacent triangles.
faces_to_not_include_idx = [] # Indices of faces_verts_idx to eliminate.
for i in range(len(faces_verts_idx)):
for t in range(len(faces_verts_idx)):
if i != t:
verts_in_common = 0
-
+
if len(faces_verts_idx[i]) == 4 and len(faces_verts_idx[t]) == 3:
for v_idx in faces_verts_idx[t]:
if v_idx in faces_verts_idx[i]:
verts_in_common += 1
-
+
                            if verts_in_common == 3: # The triangle shares all three of its vertices with the quad, so the quad overlaps two triangles and is discarded.
if not i in faces_to_not_include_idx:
faces_to_not_include_idx.append(i)
-
-
+
+
#### Build surface.
all_surface_verts_co = []
verts_idx_translation = {}
for i in range(len(final_points_ob.data.vertices)):
coords = final_points_ob.data.vertices[i].co
all_surface_verts_co.append([coords[0], coords[1], coords[2]])
-
+
# Verts of each face.
all_surface_faces = []
for i in range(len(faces_verts_idx)):
@@ -1533,99 +1531,99 @@ class GPENCIL_OT_SURFSK_add_surface(bpy.types.Operator):
face = []
for v_idx in faces_verts_idx[i]:
face.append(v_idx)
-
+
all_surface_faces.append(face)
-
+
# Build the mesh.
surf_me_name = "SURFSKIO_surface"
me_surf = bpy.data.meshes.new(surf_me_name)
-
+
me_surf.from_pydata(all_surface_verts_co, [], all_surface_faces)
-
+
me_surf.update()
-
+
ob_surface = bpy.data.objects.new(surf_me_name, me_surf)
bpy.context.scene.objects.link(ob_surface)
-
+
        # Delete the temporary "final points" object.
bpy.ops.object.select_all('INVOKE_REGION_WIN', action='DESELECT')
bpy.data.objects[final_points_ob.name].select = True
bpy.context.scene.objects.active = bpy.data.objects[final_points_ob.name]
-
+
bpy.ops.object.delete()
-
-
+
+
# Delete isolated verts if there are any.
bpy.ops.object.select_all('INVOKE_REGION_WIN', action='DESELECT')
bpy.data.objects[ob_surface.name].select = True
bpy.context.scene.objects.active = bpy.data.objects[ob_surface.name]
-
+
bpy.ops.object.editmode_toggle('INVOKE_REGION_WIN')
bpy.ops.mesh.select_all(action='DESELECT')
bpy.ops.mesh.select_face_by_sides(type='NOTEQUAL')
bpy.ops.mesh.delete()
bpy.ops.object.editmode_toggle('INVOKE_REGION_WIN')
-
-
-
+
+
+
#### Join crosshatch results with original mesh.
-
+
# Calculate a distance to merge the verts of the crosshatch surface to the main object.
edges_length_sum = 0
for ed in ob_surface.data.edges:
edges_length_sum += (ob_surface.data.vertices[ed.vertices[0]].co - ob_surface.data.vertices[ed.vertices[1]].co).length
-
+
if len(ob_surface.data.edges) > 0:
average_surface_edges_length = edges_length_sum / len(ob_surface.data.edges)
else:
average_surface_edges_length = 0.0001
-
+
# Make dictionary with all the verts connected to each vert, on the new surface object.
surface_connected_verts = {}
for ed in ob_surface.data.edges:
if not ed.vertices[0] in surface_connected_verts:
surface_connected_verts[ed.vertices[0]] = []
-
+
surface_connected_verts[ed.vertices[0]].append(ed.vertices[1])
-
-
+
+
if not ed.vertices[1] in surface_connected_verts:
surface_connected_verts[ed.vertices[1]] = []
-
+
surface_connected_verts[ed.vertices[1]].append(ed.vertices[0])
-
-
-
+
+
+
# Duplicate the new surface object, and use shrinkwrap to calculate later the nearest verts to the main object.
bpy.ops.object.editmode_toggle('INVOKE_REGION_WIN')
bpy.ops.mesh.select_all('INVOKE_REGION_WIN', action='DESELECT')
bpy.ops.object.editmode_toggle('INVOKE_REGION_WIN')
-
+
bpy.ops.object.duplicate('INVOKE_REGION_WIN')
-
+
final_ob_duplicate = bpy.context.scene.objects.active
-
+
bpy.ops.object.modifier_add('INVOKE_REGION_WIN', type='SHRINKWRAP')
final_ob_duplicate.modifiers["Shrinkwrap"].wrap_method = "NEAREST_VERTEX"
final_ob_duplicate.modifiers["Shrinkwrap"].target = self.main_object
-
+
bpy.ops.object.modifier_apply('INVOKE_REGION_WIN', apply_as='DATA', modifier='Shrinkwrap')
-
-
+
+
# Make list with verts of original mesh as index and coords as value.
main_object_verts_coords = []
for v in self.main_object.data.vertices:
coords = self.main_object.matrix_world * v.co
-
+
            for c in range(len(coords)): # To avoid problems from treating "-0.00" as a different value from "0.00".
if "%.3f" % coords[c] == "-0.00":
coords[c] = 0
-
+
main_object_verts_coords.append(["%.3f" % coords[0], "%.3f" % coords[1], "%.3f" % coords[2]])
-
+
tuple(main_object_verts_coords)
-
-
+
+
        # Determine which verts will be merged, snap them to the nearest verts of the original mesh, and select them.
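        # The shrinkwrapped duplicate gives, for every surface vert, its nearest position
        # on the main object. A vert is merged only if snapping it there would not stretch
        # or rotate any of its connected edges beyond the tolerances controlled by
        # join_stretch_factor, and only if the snapped position matches a vert of the main
        # object that is selected (or nothing is selected at all).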
crosshatch_verts_to_merge = []
if self.automatic_join:
@@ -1637,89 +1635,89 @@ class GPENCIL_OT_SURFSK_add_surface(bpy.types.Operator):
points_original = []
points_original.append(ob_surface.data.vertices[c_v_idx].co)
points_original.append(ob_surface.data.vertices[i].co)
-
+
points_target = []
points_target.append(ob_surface.data.vertices[c_v_idx].co)
points_target.append(final_ob_duplicate.data.vertices[i].co)
-
+
vec_A = points_original[0] - points_original[1]
vec_B = points_target[0] - points_target[1]
-
+
dist_A = (points_original[0] - points_original[1]).length
dist_B = (points_target[0] - points_target[1]).length
-
-
+
+
                    if not (points_original[0] == points_original[1] or points_target[0] == points_target[1]): # Only compute the angle if neither vector has zero length.
                        angle = vec_A.angle(vec_B) / math.pi
                    else:
                        angle = 0
-
-
+
+
if dist_B > dist_A * 1.7 * self.join_stretch_factor or dist_B < dist_A / 2 / self.join_stretch_factor or angle >= 0.15 * self.join_stretch_factor: # Set a range of acceptable variation in the connected edges.
merge_actual_vert = False
break
else:
merge_actual_vert = False
-
-
+
+
if merge_actual_vert:
coords = final_ob_duplicate.data.vertices[i].co
-
+
                    for c in range(len(coords)): # To avoid problems from treating "-0.000" as a different value from "0.00".
if "%.3f" % coords[c] == "-0.00":
coords[c] = 0
-
+
comparison_coords = ["%.3f" % coords[0], "%.3f" % coords[1], "%.3f" % coords[2]]
-
-
+
+
if comparison_coords in main_object_verts_coords:
main_object_related_vert_idx = main_object_verts_coords.index(comparison_coords) # Get the index of the vert with those coords in the main object.
-
+
if self.main_object.data.vertices[main_object_related_vert_idx].select == True or self.main_object_selected_verts_count == 0:
ob_surface.data.vertices[i].co = final_ob_duplicate.data.vertices[i].co
ob_surface.data.vertices[i].select = True
crosshatch_verts_to_merge.append(i)
-
+
# Make sure the vert in the main object is selected, in case it wasn't selected and the "join crosshatch" option is active.
self.main_object.data.vertices[main_object_related_vert_idx].select = True
-
-
-
-
+
+
+
+
# Delete duplicated object.
bpy.ops.object.select_all('INVOKE_REGION_WIN', action='DESELECT')
bpy.data.objects[final_ob_duplicate.name].select = True
bpy.context.scene.objects.active = bpy.data.objects[final_ob_duplicate.name]
bpy.ops.object.delete()
-
-
+
+
# Join crosshatched surface and main object.
bpy.ops.object.select_all('INVOKE_REGION_WIN', action='DESELECT')
bpy.data.objects[ob_surface.name].select = True
bpy.data.objects[self.main_object.name].select = True
bpy.context.scene.objects.active = bpy.data.objects[self.main_object.name]
-
+
bpy.ops.object.join('INVOKE_REGION_WIN')
-
+
bpy.ops.object.editmode_toggle('INVOKE_REGION_WIN')
# Perform Remove doubles to merge verts.
if not (self.automatic_join == False and self.main_object_selected_verts_count == 0):
bpy.ops.mesh.remove_doubles(threshold=0.0001)
-
+
bpy.ops.mesh.select_all(action='DESELECT')
-
-
+
+
        #### If the main object has modifiers, restore their viewport visibility to what it was before the forced deactivation above.
if len(self.main_object.modifiers) > 0:
for m_idx in range(len(self.main_object.modifiers)):
self.main_object.modifiers[m_idx].show_viewport = self.modifiers_prev_viewport_state[m_idx]
-
-
-
+
+
+
return{'FINISHED'}
-
-
-
+
+
+
def rectangular_surface(self):
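        # Summary of this method (the non-crosshatch path): read the selected edges/verts
        # of the main object, identify the selection chains (U/V, possibly closed or joined
        # by a "middle-vertex"), distribute points along the sketched strokes with matching
        # proportions, and build a grid of faces between the selection and the strokes.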
#### Selected edges.
all_selected_edges_idx = []
@@ -1728,36 +1726,36 @@ class GPENCIL_OT_SURFSK_add_surface(bpy.types.Operator):
for ed in self.main_object.data.edges:
if ed.select:
all_selected_edges_idx.append(ed.index)
-
+
# Selected vertices.
if not ed.vertices[0] in all_selected_verts:
all_selected_verts.append(self.main_object.data.vertices[ed.vertices[0]])
if not ed.vertices[1] in all_selected_verts:
all_selected_verts.append(self.main_object.data.vertices[ed.vertices[1]])
-
+
                # All verts (both from each edge), to determine later which are at the tips (those that appear only once).
all_verts_idx.append(ed.vertices[0])
all_verts_idx.append(ed.vertices[1])
-
-
-
+
+
+
#### Identify the tips and "middle-vertex" that separates U from V, if there is one.
all_chains_tips_idx = []
for v_idx in all_verts_idx:
if all_verts_idx.count(v_idx) < 2:
all_chains_tips_idx.append(v_idx)
-
-
-
+
+
+
edges_connected_to_tips = []
for ed in self.main_object.data.edges:
if (ed.vertices[0] in all_chains_tips_idx or ed.vertices[1] in all_chains_tips_idx) and not (ed.vertices[0] in all_verts_idx and ed.vertices[1] in all_verts_idx):
edges_connected_to_tips.append(ed)
-
-
+
+
#### Check closed selections.
        single_unselected_verts_and_neighbors = [] # List with groups of three verts, where the first element of the group is the unselected vert of a closed selection and the other two elements are its selected neighbor verts (useful to determine which selection chain the unselected vert belongs to, and to find the "middle-vertex").
-
+
# To identify a "closed" selection (a selection that is a closed chain except for one vertex) find the vertex in common that have the edges connected to tips. If there is a vertex in common, that one is the unselected vert that closes the selection or is a "middle-vertex".
single_unselected_verts = []
for ed in edges_connected_to_tips:
@@ -1779,20 +1777,20 @@ class GPENCIL_OT_SURFSK_add_surface(bpy.types.Operator):
single_unselected_verts_and_neighbors.append([ed.vertices[1], ed.vertices[0], ed_b.vertices[0]])
single_unselected_verts.append(ed.vertices[1])
break
-
-
+
+
middle_vertex_idx = None
tips_to_discard_idx = []
# Check if there is a "middle-vertex", and get its index.
for i in range(0, len(single_unselected_verts_and_neighbors)):
actual_chain_verts = self.get_ordered_verts(self.main_object, all_selected_edges_idx, all_verts_idx, single_unselected_verts_and_neighbors[i][1], None, None)
-
+
if single_unselected_verts_and_neighbors[i][2] != actual_chain_verts[len(actual_chain_verts) - 1].index:
middle_vertex_idx = single_unselected_verts_and_neighbors[i][0]
tips_to_discard_idx.append(single_unselected_verts_and_neighbors[i][1])
tips_to_discard_idx.append(single_unselected_verts_and_neighbors[i][2])
-
-
+
+
#### List with pairs of verts that belong to the tips of each selection chain (row).
verts_tips_same_chain_idx = []
if len(all_chains_tips_idx) >= 2:
@@ -1800,21 +1798,21 @@ class GPENCIL_OT_SURFSK_add_surface(bpy.types.Operator):
for i in range(0, len(all_chains_tips_idx)):
if all_chains_tips_idx[i] not in checked_v:
v_chain = self.get_ordered_verts(self.main_object, all_selected_edges_idx, all_verts_idx, all_chains_tips_idx[i], middle_vertex_idx, None)
-
+
verts_tips_same_chain_idx.append([v_chain[0].index, v_chain[len(v_chain) - 1].index])
-
+
checked_v.append(v_chain[0].index)
checked_v.append(v_chain[len(v_chain) - 1].index)
-
-
+
+
#### Selection tips (vertices).
verts_tips_parsed_idx = []
if len(all_chains_tips_idx) >= 2:
for spec_v_idx in all_chains_tips_idx:
if (spec_v_idx not in tips_to_discard_idx):
verts_tips_parsed_idx.append(spec_v_idx)
-
-
+
+
#### Identify the type of selection made by the user.
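        # Selection types used below: "TWO_CONNECTED" (two chains joined by a middle
        # vertex), "SINGLE" (one chain), "TWO_NOT_CONNECTED" (two separate chains) and
        # "NO_SELECTION"; they are derived from the number of chain tips and from whether
        # a middle vertex was found above.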
if middle_vertex_idx != None:
if len(all_chains_tips_idx) == 4 and len(single_unselected_verts_and_neighbors) == 1: # If there are 4 tips (two selection chains), and there is only one single unselected vert (the middle vert).
@@ -1825,7 +1823,7 @@ class GPENCIL_OT_SURFSK_add_surface(bpy.types.Operator):
bpy.ops.object.editmode_toggle('INVOKE_REGION_WIN')
self.cleanup_on_interruption()
self.stopping_errors = True
-
+
return{'CANCELLED'}
else:
if len(all_chains_tips_idx) == 2: # If there are 2 tips
@@ -1841,39 +1839,39 @@ class GPENCIL_OT_SURFSK_add_surface(bpy.types.Operator):
bpy.ops.object.editmode_toggle('INVOKE_REGION_WIN')
self.cleanup_on_interruption()
self.stopping_errors = True
-
+
return{'CANCELLED'}
else:
# The type of the selection was not identified, the script stops.
self.report({'WARNING'}, "The selection isn't valid.")
-
+
bpy.ops.object.editmode_toggle('INVOKE_REGION_WIN')
self.cleanup_on_interruption()
-
+
self.stopping_errors = True
-
+
return{'CANCELLED'}
-
-
-
+
+
+
#### If the selection type is TWO_NOT_CONNECTED and there is only one stroke, stop the script.
if selection_type == "TWO_NOT_CONNECTED" and len(self.main_splines.data.splines) == 1:
self.report({'WARNING'}, "At least two strokes are needed when there are two not connected selections.")
bpy.ops.object.editmode_toggle('INVOKE_REGION_WIN')
self.cleanup_on_interruption()
self.stopping_errors = True
-
+
return{'CANCELLED'}
-
-
-
+
+
+
bpy.ops.object.editmode_toggle('INVOKE_REGION_WIN')
-
+
bpy.ops.object.select_all('INVOKE_REGION_WIN', action='DESELECT')
bpy.data.objects[self.main_splines.name].select = True
bpy.context.scene.objects.active = bpy.context.scene.objects[self.main_splines.name]
-
-
+
+
#### Enter editmode for the new curve (converted from grease pencil strokes), to smooth it out.
bpy.ops.object.editmode_toggle('INVOKE_REGION_WIN')
bpy.ops.curve.smooth('INVOKE_REGION_WIN')
@@ -1883,52 +1881,52 @@ class GPENCIL_OT_SURFSK_add_surface(bpy.types.Operator):
bpy.ops.curve.smooth('INVOKE_REGION_WIN')
bpy.ops.curve.smooth('INVOKE_REGION_WIN')
bpy.ops.object.editmode_toggle('INVOKE_REGION_WIN')
-
-
+
+
self.selection_U_exists = False
self.selection_U2_exists = False
self.selection_V_exists = False
self.selection_V2_exists = False
-
+
self.selection_U_is_closed = False
self.selection_U2_is_closed = False
self.selection_V_is_closed = False
self.selection_V2_is_closed = False
-
+
#### Define what vertices are at the tips of each selection and are not the middle-vertex.
if selection_type == "TWO_CONNECTED":
self.selection_U_exists = True
self.selection_V_exists = True
-
+
closing_vert_U_idx = None
closing_vert_V_idx = None
closing_vert_U2_idx = None
closing_vert_V2_idx = None
-
+
# Determine which selection is Selection-U and which is Selection-V.
points_A = []
points_B = []
points_first_stroke_tips = []
-
+
points_A.append(self.main_object.matrix_world * self.main_object.data.vertices[verts_tips_parsed_idx[0]].co)
points_A.append(self.main_object.matrix_world * self.main_object.data.vertices[middle_vertex_idx].co)
-
+
points_B.append(self.main_object.matrix_world * self.main_object.data.vertices[verts_tips_parsed_idx[1]].co)
points_B.append(self.main_object.matrix_world * self.main_object.data.vertices[middle_vertex_idx].co)
-
+
points_first_stroke_tips.append(self.main_splines.data.splines[0].bezier_points[0].co)
points_first_stroke_tips.append(self.main_splines.data.splines[0].bezier_points[len(self.main_splines.data.splines[0].bezier_points) - 1].co)
-
+
angle_A = self.orientation_difference(points_A, points_first_stroke_tips)
angle_B = self.orientation_difference(points_B, points_first_stroke_tips)
-
+
if angle_A < angle_B:
first_vert_U_idx = verts_tips_parsed_idx[0]
first_vert_V_idx = verts_tips_parsed_idx[1]
else:
first_vert_U_idx = verts_tips_parsed_idx[1]
first_vert_V_idx = verts_tips_parsed_idx[0]
-
+
elif selection_type == "SINGLE" or selection_type == "TWO_NOT_CONNECTED":
first_sketched_point_first_stroke_co = self.main_splines.data.splines[0].bezier_points[0].co
last_sketched_point_first_stroke_co = self.main_splines.data.splines[0].bezier_points[len(self.main_splines.data.splines[0].bezier_points) - 1].co
@@ -1936,57 +1934,57 @@ class GPENCIL_OT_SURFSK_add_surface(bpy.types.Operator):
if len(self.main_splines.data.splines) > 1:
first_sketched_point_second_stroke_co = self.main_splines.data.splines[1].bezier_points[0].co
last_sketched_point_second_stroke_co = self.main_splines.data.splines[1].bezier_points[len(self.main_splines.data.splines[1].bezier_points) - 1].co
-
-
+
+
single_unselected_neighbors = [] # Only the neighbors of the single unselected verts.
for verts_neig_idx in single_unselected_verts_and_neighbors:
single_unselected_neighbors.append(verts_neig_idx[1])
single_unselected_neighbors.append(verts_neig_idx[2])
-
-
+
+
all_chains_tips_and_middle_vert = []
for v_idx in all_chains_tips_idx:
if v_idx not in single_unselected_neighbors:
all_chains_tips_and_middle_vert.append(v_idx)
-
-
+
+
all_chains_tips_and_middle_vert += single_unselected_verts
-
+
all_participating_verts = all_chains_tips_and_middle_vert + all_verts_idx
-
+
# The tip of the selected vertices nearest to the first point of the first sketched stroke.
nearest_tip_to_first_st_first_pt_idx, shortest_distance_to_first_stroke = self.shortest_distance(self.main_object, first_sketched_point_first_stroke_co, all_chains_tips_and_middle_vert)
# If the nearest tip is not from a closed selection, get the opposite tip vertex index.
if nearest_tip_to_first_st_first_pt_idx not in single_unselected_verts or nearest_tip_to_first_st_first_pt_idx == middle_vertex_idx:
nearest_tip_to_first_st_first_pt_opposite_idx = self.opposite_tip(nearest_tip_to_first_st_first_pt_idx, verts_tips_same_chain_idx)
-
+
# The tip of the selected vertices nearest to the last point of the first sketched stroke.
nearest_tip_to_first_st_last_pt_idx, temp_dist = self.shortest_distance(self.main_object, last_sketched_point_first_stroke_co, all_chains_tips_and_middle_vert)
-
+
# The tip of the selected vertices nearest to the first point of the last sketched stroke.
nearest_tip_to_last_st_first_pt_idx, shortest_distance_to_last_stroke = self.shortest_distance(self.main_object, first_sketched_point_last_stroke_co, all_chains_tips_and_middle_vert)
-
+
if len(self.main_splines.data.splines) > 1:
# The selected vertex nearest to the first point of the second sketched stroke. (This will be useful to determine the direction of the closed selection V when extruding along strokes)
nearest_vert_to_second_st_first_pt_idx, temp_dist = self.shortest_distance(self.main_object, first_sketched_point_second_stroke_co, all_verts_idx)
-
+
                # The selected vertex nearest to the last point of the second sketched stroke. (This will be useful to determine the direction of the closed selection V2 when extruding along strokes)
nearest_vert_to_second_st_last_pt_idx, temp_dist = self.shortest_distance(self.main_object, last_sketched_point_second_stroke_co, all_verts_idx)
-
-
-
+
+
+
# Determine if the single selection will be treated as U or as V.
edges_sum = 0
for i in all_selected_edges_idx:
edges_sum += ((self.main_object.matrix_world * self.main_object.data.vertices[self.main_object.data.edges[i].vertices[0]].co) - (self.main_object.matrix_world * self.main_object.data.vertices[self.main_object.data.edges[i].vertices[1]].co)).length
-
+
average_edge_length = edges_sum / len(all_selected_edges_idx)
-
-
+
+
# Get shortest distance from the first point of the last stroke to any participating vertex.
temp_idx, shortest_distance_to_last_stroke = self.shortest_distance(self.main_object, first_sketched_point_last_stroke_co, all_participating_verts)
-
-
+
+
if shortest_distance_to_first_stroke < average_edge_length / 4 and shortest_distance_to_last_stroke < average_edge_length and len(self.main_splines.data.splines) > 1: # If the beginning of the first stroke is near enough, and its orientation difference with the first edge of the nearest selection chain is not too high, interpret things as an "extrude along strokes" instead of "extrude through strokes"
self.selection_U_exists = False
self.selection_V_exists = True
@@ -1997,17 +1995,17 @@ class GPENCIL_OT_SURFSK_add_surface(bpy.types.Operator):
closing_vert_U2_idx = None
closing_vert_V_idx = None
closing_vert_V2_idx = None
-
+
first_vert_V_idx = nearest_tip_to_first_st_first_pt_idx
-
+
if selection_type == "TWO_NOT_CONNECTED":
self.selection_V2_exists = True
-
+
first_vert_V2_idx = nearest_tip_to_first_st_last_pt_idx
else:
self.selection_V_is_closed = True
closing_vert_V_idx = nearest_tip_to_first_st_first_pt_idx
-
+
                # Get the neighbors of the first (unselected) vert of the closed selection V.
vert_neighbors = []
for verts in single_unselected_verts_and_neighbors:
@@ -2015,9 +2013,9 @@ class GPENCIL_OT_SURFSK_add_surface(bpy.types.Operator):
vert_neighbors.append(verts[1])
vert_neighbors.append(verts[2])
break
-
+
verts_V = self.get_ordered_verts(self.main_object, all_selected_edges_idx, all_verts_idx, vert_neighbors[0], middle_vertex_idx, None)
-
+
for i in range(0, len(verts_V)):
if verts_V[i].index == nearest_vert_to_second_st_first_pt_idx:
                        if i >= len(verts_V) / 2: # If the vertex nearest to the first point of the second stroke is in the second half of the selected verts.
@@ -2026,23 +2024,23 @@ class GPENCIL_OT_SURFSK_add_surface(bpy.types.Operator):
else:
first_vert_V_idx = vert_neighbors[0]
break
-
-
-
+
+
+
if selection_type == "TWO_NOT_CONNECTED":
self.selection_V2_exists = True
-
+
if nearest_tip_to_first_st_last_pt_idx not in single_unselected_verts or nearest_tip_to_first_st_last_pt_idx == middle_vertex_idx: # If the second selection is not closed.
self.selection_V2_is_closed = False
first_neighbor_V2_idx = None
closing_vert_V2_idx = None
-
+
first_vert_V2_idx = nearest_tip_to_first_st_last_pt_idx
-
+
else:
self.selection_V2_is_closed = True
closing_vert_V2_idx = nearest_tip_to_first_st_last_pt_idx
-
+
                    # Get the neighbors of the first (unselected) vert of the closed selection V2.
vert_neighbors = []
for verts in single_unselected_verts_and_neighbors:
@@ -2050,10 +2048,10 @@ class GPENCIL_OT_SURFSK_add_surface(bpy.types.Operator):
vert_neighbors.append(verts[1])
vert_neighbors.append(verts[2])
break
-
-
+
+
verts_V2 = self.get_ordered_verts(self.main_object, all_selected_edges_idx, all_verts_idx, vert_neighbors[0], middle_vertex_idx, None)
-
+
for i in range(0, len(verts_V2)):
if verts_V2[i].index == nearest_vert_to_second_st_last_pt_idx:
                            if i >= len(verts_V2) / 2: # If the vertex nearest to the last point of the second stroke is in the second half of the selected verts.
@@ -2062,10 +2060,10 @@ class GPENCIL_OT_SURFSK_add_surface(bpy.types.Operator):
else:
first_vert_V2_idx = vert_neighbors[0]
break
-
+
else:
self.selection_V2_exists = False
-
+
else:
self.selection_U_exists = True
self.selection_V_exists = False
@@ -2073,28 +2071,28 @@ class GPENCIL_OT_SURFSK_add_surface(bpy.types.Operator):
self.selection_U_is_closed = False
first_neighbor_U_idx = None
closing_vert_U_idx = None
-
+
points_tips = []
points_tips.append(self.main_object.matrix_world * self.main_object.data.vertices[nearest_tip_to_first_st_first_pt_idx].co)
points_tips.append(self.main_object.matrix_world * self.main_object.data.vertices[nearest_tip_to_first_st_first_pt_opposite_idx].co)
-
+
points_first_stroke_tips = []
points_first_stroke_tips.append(self.main_splines.data.splines[0].bezier_points[0].co)
points_first_stroke_tips.append(self.main_splines.data.splines[0].bezier_points[len(self.main_splines.data.splines[0].bezier_points) - 1].co)
-
+
vec_A = points_tips[0] - points_tips[1]
vec_B = points_first_stroke_tips[0] - points_first_stroke_tips[1]
-
+
# Compare the direction of the selection and the first grease pencil stroke to determine which is the "first" vertex of the selection.
if vec_A.dot(vec_B) < 0:
first_vert_U_idx = nearest_tip_to_first_st_first_pt_opposite_idx
else:
first_vert_U_idx = nearest_tip_to_first_st_first_pt_idx
-
+
else:
self.selection_U_is_closed = True
closing_vert_U_idx = nearest_tip_to_first_st_first_pt_idx
-
+
# Get the neighbors of the first (unselected) vert of the closed selection U.
vert_neighbors = []
for verts in single_unselected_verts_and_neighbors:
@@ -2102,40 +2100,40 @@ class GPENCIL_OT_SURFSK_add_surface(bpy.types.Operator):
vert_neighbors.append(verts[1])
vert_neighbors.append(verts[2])
break
-
+
points_first_and_neighbor = []
points_first_and_neighbor.append(self.main_object.matrix_world * self.main_object.data.vertices[nearest_tip_to_first_st_first_pt_idx].co)
points_first_and_neighbor.append(self.main_object.matrix_world * self.main_object.data.vertices[vert_neighbors[0]].co)
-
+
points_first_stroke_tips = []
points_first_stroke_tips.append(self.main_splines.data.splines[0].bezier_points[0].co)
points_first_stroke_tips.append(self.main_splines.data.splines[0].bezier_points[1].co)
-
+
vec_A = points_first_and_neighbor[0] - points_first_and_neighbor[1]
vec_B = points_first_stroke_tips[0] - points_first_stroke_tips[1]
-
+
# Compare the direction of the selection and the first grease pencil stroke to determine which is the vertex neighbor to the first vertex (unselected) of the closed selection. This will determine the direction of the closed selection.
if vec_A.dot(vec_B) < 0:
first_vert_U_idx = vert_neighbors[1]
else:
first_vert_U_idx = vert_neighbors[0]
-
-
-
+
+
+
if selection_type == "TWO_NOT_CONNECTED":
self.selection_U2_exists = True
-
+
if nearest_tip_to_last_st_first_pt_idx not in single_unselected_verts or nearest_tip_to_last_st_first_pt_idx == middle_vertex_idx: # If the second selection is not closed.
self.selection_U2_is_closed = False
first_neighbor_U2_idx = None
closing_vert_U2_idx = None
-
+
first_vert_U2_idx = nearest_tip_to_last_st_first_pt_idx
-
+
else:
self.selection_U2_is_closed = True
closing_vert_U2_idx = nearest_tip_to_last_st_first_pt_idx
-
+
                    # Get the neighbors of the first (unselected) vert of the closed selection U2.
vert_neighbors = []
for verts in single_unselected_verts_and_neighbors:
@@ -2143,127 +2141,127 @@ class GPENCIL_OT_SURFSK_add_surface(bpy.types.Operator):
vert_neighbors.append(verts[1])
vert_neighbors.append(verts[2])
break
-
+
points_first_and_neighbor = []
points_first_and_neighbor.append(self.main_object.matrix_world * self.main_object.data.vertices[nearest_tip_to_last_st_first_pt_idx].co)
points_first_and_neighbor.append(self.main_object.matrix_world * self.main_object.data.vertices[vert_neighbors[0]].co)
-
+
points_last_stroke_tips = []
points_last_stroke_tips.append(self.main_splines.data.splines[len(self.main_splines.data.splines) - 1].bezier_points[0].co)
points_last_stroke_tips.append(self.main_splines.data.splines[len(self.main_splines.data.splines) - 1].bezier_points[1].co)
-
+
vec_A = points_first_and_neighbor[0] - points_first_and_neighbor[1]
vec_B = points_last_stroke_tips[0] - points_last_stroke_tips[1]
-
+
# Compare the direction of the selection and the last grease pencil stroke to determine which is the vertex neighbor to the first vertex (unselected) of the closed selection. This will determine the direction of the closed selection.
if vec_A.dot(vec_B) < 0:
first_vert_U2_idx = vert_neighbors[1]
else:
first_vert_U2_idx = vert_neighbors[0]
-
+
else:
self.selection_U2_exists = False
-
+
elif selection_type == "NO_SELECTION":
self.selection_U_exists = False
self.selection_V_exists = False
-
-
-
+
+
+
#### Get an ordered list of the vertices of Selection-U.
verts_ordered_U = []
if self.selection_U_exists:
verts_ordered_U = self.get_ordered_verts(self.main_object, all_selected_edges_idx, all_verts_idx, first_vert_U_idx, middle_vertex_idx, closing_vert_U_idx)
verts_ordered_U_indices = [x.index for x in verts_ordered_U]
-
+
#### Get an ordered list of the vertices of Selection-U2.
verts_ordered_U2 = []
if self.selection_U2_exists:
verts_ordered_U2 = self.get_ordered_verts(self.main_object, all_selected_edges_idx, all_verts_idx, first_vert_U2_idx, middle_vertex_idx, closing_vert_U2_idx)
verts_ordered_U2_indices = [x.index for x in verts_ordered_U2]
-
+
#### Get an ordered list of the vertices of Selection-V.
verts_ordered_V = []
if self.selection_V_exists:
verts_ordered_V = self.get_ordered_verts(self.main_object, all_selected_edges_idx, all_verts_idx, first_vert_V_idx, middle_vertex_idx, closing_vert_V_idx)
verts_ordered_V_indices = [x.index for x in verts_ordered_V]
-
+
#### Get an ordered list of the vertices of Selection-V2.
verts_ordered_V2 = []
if self.selection_V2_exists:
verts_ordered_V2 = self.get_ordered_verts(self.main_object, all_selected_edges_idx, all_verts_idx, first_vert_V2_idx, middle_vertex_idx, closing_vert_V2_idx)
verts_ordered_V2_indices = [x.index for x in verts_ordered_V2]
-
-
-
+
+
+
        #### When there are two not-connected selections, check that both have the same number of verts. If not, terminate the script.
if ((self.selection_U2_exists and len(verts_ordered_U) != len(verts_ordered_U2)) or (self.selection_V2_exists and len(verts_ordered_V) != len(verts_ordered_V2))):
# Display a warning.
self.report({'WARNING'}, "Both selections must have the same number of edges")
-
+
self.cleanup_on_interruption()
-
+
self.stopping_errors = True
-
+
return{'CANCELLED'}
-
-
-
+
+
+
#### Calculate edges U proportions.
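        # Each selected chain is reduced to per-edge proportions of its total length, so
        # the same relative spacing can later be reproduced when distributing points along
        # the sketched strokes (get_edges_proportions also receives the edges_U / edges_V
        # settings for the case where no selection exists).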
-
+
# Sum selected edges U lengths.
edges_lengths_U = []
edges_lengths_sum_U = 0
-
+
if self.selection_U_exists:
edges_lengths_U, edges_lengths_sum_U = self.get_chain_length(self.main_object, verts_ordered_U)
-
+
if self.selection_U2_exists:
edges_lengths_U2, edges_lengths_sum_U2 = self.get_chain_length(self.main_object, verts_ordered_U2)
-
+
# Sum selected edges V lengths.
edges_lengths_V = []
edges_lengths_sum_V = 0
-
+
if self.selection_V_exists:
edges_lengths_V, edges_lengths_sum_V = self.get_chain_length(self.main_object, verts_ordered_V)
-
+
if self.selection_V2_exists:
edges_lengths_V2, edges_lengths_sum_V2 = self.get_chain_length(self.main_object, verts_ordered_V2)
-
-
+
+
bpy.ops.object.editmode_toggle('INVOKE_REGION_WIN')
bpy.ops.curve.subdivide('INVOKE_REGION_WIN', number_cuts = bpy.context.scene.SURFSK_precision)
bpy.ops.object.editmode_toggle('INVOKE_REGION_WIN')
-
-
+
+
# Proportions U.
edges_proportions_U = []
edges_proportions_U = self.get_edges_proportions(edges_lengths_U, edges_lengths_sum_U, self.selection_U_exists, self.edges_U)
verts_count_U = len(edges_proportions_U) + 1
-
+
if self.selection_U2_exists:
edges_proportions_U2 = []
edges_proportions_U2 = self.get_edges_proportions(edges_lengths_U2, edges_lengths_sum_U2, self.selection_U2_exists, self.edges_V)
verts_count_U2 = len(edges_proportions_U2) + 1
-
+
# Proportions V.
edges_proportions_V = []
edges_proportions_V = self.get_edges_proportions(edges_lengths_V, edges_lengths_sum_V, self.selection_V_exists, self.edges_V)
verts_count_V = len(edges_proportions_V) + 1
-
+
if self.selection_V2_exists:
edges_proportions_V2 = []
edges_proportions_V2 = self.get_edges_proportions(edges_lengths_V2, edges_lengths_sum_V2, self.selection_V2_exists, self.edges_V)
verts_count_V2 = len(edges_proportions_V2) + 1
-
-
-
-
-
-
-
-
+
+
+
+
+
+
+
+
#### Cyclic Follow: simplify sketched curves, make them Cyclic, and complete the actual sketched curves with a "closing segment".
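        # For each sketched stroke: a simplified copy is built and made cyclic, the
        # "closing segment" between the stroke's two ends is isolated and subdivided
        # (proportionally to its length), and the resulting points are appended to the
        # original stroke so that it effectively closes on itself.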
if self.cyclic_follow and not self.selection_V_exists and not ((self.selection_U_exists and not self.selection_U_is_closed) or (self.selection_U2_exists and not self.selection_U2_is_closed)):
simplified_spline_coords = []
@@ -2275,310 +2273,310 @@ class GPENCIL_OT_SURFSK_add_surface(bpy.types.Operator):
simplified_curve.append(bpy.data.curves.new('SURFSKIO_simpl_crv', 'CURVE'))
ob_simplified_curve.append(bpy.data.objects.new('SURFSKIO_simpl_crv', simplified_curve[i]))
bpy.context.scene.objects.link(ob_simplified_curve[i])
-
+
simplified_curve[i].dimensions = "3D"
-
+
spline_coords = []
for bp in self.main_splines.data.splines[i].bezier_points:
spline_coords.append(bp.co)
-
+
# Simplification.
simplified_spline_coords.append(self.simplify_spline(spline_coords, 5))
-
+
# Get the coordinates of the first vert of the actual spline.
splines_first_v_co.append(simplified_spline_coords[i][0])
-
-
+
+
# Generate the spline.
spline = simplified_curve[i].splines.new('BEZIER')
                spline.bezier_points.add(len(simplified_spline_coords[i]) - 1) # Minus one, because one point is already added when the spline is created.
for p in range(0, len(simplified_spline_coords[i])):
spline.bezier_points[p].co = simplified_spline_coords[i][p]
-
-
+
+
spline.use_cyclic_u = True
-
+
spline_bp_count = len(spline.bezier_points)
-
+
bpy.ops.object.select_all('INVOKE_REGION_WIN', action='DESELECT')
bpy.data.objects[ob_simplified_curve[i].name].select = True
bpy.context.scene.objects.active = bpy.context.scene.objects[ob_simplified_curve[i].name]
-
+
bpy.ops.object.editmode_toggle('INVOKE_REGION_WIN')
bpy.ops.curve.select_all('INVOKE_REGION_WIN', action='SELECT')
bpy.ops.curve.handle_type_set('INVOKE_REGION_WIN', type='AUTOMATIC')
bpy.ops.curve.select_all('INVOKE_REGION_WIN', action='DESELECT')
bpy.ops.object.editmode_toggle('INVOKE_REGION_WIN')
-
-
+
+
# Select the "closing segment", and subdivide it.
ob_simplified_curve[i].data.splines[0].bezier_points[0].select_control_point = True
ob_simplified_curve[i].data.splines[0].bezier_points[0].select_left_handle = True
ob_simplified_curve[i].data.splines[0].bezier_points[0].select_right_handle = True
-
+
ob_simplified_curve[i].data.splines[0].bezier_points[spline_bp_count - 1].select_control_point = True
ob_simplified_curve[i].data.splines[0].bezier_points[spline_bp_count - 1].select_left_handle = True
ob_simplified_curve[i].data.splines[0].bezier_points[spline_bp_count - 1].select_right_handle = True
-
+
bpy.ops.object.editmode_toggle('INVOKE_REGION_WIN')
segments = sqrt((ob_simplified_curve[i].data.splines[0].bezier_points[0].co - ob_simplified_curve[i].data.splines[0].bezier_points[spline_bp_count - 1].co).length / self.average_gp_segment_length)
for t in range(2):
bpy.ops.curve.subdivide('INVOKE_REGION_WIN', number_cuts = segments)
-
-
+
+
# Delete the other vertices and make it non-cyclic to keep only the needed verts of the "closing segment".
bpy.ops.curve.select_all(action = 'INVERT')
bpy.ops.curve.delete(type='VERT')
ob_simplified_curve[i].data.splines[0].use_cyclic_u = False
bpy.ops.object.editmode_toggle('INVOKE_REGION_WIN')
-
-
+
+
# Add the points of the "closing segment" to the original curve from grease pencil stroke.
first_new_index = len(self.main_splines.data.splines[i].bezier_points)
self.main_splines.data.splines[i].bezier_points.add(len(ob_simplified_curve[i].data.splines[0].bezier_points) - 1)
for t in range(1, len(ob_simplified_curve[i].data.splines[0].bezier_points)):
self.main_splines.data.splines[i].bezier_points[t - 1 + first_new_index].co = ob_simplified_curve[i].data.splines[0].bezier_points[t].co
-
-
+
+
                # Delete the temporary curve.
bpy.ops.object.select_all('INVOKE_REGION_WIN', action='DESELECT')
bpy.data.objects[ob_simplified_curve[i].name].select = True
bpy.context.scene.objects.active = bpy.context.scene.objects[ob_simplified_curve[i].name]
-
+
bpy.ops.object.delete()
-
-
-
+
+
+
#### Get the coords of the points distributed along the sketched strokes, with proportions-U of the first selection.
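        # When a second selection U2 exists, the proportions are not applied uniformly:
        # for every stroke the U proportions are blended towards the U2 proportions,
        # weighted by how far along the crossing loops that stroke sits, so the spacing
        # morphs gradually from the first selection to the second.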
pts_on_strokes_with_proportions_U = self.distribute_pts(self.main_splines.data.splines, edges_proportions_U)
-
+
sketched_splines_parsed = []
-
+
if self.selection_U2_exists:
# Initialize the multidimensional list with the proportions of all the segments.
proportions_loops_crossing_strokes = []
for i in range(len(pts_on_strokes_with_proportions_U)):
proportions_loops_crossing_strokes.append([])
-
+
for t in range(len(pts_on_strokes_with_proportions_U[0])):
proportions_loops_crossing_strokes[i].append(None)
-
-
+
+
# Calculate the proportions of each segment of the loops-U from pts_on_strokes_with_proportions_U.
for lp in range(len(pts_on_strokes_with_proportions_U[0])):
loop_segments_lengths = []
-
+
for st in range(len(pts_on_strokes_with_proportions_U)):
                    if st == 0: # When on the first stroke, add the segment from the selection to the first stroke.
loop_segments_lengths.append(((self.main_object.matrix_world * verts_ordered_U[lp].co) - pts_on_strokes_with_proportions_U[0][lp]).length)
-
+
if st != len(pts_on_strokes_with_proportions_U) - 1: # For all strokes except for the last, calculate the distance from the actual stroke to the next.
loop_segments_lengths.append((pts_on_strokes_with_proportions_U[st][lp] - pts_on_strokes_with_proportions_U[st + 1][lp]).length)
-
+
if st == len(pts_on_strokes_with_proportions_U) - 1: # When on the last stroke, add the segments from the last stroke to the second selection.
loop_segments_lengths.append((pts_on_strokes_with_proportions_U[st][lp] - (self.main_object.matrix_world * verts_ordered_U2[lp].co)).length)
-
+
# Calculate full loop length.
loop_seg_lengths_sum = 0
for i in range(len(loop_segments_lengths)):
loop_seg_lengths_sum += loop_segments_lengths[i]
-
+
# Fill the multidimensional list with the proportions of all the segments.
for st in range(len(pts_on_strokes_with_proportions_U)):
proportions_loops_crossing_strokes[st][lp] = loop_segments_lengths[st] / loop_seg_lengths_sum
-
-
+
+
# Calculate proportions for each stroke.
for st in range(len(pts_on_strokes_with_proportions_U)):
actual_stroke_spline = []
actual_stroke_spline.append(self.main_splines.data.splines[st]) # Needs to be a list for the "distribute_pts" method.
-
+
# Calculate the proportions for the actual stroke.
actual_edges_proportions_U = []
for i in range(len(edges_proportions_U)):
proportions_sum = 0
-
+
# Sum the proportions of this loop up to the actual.
for t in range(0, st + 1):
proportions_sum += proportions_loops_crossing_strokes[t][i]
-
+
actual_edges_proportions_U.append(edges_proportions_U[i] - ((edges_proportions_U[i] - edges_proportions_U2[i]) * proportions_sum)) # i + 1, because proportions_loops_crossing_strokes refers to loops, and the proportions refer to edges, so we start at the element 1 of proportions_loops_crossing_strokes instead of element 0.
-
-
+
+
points_actual_spline = self.distribute_pts(actual_stroke_spline, actual_edges_proportions_U)
sketched_splines_parsed.append(points_actual_spline[0])
-
+
else:
sketched_splines_parsed = pts_on_strokes_with_proportions_U
-
-
-
+
+
+
#### If the selection type is "TWO_NOT_CONNECTED" replace the points of the last spline with the points in the "target" selection.
if selection_type == "TWO_NOT_CONNECTED":
if self.selection_U2_exists:
for i in range(0, len(sketched_splines_parsed[len(sketched_splines_parsed) - 1])):
sketched_splines_parsed[len(sketched_splines_parsed) - 1][i] = self.main_object.matrix_world * verts_ordered_U2[i].co
-
-
+
+
#### Create temporary curves along the "control-points" found on the sketched curves and the mesh selection.
mesh_ctrl_pts_name = "SURFSKIO_ctrl_pts"
me = bpy.data.meshes.new(mesh_ctrl_pts_name)
ob_ctrl_pts = bpy.data.objects.new(mesh_ctrl_pts_name, me)
ob_ctrl_pts.data = me
bpy.context.scene.objects.link(ob_ctrl_pts)
-
-
+
+
cyclic_loops_U = []
first_verts = []
second_verts = []
last_verts = []
for i in range(0, verts_count_U):
vert_num_in_spline = 1
-
+
if self.selection_U_exists:
ob_ctrl_pts.data.vertices.add(1)
last_v = ob_ctrl_pts.data.vertices[len(ob_ctrl_pts.data.vertices) - 1]
last_v.co = self.main_object.matrix_world * verts_ordered_U[i].co
-
+
vert_num_in_spline += 1
-
-
+
+
for t in range(0, len(sketched_splines_parsed)):
ob_ctrl_pts.data.vertices.add(1)
v = ob_ctrl_pts.data.vertices[len(ob_ctrl_pts.data.vertices) - 1]
v.co = sketched_splines_parsed[t][i]
-
-
+
+
if vert_num_in_spline > 1:
ob_ctrl_pts.data.edges.add(1)
ob_ctrl_pts.data.edges[len(ob_ctrl_pts.data.edges) - 1].vertices[0] = len(ob_ctrl_pts.data.vertices) - 2
ob_ctrl_pts.data.edges[len(ob_ctrl_pts.data.edges) - 1].vertices[1] = len(ob_ctrl_pts.data.vertices) - 1
-
+
if t == 0:
first_verts.append(v.index)
-
+
if t == 1:
second_verts.append(v.index)
-
+
if t == len(sketched_splines_parsed) - 1:
last_verts.append(v.index)
-
-
+
+
last_v = v
-
+
vert_num_in_spline += 1
-
-
+
+
bpy.ops.object.select_all('INVOKE_REGION_WIN', action='DESELECT')
bpy.data.objects[ob_ctrl_pts.name].select = True
bpy.context.scene.objects.active = bpy.data.objects[ob_ctrl_pts.name]
-
+
bpy.ops.object.editmode_toggle('INVOKE_REGION_WIN')
bpy.ops.mesh.select_all(action='DESELECT')
bpy.ops.object.editmode_toggle('INVOKE_REGION_WIN')
-
-
+
+
#### Determine which loops-U will be "Cyclic".
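        # With "Automatic Join" (and no "Cyclic Cross"), a loop-U is closed only when
        # moving its first point to the midpoint between its first and last points would
        # barely change the length and direction of its first segment (within the
        # join_stretch_factor tolerances); with "Cyclic Cross" active, all crossing loops
        # are simply made cyclic.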
for i in range(0, len(first_verts)):
            if self.automatic_join and not self.cyclic_cross and selection_type != "TWO_CONNECTED" and len(self.main_splines.data.splines) >= 3: # When "Cyclic Cross" is active there is no need for Automatic Join (and at least three strokes are required).
v = ob_ctrl_pts.data.vertices
-
+
first_point_co = v[first_verts[i]].co
second_point_co = v[second_verts[i]].co
last_point_co = v[last_verts[i]].co
-
+
                # Coordinates of the point midway between the first and last verts.
verts_center_co = [(first_point_co[0] + last_point_co[0]) / 2, (first_point_co[1] + last_point_co[1]) / 2, (first_point_co[2] + last_point_co[2]) / 2]
-
+
vec_A = second_point_co - first_point_co
vec_B = second_point_co - mathutils.Vector(verts_center_co)
-
-
+
+
# Calculate the length of the first segment of the loop, and the length it would have after moving the first vert to the middle position between first and last.
length_original = (second_point_co - first_point_co).length
length_target = (second_point_co - mathutils.Vector(verts_center_co)).length
-
+
angle = vec_A.angle(vec_B) / math.pi
-
-
+
+
                if length_target <= length_original * 1.03 * self.join_stretch_factor and angle <= 0.008 * self.join_stretch_factor and not self.selection_U_exists: # If the target length doesn't stretch too much and its angle doesn't change too much either.
cyclic_loops_U.append(True)
-
+
# Move the first vert to the center coordinates.
ob_ctrl_pts.data.vertices[first_verts[i]].co = verts_center_co
-
+
# Select the last verts from Cyclic loops, for later deletion all at once.
v[last_verts[i]].select = True
-
+
else:
cyclic_loops_U.append(False)
-
+
else:
if self.cyclic_cross and not self.selection_U_exists and not ((self.selection_V_exists and not self.selection_V_is_closed) or (self.selection_V2_exists and not self.selection_V2_is_closed)): # If "Cyclic Cross" is active then "all" crossing curves become cyclic.
cyclic_loops_U.append(True)
else:
cyclic_loops_U.append(False)
-
+
# The cyclic_loops_U list needs to be reversed.
cyclic_loops_U.reverse()
-
+
# Delete the previously selected (last_)verts.
bpy.ops.object.editmode_toggle('INVOKE_REGION_WIN')
bpy.ops.mesh.delete('INVOKE_REGION_WIN', type='VERT')
bpy.ops.object.editmode_toggle('INVOKE_REGION_WIN')
-
+
# Create curves from control points.
bpy.ops.object.convert('INVOKE_REGION_WIN', target='CURVE', keep_original=False)
ob_curves_surf = bpy.context.scene.objects.active
bpy.ops.object.editmode_toggle('INVOKE_REGION_WIN')
bpy.ops.curve.spline_type_set('INVOKE_REGION_WIN', type='BEZIER')
bpy.ops.curve.handle_type_set('INVOKE_REGION_WIN', type='AUTOMATIC')
-
+
# Make Cyclic the splines designated as Cyclic.
for i in range(0, len(cyclic_loops_U)):
ob_curves_surf.data.splines[i].use_cyclic_u = cyclic_loops_U[i]
-
-
+
+
        #### Get the coords of all points on the first loop-U, for later comparison with its subdivided version, to know which points of the loops-U are crossed by the original strokes. The indices will be the same for the other loops-U.
if self.loops_on_strokes:
coords_loops_U_control_points = []
for p in ob_ctrl_pts.data.splines[0].bezier_points:
coords_loops_U_control_points.append(["%.4f" % p.co[0], "%.4f" % p.co[1], "%.4f" % p.co[2]])
-
+
tuple(coords_loops_U_control_points)
-
-
+
+
# Calculate number of edges-V in case option "Loops on strokes" is active or inactive.
if self.loops_on_strokes and not self.selection_V_exists:
edges_V_count = len(self.main_splines.data.splines) * self.edges_V
else:
edges_V_count = len(edges_proportions_V)
-
-
+
+
# The Follow precision will vary depending on the number of Follow face-loops.
precision_multiplier = round(2 + (edges_V_count / 15))
-
+
curve_cuts = bpy.context.scene.SURFSK_precision * precision_multiplier
-
+
# Subdivide the curves.
bpy.ops.curve.subdivide('INVOKE_REGION_WIN', number_cuts = curve_cuts)
-
+
        # The vert-position shift that happens when the splines are subdivided; used later to reorder the spline points.
verts_position_shift = curve_cuts + 1
-
+
bpy.ops.object.editmode_toggle('INVOKE_REGION_WIN')
-
-
+
+
        # Reorder the coordinates of each spline's points so the first point ends up where the first point was before subdividing the curve, and make a new curve object per spline (to handle memory better later).
splines_U_objects = []
for i in range(len(ob_curves_surf.data.splines)):
spline_U_curve = bpy.data.curves.new('SURFSKIO_spline_U_' + str(i), 'CURVE')
ob_spline_U = bpy.data.objects.new('SURFSKIO_spline_U_' + str(i), spline_U_curve)
bpy.context.scene.objects.link(ob_spline_U)
-
+
spline_U_curve.dimensions = "3D"
-
-
+
+
# Add points to the spline in the new curve object.
ob_spline_U.data.splines.new('BEZIER')
for t in range(len(ob_curves_surf.data.splines[i].bezier_points)):
@@ -2589,29 +2587,29 @@ class GPENCIL_OT_SURFSK_add_surface(bpy.types.Operator):
point_index = t + verts_position_shift - len(ob_curves_surf.data.splines[i].bezier_points)
else:
point_index = t
-
+
if t > 0: # to avoid adding the first point since it's added when the spline is created.
ob_spline_U.data.splines[0].bezier_points.add(1)
ob_spline_U.data.splines[0].bezier_points[t].co = ob_curves_surf.data.splines[i].bezier_points[point_index].co
-
-
+
+
if cyclic_loops_U[i] == True and not self.selection_U_exists: # If the loop is cyclic.
# Add a last point at the same location as the first one.
ob_spline_U.data.splines[0].bezier_points.add(1)
ob_spline_U.data.splines[0].bezier_points[len(ob_spline_U.data.splines[0].bezier_points) - 1].co = ob_spline_U.data.splines[0].bezier_points[0].co
else:
ob_spline_U.data.splines[0].use_cyclic_u = False
-
-
+
+
splines_U_objects.append(ob_spline_U)
-
-
+
+
bpy.ops.object.select_all('INVOKE_REGION_WIN', action='DESELECT')
bpy.data.objects[ob_spline_U.name].select = True
bpy.context.scene.objects.active = bpy.data.objects[ob_spline_U.name]
-
-
-
+
+
+
#### When option "Loops on strokes" is active each "Cross" loop will have its own proportions according to where the original strokes "touch" them.
if self.loops_on_strokes:
# Get the indices of points where the original strokes "touch" loops-U.
@@ -2620,37 +2618,37 @@ class GPENCIL_OT_SURFSK_add_surface(bpy.types.Operator):
bp = splines_U_objects[0].data.splines[0].bezier_points[i]
if ["%.4f" % bp.co[0], "%.4f" % bp.co[1], "%.4f" % bp.co[2]] in coords_loops_U_control_points:
points_U_crossed_by_strokes.append(i)
-
+
# Make a dictionary with the number of the edge, in the selected chain V, corresponding to each stroke.
edge_order_number_for_splines = {}
if self.selection_V_exists:
                # For two-connected selections, add a first hypothetical stroke at the beginning.
if selection_type == "TWO_CONNECTED":
edge_order_number_for_splines[0] = 0
-
-
+
+
for i in range(len(self.main_splines.data.splines)):
sp = self.main_splines.data.splines[i]
v_idx, dist_temp = self.shortest_distance(self.main_object, sp.bezier_points[0].co, verts_ordered_V_indices)
-
+
edge_idx_in_chain = verts_ordered_V_indices.index(v_idx) # Get the position (edges count) of the vert v_idx in the selected chain V.
-
+
                    # For two-connected selections the strokes go after the hypothetical stroke added before, so the index adds one per spline.
if selection_type == "TWO_CONNECTED":
spline_number = i + 1
else:
spline_number = i
-
+
edge_order_number_for_splines[spline_number] = edge_idx_in_chain
-
-
+
+
# Get the first and last verts indices for later comparison.
if i == 0:
first_v_idx = v_idx
elif i == len(self.main_splines.data.splines) - 1:
last_v_idx = v_idx
-
-
+
+
if self.selection_V_is_closed:
                # If there is no last stroke on the last vertex (same as the first vertex), add a hypothetical spline at the last vert's order.
if first_v_idx != last_v_idx:
@@ -2661,9 +2659,9 @@ class GPENCIL_OT_SURFSK_add_surface(bpy.types.Operator):
edge_order_number_for_splines[(len(self.main_splines.data.splines) - 1) + 1] = len(verts_ordered_V_indices) - 1
else:
edge_order_number_for_splines[len(self.main_splines.data.splines) - 1] = len(verts_ordered_V_indices) - 1
-
-
-
+
+
+
#### Get the coords of the points distributed along the "crossing curves", with appropriate proportions-V.
surface_splines_parsed = []
for i in range(len(splines_U_objects)):
@@ -2676,36 +2674,36 @@ class GPENCIL_OT_SURFSK_add_surface(bpy.types.Operator):
segments_distances = []
for t in range(len(sp_ob.data.splines[0].bezier_points)):
bp = sp_ob.data.splines[0].bezier_points[t]
-
+
if t == 0:
last_p = bp.co
else:
actual_p = bp.co
dist += (last_p - actual_p).length
-
+
if t in points_U_crossed_by_strokes:
segments_distances.append(dist)
full_dist += dist
-
+
dist = 0
-
+
last_p = actual_p
-
+
# Calculate Proportions.
used_edges_proportions_V = []
for t in range(len(segments_distances)):
if self.selection_V_exists:
if t == 0:
order_number_last_stroke = 0
-
+
segment_edges_length_V = 0
segment_edges_length_V2 = 0
for order in range(order_number_last_stroke, edge_order_number_for_splines[t + 1]):
segment_edges_length_V += edges_lengths_V[order]
if self.selection_V2_exists:
segment_edges_length_V2 += edges_lengths_V2[order]
-
-
+
+
for order in range(order_number_last_stroke, edge_order_number_for_splines[t + 1]):
# Calculate each "sub-segment" (the ones between each stroke) length.
if self.selection_V2_exists:
@@ -2714,20 +2712,20 @@ class GPENCIL_OT_SURFSK_add_surface(bpy.types.Operator):
else:
proportion_sub_seg = edges_lengths_V[order] / segment_edges_length_V
sub_seg_dist = segments_distances[t] * proportion_sub_seg
-
+
used_edges_proportions_V.append(sub_seg_dist / full_dist)
-
+
order_number_last_stroke = edge_order_number_for_splines[t + 1]
-
+
else:
for c in range(self.edges_V):
# Calculate each "sub-segment" (the ones between each stroke) length.
- sub_seg_dist = segments_distances[t] / self.edges_V
+ sub_seg_dist = segments_distances[t] / self.edges_V
used_edges_proportions_V.append(sub_seg_dist / full_dist)
-
+
actual_spline = self.distribute_pts(sp_ob.data.splines, used_edges_proportions_V)
surface_splines_parsed.append(actual_spline[0])
-
+
else:
if self.selection_V2_exists:
used_edges_proportions_V = []
@@ -2735,26 +2733,26 @@ class GPENCIL_OT_SURFSK_add_surface(bpy.types.Operator):
used_edges_proportions_V.append(edges_proportions_V2[p] - ((edges_proportions_V2[p] - edges_proportions_V[p]) / len(splines_U_objects) * i))
else:
used_edges_proportions_V = edges_proportions_V
-
+
actual_spline = self.distribute_pts(sp_ob.data.splines, used_edges_proportions_V)
surface_splines_parsed.append(actual_spline[0])
-
-
-
-
+
+
+
+
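The loop above converts the distances between consecutive crossing strokes into proportions of the whole curve; when no V selection drives the spacing, each segment is simply split into self.edges_V equal sub-segments. A compact sketch of that normalisation with made-up numbers:

segments_distances = [2.0, 1.0, 3.0]   # curve length between consecutive crossing strokes
full_dist = sum(segments_distances)
edges_V = 2                            # sub-segments per segment (the no-selection branch)
used_edges_proportions_V = []
for seg_dist in segments_distances:
    sub_seg_dist = seg_dist / edges_V
    used_edges_proportions_V.extend([sub_seg_dist / full_dist] * edges_V)
# The proportions sum to 1.0 and are handed to distribute_pts() to place the points.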
# Set the verts of the first and last splines to the locations of the respective verts in the selections.
if self.selection_V_exists:
for i in range(0, len(surface_splines_parsed[0])):
surface_splines_parsed[len(surface_splines_parsed) - 1][i] = self.main_object.matrix_world * verts_ordered_V[i].co
-
+
if selection_type == "TWO_NOT_CONNECTED":
if self.selection_V2_exists:
for i in range(0, len(surface_splines_parsed[0])):
surface_splines_parsed[0][i] = self.main_object.matrix_world * verts_ordered_V2[i].co
-
-
-
-
+
+
+
+
# When "Automatic join" option is active (and the selection type is not "TWO_CONNECTED"), merge the verts of the tips of the loops when they are "near enough".
if self.automatic_join and selection_type != "TWO_CONNECTED":
#### Join the tips of "Follow" loops that are near enough and must be "closed".
@@ -2763,226 +2761,226 @@ class GPENCIL_OT_SURFSK_add_surface(bpy.types.Operator):
sp = surface_splines_parsed
loop_segment_dist = (sp[0][i] - sp[1][i]).length
full_loop_dist = loop_segment_dist * self.edges_U
-
+
verts_middle_position_co = [(sp[0][i][0] + sp[len(sp) - 1][i][0]) / 2, (sp[0][i][1] + sp[len(sp) - 1][i][1]) / 2, (sp[0][i][2] + sp[len(sp) - 1][i][2]) / 2]
-
+
points_original = []
points_original.append(sp[1][i])
points_original.append(sp[0][i])
-
+
points_target = []
points_target.append(sp[1][i])
points_target.append(mathutils.Vector(verts_middle_position_co))
-
+
vec_A = points_original[0] - points_original[1]
vec_B = points_target[0] - points_target[1]
-
-
+
+
angle = vec_A.angle(vec_B) / math.pi
-
+
edge_new_length = (mathutils.Vector(verts_middle_position_co) - sp[1][i]).length
-
+
if edge_new_length <= loop_segment_dist * 1.5 * self.join_stretch_factor and angle < 0.25 * self.join_stretch_factor: # If after moving the verts to the middle point, the segment doesn't stretch too much.
if not (self.selection_U_exists and i == 0) and not (self.selection_U2_exists and i == len(surface_splines_parsed[0]) - 1): # Avoid joining when the actual loop must be merged with the original mesh.
# Change the coords of both verts to the middle position.
surface_splines_parsed[0][i] = verts_middle_position_co
surface_splines_parsed[len(surface_splines_parsed) - 1][i] = verts_middle_position_co
-
-
-
+
+
+
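The "Automatic join" test above only snaps both loop tips to their midpoint if the tip segment neither stretches too much nor bends too sharply. A rough restatement of that criterion with plain tuples instead of mathutils Vectors (a sketch, not the operator's code):

import math

def tips_should_join(tip_a, tip_b, inner, stretch=1.0):
    # tip_a / tip_b: tips of the first and last loops; inner: the vert next to tip_a.
    mid = tuple((a + b) / 2.0 for a, b in zip(tip_a, tip_b))
    seg_len = math.dist(inner, tip_a)      # current length of the tip segment
    new_len = math.dist(inner, mid)        # length after snapping the tip to the midpoint
    vec_a = tuple(i - a for i, a in zip(inner, tip_a))
    vec_b = tuple(i - m for i, m in zip(inner, mid))
    dot = sum(x * y for x, y in zip(vec_a, vec_b))
    norm = seg_len * new_len
    angle = math.acos(max(-1.0, min(1.0, dot / norm))) / math.pi if norm else 0.0
    return new_len <= seg_len * 1.5 * stretch and angle < 0.25 * stretch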
#### Delete the object with control points and the object from the grease pencil conversion.
bpy.ops.object.select_all('INVOKE_REGION_WIN', action='DESELECT')
bpy.data.objects[ob_ctrl_pts.name].select = True
bpy.context.scene.objects.active = bpy.data.objects[ob_ctrl_pts.name]
-
+
bpy.ops.object.delete()
-
-
+
+
for sp_ob in splines_U_objects:
bpy.ops.object.select_all('INVOKE_REGION_WIN', action='DESELECT')
bpy.data.objects[sp_ob.name].select = True
bpy.context.scene.objects.active = bpy.data.objects[sp_ob.name]
-
+
bpy.ops.object.delete()
-
-
-
-
+
+
+
+
#### Generate surface.
-
+
# Get all verts coords.
all_surface_verts_co = []
for i in range(0, len(surface_splines_parsed)):
# Get coords of all verts and make a list with them
for pt_co in surface_splines_parsed[i]:
all_surface_verts_co.append(pt_co)
-
-
+
+
# Define verts for each face.
all_surface_faces = []
for i in range(0, len(all_surface_verts_co) - len(surface_splines_parsed[0])):
if ((i + 1) / len(surface_splines_parsed[0]) != int((i + 1) / len(surface_splines_parsed[0]))):
all_surface_faces.append([i+1, i , i + len(surface_splines_parsed[0]), i + len(surface_splines_parsed[0]) + 1])
-
-
+
+
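The face loop above walks a flat list of grid vertices (one spline after another) and emits a quad for every position that is not at the end of a row; the float comparison it uses is equivalent to a modulo test. A small sketch with a hypothetical 3 x 4 grid:

cols = 4                       # points per spline
verts_count = 3 * cols         # three splines
faces = []
for i in range(verts_count - cols):
    if (i + 1) % cols != 0:    # skip the seam between consecutive splines
        faces.append([i + 1, i, i + cols, i + cols + 1])
# faces -> [[1, 0, 4, 5], [2, 1, 5, 6], [3, 2, 6, 7], [5, 4, 8, 9], ...] (6 quads in total)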
# Build the mesh.
surf_me_name = "SURFSKIO_surface"
me_surf = bpy.data.meshes.new(surf_me_name)
-
+
me_surf.from_pydata(all_surface_verts_co, [], all_surface_faces)
-
+
me_surf.update()
-
+
ob_surface = bpy.data.objects.new(surf_me_name, me_surf)
bpy.context.scene.objects.link(ob_surface)
-
-
+
+
# Select all the "unselected but participating" verts, from closed selection or double selections with middle-vertex, for later join with remove doubles.
for v_idx in single_unselected_verts:
self.main_object.data.vertices[v_idx].select = True
-
-
+
+
#### Join the new mesh to the main object.
ob_surface.select = True
self.main_object.select = True
bpy.context.scene.objects.active = bpy.data.objects[self.main_object.name]
-
+
bpy.ops.object.join('INVOKE_REGION_WIN')
-
+
bpy.ops.object.editmode_toggle('INVOKE_REGION_WIN')
-
+
bpy.ops.mesh.remove_doubles('INVOKE_REGION_WIN', threshold=0.0001)
bpy.ops.mesh.normals_make_consistent('INVOKE_REGION_WIN', inside=False)
bpy.ops.mesh.select_all('INVOKE_REGION_WIN', action='DESELECT')
-
-
-
+
+
+
return{'FINISHED'}
-
-
-
+
+
+
def execute(self, context):
bpy.context.user_preferences.edit.use_global_undo = False
-
+
if not self.is_fill_faces:
bpy.ops.wm.context_set_value(data_path='tool_settings.mesh_select_mode', value='True, False, False')
-
+
# Build splines from the "last saved splines".
last_saved_curve = bpy.data.curves.new('SURFSKIO_last_crv', 'CURVE')
self.main_splines = bpy.data.objects.new('SURFSKIO_last_crv', last_saved_curve)
bpy.context.scene.objects.link(self.main_splines)
-
+
last_saved_curve.dimensions = "3D"
-
+
for sp in self.last_strokes_splines_coords:
spline = self.main_splines.data.splines.new('BEZIER')
spline.bezier_points.add(len(sp) - 1) # one less because a new spline already contains one point.
for p in range(0, len(sp)):
spline.bezier_points[p].co = [sp[p][0], sp[p][1], sp[p][2]]
-
-
+
+
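A hedged sketch of the curve rebuild done above, assuming the Blender 2.6x/2.7x Python API used throughout this file (scene.objects.link); note the .add(len(coords) - 1), since a freshly created spline already holds one point:

import bpy

def curve_from_coords(name, strokes):
    crv = bpy.data.curves.new(name, 'CURVE')
    crv.dimensions = '3D'
    ob = bpy.data.objects.new(name, crv)
    bpy.context.scene.objects.link(ob)
    for coords in strokes:                       # strokes: list of lists of (x, y, z)
        spline = crv.splines.new('BEZIER')
        spline.bezier_points.add(len(coords) - 1)
        for bp, co in zip(spline.bezier_points, coords):
            bp.co = co
    return ob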
bpy.ops.object.editmode_toggle('INVOKE_REGION_WIN')
-
+
bpy.ops.object.select_all('INVOKE_REGION_WIN', action='DESELECT')
bpy.data.objects[self.main_splines.name].select = True
bpy.context.scene.objects.active = bpy.data.objects[self.main_splines.name]
-
+
bpy.ops.object.editmode_toggle('INVOKE_REGION_WIN')
-
+
bpy.ops.curve.select_all('INVOKE_REGION_WIN', action='SELECT')
bpy.ops.curve.handle_type_set(type='VECTOR') # Important to make it vector first and then automatic, otherwise the tip handles get too big and distort the shrinkwrap results later.
bpy.ops.curve.handle_type_set('INVOKE_REGION_WIN', type='AUTOMATIC')
bpy.ops.curve.select_all('INVOKE_REGION_WIN', action='DESELECT')
bpy.ops.object.editmode_toggle('INVOKE_REGION_WIN')
-
-
+
+
self.main_splines.name = "SURFSKIO_temp_strokes"
-
-
+
+
if self.is_crosshatch:
strokes_for_crosshatch = True
strokes_for_rectangular_surface = False
else:
strokes_for_rectangular_surface = True
strokes_for_crosshatch = False
-
-
+
+
bpy.ops.object.select_all('INVOKE_REGION_WIN', action='DESELECT')
bpy.data.objects[self.main_object.name].select = True
bpy.context.scene.objects.active = bpy.data.objects[self.main_object.name]
-
+
bpy.ops.object.editmode_toggle('INVOKE_REGION_WIN')
-
-
+
+
if strokes_for_rectangular_surface:
self.rectangular_surface()
elif strokes_for_crosshatch:
self.crosshatch_surface_execute()
-
-
+
+
#### Delete main splines
bpy.ops.object.editmode_toggle('INVOKE_REGION_WIN')
-
+
bpy.ops.object.select_all('INVOKE_REGION_WIN', action='DESELECT')
bpy.data.objects[self.main_splines.name].select = True
bpy.context.scene.objects.active = bpy.data.objects[self.main_splines.name]
-
+
bpy.ops.object.delete()
-
+
bpy.ops.object.select_all('INVOKE_REGION_WIN', action='DESELECT')
bpy.data.objects[self.main_object.name].select = True
bpy.context.scene.objects.active = bpy.data.objects[self.main_object.name]
-
+
bpy.ops.object.editmode_toggle('INVOKE_REGION_WIN')
-
-
+
+
bpy.context.user_preferences.edit.use_global_undo = self.initial_global_undo_state
-
+
return{'FINISHED'}
-
-
-
+
+
+
def invoke(self, context, event):
self.initial_global_undo_state = bpy.context.user_preferences.edit.use_global_undo
-
+
self.main_object = bpy.context.scene.objects.active
self.main_object_selected_verts_count = int(self.main_object.data.total_vert_sel)
-
-
+
+
bpy.context.user_preferences.edit.use_global_undo = False
-
-
+
+
bpy.ops.wm.context_set_value(data_path='tool_settings.mesh_select_mode', value='True, False, False')
-
+
# Toggle out of Edit Mode and back in, to make sure the current mesh selection is picked up.
bpy.ops.object.editmode_toggle('INVOKE_REGION_WIN')
bpy.ops.object.editmode_toggle('INVOKE_REGION_WIN')
-
-
-
+
+
+
self.cyclic_cross = bpy.context.scene.SURFSK_cyclic_cross
self.cyclic_follow = bpy.context.scene.SURFSK_cyclic_follow
self.automatic_join = bpy.context.scene.SURFSK_automatic_join
self.loops_on_strokes = bpy.context.scene.SURFSK_loops_on_strokes
self.keep_strokes = bpy.context.scene.SURFSK_keep_strokes
-
+
self.edges_U = 10
-
+
if self.loops_on_strokes:
self.edges_V = 3
else:
self.edges_V = 10
-
+
self.is_fill_faces = False
-
+
self.stopping_errors = False
-
+
self.last_strokes_splines_coords = []
-
-
+
+
#### Determine the type of the strokes.
self.strokes_type = get_strokes_type(self.main_object)
-
+
#### Check whether grease pencil strokes or curves will be used.
if self.strokes_type == "GP_STROKES" or self.strokes_type == "EXTERNAL_CURVE": # If there are strokes to be used.
if self.strokes_type == "GP_STROKES":
@@ -3001,138 +2999,138 @@ class GPENCIL_OT_SURFSK_add_surface(bpy.types.Operator):
if ob != bpy.context.scene.objects.active:
self.original_curve = ob
self.using_external_curves = True
-
+
bpy.ops.object.editmode_toggle('INVOKE_REGION_WIN')
-
-
+
+
#### Make sure there are no objects left from erroneous executions of this operator, with the reserved names used here.
for o in bpy.data.objects:
if o.name.find("SURFSKIO_") != -1:
bpy.ops.object.select_all('INVOKE_REGION_WIN', action='DESELECT')
bpy.data.objects[o.name].select = True
bpy.context.scene.objects.active = bpy.data.objects[o.name]
-
+
bpy.ops.object.delete()
-
-
+
+
bpy.ops.object.select_all('INVOKE_REGION_WIN', action='DESELECT')
bpy.data.objects[self.original_curve.name].select = True
bpy.context.scene.objects.active = bpy.data.objects[self.original_curve.name]
-
+
bpy.ops.object.duplicate('INVOKE_REGION_WIN')
-
-
+
+
self.temporary_curve = bpy.context.scene.objects.active
-
-
+
+
# Deselect all points of the curve
bpy.ops.object.editmode_toggle('INVOKE_REGION_WIN')
bpy.ops.curve.select_all('INVOKE_REGION_WIN', action='DESELECT')
bpy.ops.object.editmode_toggle('INVOKE_REGION_WIN')
-
-
-
+
+
+
# Delete splines with only a single isolated point.
for i in range(len(self.temporary_curve.data.splines)):
sp = self.temporary_curve.data.splines[i]
-
+
if len(sp.bezier_points) == 1:
sp.bezier_points[0].select_control_point = True
-
+
bpy.ops.object.editmode_toggle('INVOKE_REGION_WIN')
bpy.ops.curve.delete(type='VERT')
bpy.ops.object.editmode_toggle('INVOKE_REGION_WIN')
-
-
+
+
bpy.ops.object.select_all('INVOKE_REGION_WIN', action='DESELECT')
bpy.data.objects[self.temporary_curve.name].select = True
bpy.context.scene.objects.active = bpy.data.objects[self.temporary_curve.name]
-
+
#### Set a minimum number of points for crosshatch
minimum_points_num = 15
-
+
bpy.ops.object.editmode_toggle('INVOKE_REGION_WIN')
# Check that each curve has at least minimum_points_num points (a bit more than the face-loops limit). If not, subdivide to reach at least that number of points.
for i in range(len(self.temporary_curve.data.splines)):
sp = self.temporary_curve.data.splines[i]
-
+
if len(sp.bezier_points) < minimum_points_num:
for bp in sp.bezier_points:
bp.select_control_point = True
-
+
if (len(sp.bezier_points) - 1) != 0:
subdivide_cuts = int((minimum_points_num - len(sp.bezier_points)) / (len(sp.bezier_points) - 1)) + 1 # Number of cuts needed so a curve with N points ends up with close to "minimum_points_num" points after subdividing.
else:
subdivide_cuts = 0
-
-
+
+
bpy.ops.curve.subdivide('INVOKE_REGION_WIN', number_cuts = subdivide_cuts)
bpy.ops.curve.select_all('INVOKE_REGION_WIN', action='DESELECT')
-
+
bpy.ops.object.editmode_toggle('INVOKE_REGION_WIN')
-
-
-
+
+
+
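A quick numeric check of the cut-count formula used above: subdividing a Bezier spline of n points with c cuts per span yields n + (n - 1) * c points, so the computed value lands at or just above the minimum:

def cuts_needed(n_points, target):
    # Same formula as above, guarded for single-point splines.
    return int((target - n_points) / (n_points - 1)) + 1 if n_points > 1 else 0

n = 6
c = cuts_needed(n, 15)           # -> 2
points_after = n + (n - 1) * c   # -> 16, i.e. at least the 15-point minimum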
# Detect if the strokes are a crosshatch and do it if it is.
self.crosshatch_surface_invoke(self.temporary_curve)
-
-
-
+
+
+
if not self.is_crosshatch:
bpy.ops.object.select_all('INVOKE_REGION_WIN', action='DESELECT')
bpy.data.objects[self.temporary_curve.name].select = True
bpy.context.scene.objects.active = bpy.data.objects[self.temporary_curve.name]
-
+
bpy.ops.object.editmode_toggle('INVOKE_REGION_WIN')
-
+
#### Set a minimum number of points for rectangular surfaces.
minimum_points_num = 60
-
+
# Check that each curve has at least minimum_points_num points (a bit more than the face-loops limit). If not, subdivide to reach at least that number of points.
for i in range(len(self.temporary_curve.data.splines)):
sp = self.temporary_curve.data.splines[i]
-
+
if len(sp.bezier_points) < minimum_points_num:
for bp in sp.bezier_points:
bp.select_control_point = True
-
+
if (len(sp.bezier_points) - 1) != 0:
subdivide_cuts = int((minimum_points_num - len(sp.bezier_points)) / (len(sp.bezier_points) - 1)) + 1 # Number of cuts needed so a curve with N points ends up with close to "minimum_points_num" points after subdividing.
else:
subdivide_cuts = 0
-
-
+
+
bpy.ops.curve.subdivide('INVOKE_REGION_WIN', number_cuts = subdivide_cuts)
bpy.ops.curve.select_all('INVOKE_REGION_WIN', action='DESELECT')
-
+
bpy.ops.object.editmode_toggle('INVOKE_REGION_WIN')
-
-
-
-
+
+
+
+
# Save coordinates of the actual strokes (as the "last saved splines").
for sp_idx in range(len(self.temporary_curve.data.splines)):
self.last_strokes_splines_coords.append([])
for bp_idx in range(len(self.temporary_curve.data.splines[sp_idx].bezier_points)):
coords = self.temporary_curve.matrix_world * self.temporary_curve.data.splines[sp_idx].bezier_points[bp_idx].co
self.last_strokes_splines_coords[sp_idx].append([coords[0], coords[1], coords[2]])
-
-
+
+
# Check for cyclic splines, put the first and last points in the middle of their actual positions.
for sp_idx in range(len(self.temporary_curve.data.splines)):
if self.temporary_curve.data.splines[sp_idx].use_cyclic_u == True:
first_p_co = self.last_strokes_splines_coords[sp_idx][0]
last_p_co = self.last_strokes_splines_coords[sp_idx][len(self.last_strokes_splines_coords[sp_idx]) - 1]
-
+
target_co = [(first_p_co[0] + last_p_co[0]) / 2, (first_p_co[1] + last_p_co[1]) / 2, (first_p_co[2] + last_p_co[2]) / 2]
-
+
self.last_strokes_splines_coords[sp_idx][0] = target_co
self.last_strokes_splines_coords[sp_idx][len(self.last_strokes_splines_coords[sp_idx]) - 1] = target_co
-
+
tuple(self.last_strokes_splines_coords)
-
-
-
+
+
+
# Estimation of the average length of the segments between each point of the grease pencil strokes. Will be useful to determine whether a curve should be made "Cyclic".
segments_lengths_sum = 0
segments_count = 0
@@ -3141,95 +3139,95 @@ class GPENCIL_OT_SURFSK_add_surface(bpy.types.Operator):
if i != 0 and len(random_spline) - 1 >= i:
segments_lengths_sum += (random_spline[i - 1].co - random_spline[i].co).length
segments_count += 1
-
+
self.average_gp_segment_length = segments_lengths_sum / segments_count
-
-
+
+
#### Delete temporary strokes curve object
bpy.ops.object.select_all('INVOKE_REGION_WIN', action='DESELECT')
bpy.data.objects[self.temporary_curve.name].select = True
bpy.context.scene.objects.active = bpy.data.objects[self.temporary_curve.name]
-
+
bpy.ops.object.delete()
-
-
+
+
bpy.ops.object.select_all('INVOKE_REGION_WIN', action='DESELECT')
bpy.data.objects[self.main_object.name].select = True
bpy.context.scene.objects.active = bpy.data.objects[self.main_object.name]
-
+
bpy.ops.object.editmode_toggle('INVOKE_REGION_WIN')
-
-
+
+
self.execute(context)
bpy.context.user_preferences.edit.use_global_undo = False # Set again, since "execute()" restores it to its initial value.
-
-
- #### If "Keep strokes" option is not active, delete original strokes curve object.
+
+
+ #### If "Keep strokes" option is not active, delete original strokes curve object.
if (not self.stopping_errors and not self.keep_strokes) or self.is_crosshatch:
bpy.ops.object.editmode_toggle('INVOKE_REGION_WIN')
bpy.ops.object.select_all('INVOKE_REGION_WIN', action='DESELECT')
bpy.data.objects[self.original_curve.name].select = True
bpy.context.scene.objects.active = bpy.data.objects[self.original_curve.name]
-
+
bpy.ops.object.delete()
-
+
bpy.ops.object.select_all('INVOKE_REGION_WIN', action='DESELECT')
bpy.data.objects[self.main_object.name].select = True
bpy.context.scene.objects.active = bpy.data.objects[self.main_object.name]
-
+
bpy.ops.object.editmode_toggle('INVOKE_REGION_WIN')
-
-
-
+
+
+
#### Delete grease pencil strokes.
if self.strokes_type == "GP_STROKES" and not self.stopping_errors:
bpy.ops.gpencil.active_frame_delete('INVOKE_REGION_WIN')
-
-
+
+
bpy.context.user_preferences.edit.use_global_undo = self.initial_global_undo_state
-
-
+
+
if not self.stopping_errors:
return {"FINISHED"}
else:
return{"CANCELLED"}
-
+
elif self.strokes_type == "SELECTION_ALONE":
self.is_fill_faces = True
-
+
created_faces_count = self.fill_with_faces(self.main_object)
-
+
bpy.ops.object.editmode_toggle('INVOKE_REGION_WIN')
-
+
if created_faces_count == 0:
self.report({'WARNING'}, "There aren't any strokes.")
return {"CANCELLED"}
else:
return {"FINISHED"}
-
-
-
-
+
+
+
+
elif self.strokes_type == "EXTERNAL_NO_CURVE":
self.report({'WARNING'}, "The secondary object is not a Curve.")
return{"CANCELLED"}
-
+
elif self.strokes_type == "MORE_THAN_ONE_EXTERNAL":
self.report({'WARNING'}, "There shouldn't be more than one secondary object selected.")
return{"CANCELLED"}
-
+
elif self.strokes_type == "SINGLE_GP_STROKE_NO_SELECTION" or self.strokes_type == "SINGLE_CURVE_STROKE_NO_SELECTION":
self.report({'WARNING'}, "At least one stroke and one selection, or two strokes, are needed.")
return{"CANCELLED"}
-
+
elif self.strokes_type == "NO_STROKES":
self.report({'WARNING'}, "There aren't any strokes.")
return{"CANCELLED"}
-
+
elif self.strokes_type == "CURVE_WITH_NON_BEZIER_SPLINES":
self.report({'WARNING'}, "All splines must be Bezier.")
return{"CANCELLED"}
-
+
else:
return{"CANCELLED"}
@@ -3239,8 +3237,8 @@ class GPENCIL_OT_SURFSK_edit_strokes(bpy.types.Operator):
bl_idname = "gpencil.surfsk_edit_strokes"
bl_label = "Bsurfaces edit strokes"
bl_description = "Edit the grease pencil strokes or curves used."
-
-
+
+
def execute(self, context):
#### Determine the type of the strokes.
self.strokes_type = get_strokes_type(self.main_object)
@@ -3250,13 +3248,13 @@ class GPENCIL_OT_SURFSK_edit_strokes(bpy.types.Operator):
for ob in selected_objs:
if ob != bpy.context.scene.objects.active:
curve_ob = ob
-
+
bpy.ops.object.editmode_toggle('INVOKE_REGION_WIN')
-
+
bpy.ops.object.select_all('INVOKE_REGION_WIN', action='DESELECT')
bpy.data.objects[curve_ob.name].select = True
bpy.context.scene.objects.active = bpy.data.objects[curve_ob.name]
-
+
bpy.ops.object.editmode_toggle('INVOKE_REGION_WIN')
elif self.strokes_type == "GP_STROKES" or self.strokes_type == "SINGLE_GP_STROKE_NO_SELECTION":
#### Convert grease pencil strokes to curve.
@@ -3265,29 +3263,29 @@ class GPENCIL_OT_SURFSK_edit_strokes(bpy.types.Operator):
for ob in bpy.context.selected_objects:
if ob != bpy.context.scene.objects.active and ob.name.startswith("GP_Layer"):
ob_gp_strokes = ob
-
+
#ob_gp_strokes = bpy.context.object
-
+
#### Delete grease pencil strokes.
bpy.ops.object.select_all('INVOKE_REGION_WIN', action='DESELECT')
bpy.data.objects[self.main_object.name].select = True
bpy.context.scene.objects.active = bpy.data.objects[self.main_object.name]
-
+
bpy.ops.gpencil.active_frame_delete('INVOKE_REGION_WIN')
-
-
+
+
#### Clean up curves.
bpy.ops.object.select_all('INVOKE_REGION_WIN', action='DESELECT')
bpy.data.objects[ob_gp_strokes.name].select = True
bpy.context.scene.objects.active = bpy.data.objects[ob_gp_strokes.name]
-
+
curve_crv = ob_gp_strokes.data
bpy.ops.object.editmode_toggle('INVOKE_REGION_WIN')
bpy.ops.curve.spline_type_set('INVOKE_REGION_WIN', type="BEZIER")
bpy.ops.curve.handle_type_set('INVOKE_REGION_WIN', type="AUTOMATIC")
bpy.data.curves[curve_crv.name].show_handles = False
bpy.data.curves[curve_crv.name].show_normal_face = False
-
+
elif self.strokes_type == "EXTERNAL_NO_CURVE":
self.report({'WARNING'}, "The secondary object is not a Curve.")
return{"CANCELLED"}
@@ -3299,14 +3297,14 @@ class GPENCIL_OT_SURFSK_edit_strokes(bpy.types.Operator):
return{"CANCELLED"}
else:
return{"CANCELLED"}
-
-
-
+
+
+
def invoke (self, context, event):
self.main_object = bpy.context.object
-
+
self.execute(context)
-
+
return {"FINISHED"}
@@ -3317,9 +3315,9 @@ class CURVE_OT_SURFSK_reorder_splines(bpy.types.Operator):
bl_label = "Bsurfaces reorder splines"
bl_description = "Defines the order of the splines by using grease pencil strokes."
bl_options = {'REGISTER', 'UNDO'}
-
-
-
+
+
+
def execute(self, context):
objects_to_delete = []
#### Convert grease pencil strokes to curve.
@@ -3328,320 +3326,320 @@ class CURVE_OT_SURFSK_reorder_splines(bpy.types.Operator):
for ob in bpy.context.selected_objects:
if ob != bpy.context.scene.objects.active and ob.name.startswith("GP_Layer"):
GP_strokes_curve = ob
-
+
#GP_strokes_curve = bpy.context.object
objects_to_delete.append(GP_strokes_curve)
-
+
bpy.ops.object.select_all('INVOKE_REGION_WIN', action='DESELECT')
bpy.data.objects[GP_strokes_curve.name].select = True
bpy.context.scene.objects.active = bpy.data.objects[GP_strokes_curve.name]
-
-
+
+
bpy.ops.object.editmode_toggle('INVOKE_REGION_WIN')
bpy.ops.curve.select_all('INVOKE_REGION_WIN', action='SELECT')
bpy.ops.curve.subdivide('INVOKE_REGION_WIN', number_cuts = 100)
bpy.ops.object.editmode_toggle('INVOKE_REGION_WIN')
-
+
bpy.ops.object.duplicate('INVOKE_REGION_WIN')
GP_strokes_mesh = bpy.context.object
objects_to_delete.append(GP_strokes_mesh)
-
+
GP_strokes_mesh.data.resolution_u = 1
bpy.ops.object.convert(target='MESH', keep_original=False)
-
-
+
+
bpy.ops.object.select_all('INVOKE_REGION_WIN', action='DESELECT')
bpy.data.objects[self.main_curve.name].select = True
bpy.context.scene.objects.active = bpy.data.objects[self.main_curve.name]
-
+
bpy.ops.object.duplicate('INVOKE_REGION_WIN')
curves_duplicate_1 = bpy.context.object
objects_to_delete.append(curves_duplicate_1)
-
-
-
+
+
+
minimum_points_num = 500
-
-
+
+
for x in range(round(minimum_points_num / 100)): # Some iterations since the subdivision operator has a limit of 100 subdivisions per iteration.
#### Check that each curve has at least minimum_points_num points. If not, subdivide to reach at least that number of points.
for i in range(len(curves_duplicate_1.data.splines)):
sp = curves_duplicate_1.data.splines[i]
-
+
if len(sp.bezier_points) < minimum_points_num:
for bp in sp.bezier_points:
bp.select_control_point = True
-
+
if (len(sp.bezier_points) - 1) != 0:
subdivide_cuts = int((minimum_points_num - len(sp.bezier_points)) / (len(sp.bezier_points) - 1)) + 1 # Number of cuts needed so a curve with N points ends up with close to "minimum_points_num" points after subdividing.
else:
subdivide_cuts = 0
-
+
bpy.ops.object.editmode_toggle('INVOKE_REGION_WIN')
bpy.ops.curve.subdivide('INVOKE_REGION_WIN', number_cuts = subdivide_cuts)
bpy.ops.curve.select_all('INVOKE_REGION_WIN', action='DESELECT')
bpy.ops.object.editmode_toggle('INVOKE_REGION_WIN')
-
-
+
+
bpy.ops.object.duplicate('INVOKE_REGION_WIN')
curves_duplicate_2 = bpy.context.object
objects_to_delete.append(curves_duplicate_2)
-
-
+
+
#### Duplicate the duplicate and add Shrinkwrap to it, with the grease pencil strokes curve as target.
bpy.ops.object.select_all('INVOKE_REGION_WIN', action='DESELECT')
bpy.data.objects[curves_duplicate_2.name].select = True
bpy.context.scene.objects.active = bpy.data.objects[curves_duplicate_2.name]
-
+
bpy.ops.object.modifier_add('INVOKE_REGION_WIN', type='SHRINKWRAP')
curves_duplicate_2.modifiers["Shrinkwrap"].wrap_method = "NEAREST_VERTEX"
curves_duplicate_2.modifiers["Shrinkwrap"].target = GP_strokes_mesh
bpy.ops.object.modifier_apply('INVOKE_REGION_WIN', apply_as='DATA', modifier='Shrinkwrap')
-
-
+
+
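The reordering trick above snaps a densified duplicate of the curves onto the converted grease pencil strokes with a NEAREST_VERTEX Shrinkwrap and then bakes the result. An equivalent data-API sketch (assumes Blender's bpy and 2.6x/2.7x style active-object handling):

import bpy

def shrinkwrap_onto(ob, target):
    mod = ob.modifiers.new("Shrinkwrap", 'SHRINKWRAP')
    mod.wrap_method = 'NEAREST_VERTEX'
    mod.target = target
    bpy.context.scene.objects.active = ob
    bpy.ops.object.modifier_apply(apply_as='DATA', modifier=mod.name)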
#### Get the distance of each vert from its original position to its position with Shrinkwrap.
nearest_points_coords = {}
for st_idx in range(len(curves_duplicate_1.data.splines)):
for bp_idx in range(len(curves_duplicate_1.data.splines[st_idx].bezier_points)):
bp_1_co = curves_duplicate_1.matrix_world * curves_duplicate_1.data.splines[st_idx].bezier_points[bp_idx].co
bp_2_co = curves_duplicate_2.matrix_world * curves_duplicate_2.data.splines[st_idx].bezier_points[bp_idx].co
-
+
if bp_idx == 0:
shortest_dist = (bp_1_co - bp_2_co).length
nearest_points_coords[st_idx] = ("%.4f" % bp_2_co[0], "%.4f" % bp_2_co[1], "%.4f" % bp_2_co[2])
-
+
dist = (bp_1_co - bp_2_co).length
-
+
if dist < shortest_dist:
nearest_points_coords[st_idx] = ("%.4f" % bp_2_co[0], "%.4f" % bp_2_co[1], "%.4f" % bp_2_co[2])
shortest_dist = dist
-
-
-
+
+
+
#### Get all coords of GP strokes points, for comparison.
GP_strokes_coords = []
for st_idx in range(len(GP_strokes_curve.data.splines)):
GP_strokes_coords.append([("%.4f" % x if "%.4f" % x != "-0.00" else "0.00", "%.4f" % y if "%.4f" % y != "-0.00" else "0.00", "%.4f" % z if "%.4f" % z != "-0.00" else "0.00") for x, y, z in [bp.co for bp in GP_strokes_curve.data.splines[st_idx].bezier_points]])
-
-
+
+
#### Find the GP stroke points whose coords match the nearest points of the curves (after shrinkwrap).
GP_connection_points = {} # Dictionary keyed by GP stroke index; each value maps the index of the GP stroke point nearest to a spline to that spline's index.
for gp_st_idx in range(len(GP_strokes_coords)):
GPvert_spline_relationship = {}
-
+
for splines_st_idx in range(len(nearest_points_coords)):
if nearest_points_coords[splines_st_idx] in GP_strokes_coords[gp_st_idx]:
GPvert_spline_relationship[GP_strokes_coords[gp_st_idx].index(nearest_points_coords[splines_st_idx])] = splines_st_idx
-
-
+
+
GP_connection_points[gp_st_idx] = GPvert_spline_relationship
-
-
+
+
#### Get the splines new order.
splines_new_order = []
for i in GP_connection_points:
dict_keys = sorted(GP_connection_points[i].keys()) # Sort dictionaries by key
-
+
for k in dict_keys:
splines_new_order.append(GP_connection_points[i][k])
-
-
-
+
+
+
#### Reorder.
-
+
curve_original_name = self.main_curve.name
-
+
bpy.ops.object.select_all('INVOKE_REGION_WIN', action='DESELECT')
bpy.data.objects[self.main_curve.name].select = True
bpy.context.scene.objects.active = bpy.data.objects[self.main_curve.name]
-
+
self.main_curve.name = "SURFSKIO_CRV_ORD"
-
+
bpy.ops.object.editmode_toggle('INVOKE_REGION_WIN')
bpy.ops.curve.select_all('INVOKE_REGION_WIN', action='DESELECT')
bpy.ops.object.editmode_toggle('INVOKE_REGION_WIN')
-
-
+
+
for sp_idx in range(len(self.main_curve.data.splines)):
self.main_curve.data.splines[0].bezier_points[0].select_control_point = True
-
+
bpy.ops.object.editmode_toggle('INVOKE_REGION_WIN')
bpy.ops.curve.separate('INVOKE_REGION_WIN')
bpy.ops.object.editmode_toggle('INVOKE_REGION_WIN')
-
-
-
+
+
+
#### Get the names of the separated splines objects in the original order.
splines_unordered = {}
for o in bpy.data.objects:
if o.name.find("SURFSKIO_CRV_ORD") != -1:
spline_order_string = o.name.partition(".")[2]
-
+
if spline_order_string != "" and int(spline_order_string) > 0:
spline_order_index = int(spline_order_string) - 1
splines_unordered[spline_order_index] = o.name
-
-
-
+
+
+
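After curve.separate(), Blender names the split objects with ".001", ".002", ... suffixes; the block above uses those suffixes to recover the original spline order. A tiny sketch with hypothetical names:

names = ["SURFSKIO_CRV_ORD", "SURFSKIO_CRV_ORD.002", "SURFSKIO_CRV_ORD.001"]
splines_unordered = {}
for name in names:
    suffix = name.partition(".")[2]
    if suffix != "" and int(suffix) > 0:
        splines_unordered[int(suffix) - 1] = name
# -> {1: 'SURFSKIO_CRV_ORD.002', 0: 'SURFSKIO_CRV_ORD.001'} (the unsuffixed object is the join target and is skipped)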
#### Join all splines objects in final order.
for order_idx in splines_new_order:
bpy.ops.object.select_all('INVOKE_REGION_WIN', action='DESELECT')
bpy.data.objects[splines_unordered[order_idx]].select = True
bpy.data.objects["SURFSKIO_CRV_ORD"].select = True
bpy.context.scene.objects.active = bpy.data.objects["SURFSKIO_CRV_ORD"]
-
+
bpy.ops.object.join('INVOKE_REGION_WIN')
-
-
+
+
#### Go back to the original name of the curves object.
bpy.context.object.name = curve_original_name
-
-
+
+
#### Delete all unused objects.
for o in objects_to_delete:
bpy.ops.object.select_all('INVOKE_REGION_WIN', action='DESELECT')
bpy.data.objects[o.name].select = True
bpy.context.scene.objects.active = bpy.data.objects[o.name]
-
+
bpy.ops.object.delete()
-
-
+
+
bpy.ops.object.select_all('INVOKE_REGION_WIN', action='DESELECT')
bpy.data.objects[curve_original_name].select = True
bpy.context.scene.objects.active = bpy.data.objects[curve_original_name]
-
+
bpy.ops.object.editmode_toggle('INVOKE_REGION_WIN')
bpy.ops.curve.select_all('INVOKE_REGION_WIN', action='DESELECT')
-
-
+
+
bpy.ops.gpencil.active_frame_delete('INVOKE_REGION_WIN')
-
-
-
+
+
+
return {"FINISHED"}
-
-
-
+
+
+
def invoke (self, context, event):
self.main_curve = bpy.context.object
-
-
+
+
there_are_GP_strokes = False
try:
#### Get the active grease pencil layer.
strokes_num = len(self.main_curve.grease_pencil.layers.active.active_frame.strokes)
-
+
if strokes_num > 0:
there_are_GP_strokes = True
except:
pass
-
-
+
+
if there_are_GP_strokes:
self.execute(context)
self.report({'INFO'}, "Splines have been reordered.")
else:
self.report({'WARNING'}, "Draw grease pencil strokes to connect splines.")
-
+
return {"FINISHED"}
-
-
-
+
+
+
class CURVE_OT_SURFSK_first_points(bpy.types.Operator):
bl_idname = "curve.surfsk_first_points"
bl_label = "Bsurfaces set first points"
bl_description = "Set the selected points as the first point of each spline."
bl_options = {'REGISTER', 'UNDO'}
-
-
-
+
+
+
def execute(self, context):
splines_to_invert = []
-
+
#### Check non-cyclic splines to invert.
for i in range(len(self.main_curve.data.splines)):
b_points = self.main_curve.data.splines[i].bezier_points
-
+
if not i in self.cyclic_splines: # Only for non-cyclic splines
if b_points[len(b_points) - 1].select_control_point:
splines_to_invert.append(i)
-
-
+
+
#### Reorder points of cyclic splines, and set all handles to "Automatic".
-
+
# Check first selected point.
cyclic_splines_new_first_pt = {}
for i in self.cyclic_splines:
sp = self.main_curve.data.splines[i]
-
+
for t in range(len(sp.bezier_points)):
bp = sp.bezier_points[t]
if bp.select_control_point or bp.select_right_handle or bp.select_left_handle:
cyclic_splines_new_first_pt[i] = t
break # To take only one if there are more.
-
+
# Reorder.
for spline_idx in cyclic_splines_new_first_pt:
sp = self.main_curve.data.splines[spline_idx]
-
+
spline_old_coords = []
for bp_old in sp.bezier_points:
coords = (bp_old.co[0], bp_old.co[1], bp_old.co[2])
-
+
left_handle_type = str(bp_old.handle_left_type)
left_handle_length = float(bp_old.handle_left.length)
left_handle_xyz = (float(bp_old.handle_left.x), float(bp_old.handle_left.y), float(bp_old.handle_left.z))
-
+
right_handle_type = str(bp_old.handle_right_type)
right_handle_length = float(bp_old.handle_right.length)
right_handle_xyz = (float(bp_old.handle_right.x), float(bp_old.handle_right.y), float(bp_old.handle_right.z))
-
+
spline_old_coords.append([coords, left_handle_type, right_handle_type, left_handle_length, right_handle_length, left_handle_xyz, right_handle_xyz])
-
-
+
+
for t in range(len(sp.bezier_points)):
bp = sp.bezier_points
-
+
if t + cyclic_splines_new_first_pt[spline_idx] + 1 <= len(bp) - 1:
new_index = t + cyclic_splines_new_first_pt[spline_idx] + 1
else:
new_index = t + cyclic_splines_new_first_pt[spline_idx] + 1 - len(bp)
-
+
bp[t].co = mathutils.Vector(spline_old_coords[new_index][0])
-
+
bp[t].handle_left.length = spline_old_coords[new_index][3]
bp[t].handle_right.length = spline_old_coords[new_index][4]
-
+
bp[t].handle_left_type = "FREE"
bp[t].handle_right_type = "FREE"
-
+
bp[t].handle_left.x = spline_old_coords[new_index][5][0]
bp[t].handle_left.y = spline_old_coords[new_index][5][1]
bp[t].handle_left.z = spline_old_coords[new_index][5][2]
-
+
bp[t].handle_right.x = spline_old_coords[new_index][6][0]
bp[t].handle_right.y = spline_old_coords[new_index][6][1]
bp[t].handle_right.z = spline_old_coords[new_index][6][2]
-
+
bp[t].handle_left_type = spline_old_coords[new_index][1]
bp[t].handle_right_type = spline_old_coords[new_index][2]
-
-
-
+
+
+
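The point shuffle above is, in effect, a rotation of the saved per-point data so that the point right after the selected one lands at index 0 (note the + 1 in the index math). Minimal sketch with plain values:

def rotate(items, offset):
    offset %= len(items)
    return items[offset:] + items[:offset]

rotate(['a', 'b', 'c', 'd'], 2 + 1)   # selected index 2 -> ['d', 'a', 'b', 'c']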
#### Invert the non-cyclic splines designated above.
for i in range(len(splines_to_invert)):
bpy.ops.curve.select_all('INVOKE_REGION_WIN', action='DESELECT')
-
+
bpy.ops.object.editmode_toggle('INVOKE_REGION_WIN')
self.main_curve.data.splines[splines_to_invert[i]].bezier_points[0].select_control_point = True
bpy.ops.object.editmode_toggle('INVOKE_REGION_WIN')
-
+
bpy.ops.curve.switch_direction()
-
+
bpy.ops.curve.select_all('INVOKE_REGION_WIN', action='DESELECT')
-
-
+
+
#### Keep selected the first vert of each spline.
bpy.ops.object.editmode_toggle('INVOKE_REGION_WIN')
for i in range(len(self.main_curve.data.splines)):
@@ -3649,43 +3647,43 @@ class CURVE_OT_SURFSK_first_points(bpy.types.Operator):
bp = self.main_curve.data.splines[i].bezier_points[0]
else:
bp = self.main_curve.data.splines[i].bezier_points[len(self.main_curve.data.splines[i].bezier_points) - 1]
-
+
bp.select_control_point = True
bp.select_right_handle = True
bp.select_left_handle = True
bpy.ops.object.editmode_toggle('INVOKE_REGION_WIN')
-
-
-
-
+
+
+
+
return {'FINISHED'}
-
-
-
+
+
+
def invoke (self, context, event):
self.main_curve = bpy.context.object
-
+
# Check if all curves are Bezier, and detect which ones are cyclic.
self.cyclic_splines = []
for i in range(len(self.main_curve.data.splines)):
if self.main_curve.data.splines[i].type != "BEZIER":
self.report({'WARNING'}, 'All splines must be Bezier type.')
-
+
return {'CANCELLED'}
else:
if self.main_curve.data.splines[i].use_cyclic_u:
self.cyclic_splines.append(i)
-
-
-
+
+
+
self.execute(context)
self.report({'INFO'}, "First points have been set.")
-
+
return {'FINISHED'}
-
-
-
-
+
+
+
+
def register():
bpy.utils.register_class(VIEW3D_PT_tools_SURFSK_mesh)
bpy.utils.register_class(VIEW3D_PT_tools_SURFSK_curve)
@@ -3693,29 +3691,29 @@ def register():
bpy.utils.register_class(GPENCIL_OT_SURFSK_edit_strokes)
bpy.utils.register_class(CURVE_OT_SURFSK_reorder_splines)
bpy.utils.register_class(CURVE_OT_SURFSK_first_points)
-
-
-
+
+
+
bpy.types.Scene.SURFSK_cyclic_cross = bpy.props.BoolProperty(
name="Cyclic Cross",
description="Make cyclic the face-loops crossing the strokes.",
default=False)
-
+
bpy.types.Scene.SURFSK_cyclic_follow = bpy.props.BoolProperty(
name="Cyclic Follow",
description="Make cyclic the face-loops following the strokes.",
default=False)
-
+
bpy.types.Scene.SURFSK_keep_strokes = bpy.props.BoolProperty(
name="Keep strokes",
description="Keeps the sketched strokes or curves after adding the surface.",
default=False)
-
+
bpy.types.Scene.SURFSK_automatic_join = bpy.props.BoolProperty(
name="Automatic join",
description="Automatically join vertices of surfaces generated by crosshatching, or from the borders of closed shapes.",
default=True)
-
+
bpy.types.Scene.SURFSK_loops_on_strokes = bpy.props.BoolProperty(
name="Loops on strokes",
description="Make the loops match the paths of the strokes.",
@@ -3727,7 +3725,7 @@ def register():
default=2,
min=1,
max=100)
-
+
def unregister():
bpy.utils.unregister_class(VIEW3D_PT_tools_SURFSK_mesh)
@@ -3736,18 +3734,18 @@ def unregister():
bpy.utils.unregister_class(GPENCIL_OT_SURFSK_edit_strokes)
bpy.utils.unregister_class(CURVE_OT_SURFSK_reorder_splines)
bpy.utils.unregister_class(CURVE_OT_SURFSK_first_points)
-
+
del bpy.types.Scene.SURFSK_precision
del bpy.types.Scene.SURFSK_keep_strokes
del bpy.types.Scene.SURFSK_automatic_join
del bpy.types.Scene.SURFSK_cyclic_cross
del bpy.types.Scene.SURFSK_cyclic_follow
del bpy.types.Scene.SURFSK_loops_on_strokes
-
+
if __name__ == "__main__":
register()
-
-
+
+
diff --git a/mesh_f2.py b/mesh_f2.py
index d95d4cec..e75785cb 100644
--- a/mesh_f2.py
+++ b/mesh_f2.py
@@ -27,10 +27,9 @@ bl_info = {
'location': "Editmode > F",
'warning': "",
'description': "Extends the 'Make Edge/Face' functionality",
- 'wiki_url': "http://wiki.blender.org/index.php/Extensions:2.6/Py/"\
+ 'wiki_url': "http://wiki.blender.org/index.php/Extensions:2.6/Py/"
"Scripts/Modeling/F2",
- 'tracker_url': "http://projects.blender.org/tracker/index.php?"\
- "func=detail&aid=33979",
+ 'tracker_url': "https://developer.blender.org/T33979",
'category': 'Mesh'}
@@ -316,4 +315,4 @@ def unregister():
if __name__ == "__main__":
- register() \ No newline at end of file
+ register()
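The bl_info hunks in this patch (here and in mesh_inset and mesh_looptools below) converge on the same pattern: implicit string concatenation for the long wiki_url and the short developer.blender.org task URL for tracker_url. Illustrated with a hypothetical add-on (T12345 is a made-up task id):

bl_info = {
    "name": "Example Add-on",
    "wiki_url": "http://wiki.blender.org/index.php/Extensions:2.6/Py/"
                "Scripts/Modeling/Example",
    "tracker_url": "https://developer.blender.org/T12345",
    "category": "Mesh",
}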
diff --git a/mesh_inset/__init__.py b/mesh_inset/__init__.py
index 8d4054d3..fa3f7973 100644
--- a/mesh_inset/__init__.py
+++ b/mesh_inset/__init__.py
@@ -26,12 +26,12 @@ bl_info = {
"location": "View3D > Tools",
"description": "Make an inset polygon inside selection.",
"warning": "",
- "wiki_url": \
- "http://wiki.blender.org/index.php/Extensions:2.6/Py/Scripts/Modeling/Inset-Polygon",
- "tracker_url": \
- "http://projects.blender.org/tracker/index.php?func=detail&aid=27290&group_id=153&atid=469",
+ "wiki_url": "http://wiki.blender.org/index.php/Extensions:2.6/Py/"
+ "Scripts/Modeling/Inset-Polygon",
+ "tracker_url": "https://developer.blender.org/T27290",
"category": "Mesh"}
+
if "bpy" in locals():
import imp
else:
diff --git a/mesh_looptools.py b/mesh_looptools.py
index cdbc26fd..5bfc832f 100644
--- a/mesh_looptools.py
+++ b/mesh_looptools.py
@@ -25,9 +25,8 @@ bl_info = {
"warning": "",
"description": "Mesh modelling toolkit. Several tools to aid modelling",
"wiki_url": "http://wiki.blender.org/index.php/Extensions:2.6/Py/"
- "Scripts/Modeling/LoopTools",
- "tracker_url": "http://projects.blender.org/tracker/index.php?"
- "func=detail&aid=26189",
+ "Scripts/Modeling/LoopTools",
+ "tracker_url": "https://developer.blender.org/T26189",
"category": "Mesh"}
@@ -79,7 +78,7 @@ def cache_read(tool, object, bm, input_method, boundaries):
loops = looptools_cache[tool]["loops"]
derived = looptools_cache[tool]["derived"]
mapping = looptools_cache[tool]["mapping"]
-
+
return(True, single_loops, loops, derived, mapping)
@@ -140,7 +139,7 @@ def calculate_cubic_splines(bm_mod, tknots, knots):
else:
circular = False
# end of hack
-
+
n = len(knots)
if n < 2:
return False
@@ -187,7 +186,7 @@ def calculate_cubic_splines(bm_mod, tknots, knots):
if circular: # cleaning up after hack
knots = knots[4:-4]
tknots = tknots[4:-4]
-
+
return(splines)
@@ -201,7 +200,7 @@ def calculate_linear_splines(bm_mod, tknots, knots):
t = tknots[i]
u = tknots[i+1]-t
splines.append([a, d, t, u]) # [locStart, locDif, tStart, tDif]
-
+
return(splines)
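The [locStart, locDif, tStart, tDif] tuples stored above describe straight segments over the knot parameter; a sketch of how such a segment can be evaluated (plain floats instead of Vectors, hypothetical values):

def eval_linear(spline, t):
    a, d, t0, dt = spline          # [locStart, locDif, tStart, tDif]
    return a + d * ((t - t0) / dt)

eval_linear([0.0, 2.0, 0.0, 1.0], 0.5)   # -> 1.0, halfway along the segment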
@@ -209,14 +208,14 @@ def calculate_linear_splines(bm_mod, tknots, knots):
def calculate_plane(bm_mod, loop, method="best_fit", object=False):
# getting the vertex locations
locs = [bm_mod.verts[v].co.copy() for v in loop[0]]
-
+
# calculating the center of mass
com = mathutils.Vector()
for loc in locs:
com += loc
com /= len(locs)
x, y, z = com
-
+
if method == 'best_fit':
# creating the covariance matrix
mat = mathutils.Matrix(((0.0, 0.0, 0.0),
@@ -233,7 +232,7 @@ def calculate_plane(bm_mod, loop, method="best_fit", object=False):
mat[0][2] += (loc[2]-z)*(loc[0]-x)
mat[1][2] += (loc[2]-z)*(loc[1]-y)
mat[2][2] += (loc[2]-z)**2
-
+
# calculating the normal to the plane
normal = False
try:
@@ -266,7 +265,7 @@ def calculate_plane(bm_mod, loop, method="best_fit", object=False):
if vec2.length == 0:
vec2 = mathutils.Vector((1.0, 1.0, 1.0))
normal = vec2
-
+
elif method == 'normal':
# averaging the vertex normals
v_normals = [bm_mod.verts[v].normal for v in loop[0]]
@@ -275,7 +274,7 @@ def calculate_plane(bm_mod, loop, method="best_fit", object=False):
normal += v_normal
normal /= len(v_normals)
normal.normalize()
-
+
elif method == 'view':
# calculate view normal
rotation = bpy.context.space_data.region_3d.view_matrix.to_3x3().\
@@ -284,7 +283,7 @@ def calculate_plane(bm_mod, loop, method="best_fit", object=False):
if object:
normal = object.matrix_world.inverted().to_euler().to_matrix() * \
normal
-
+
return(com, normal)
@@ -294,7 +293,7 @@ def calculate_splines(interpolation, bm_mod, tknots, knots):
splines = calculate_cubic_splines(bm_mod, tknots, knots[:])
else: # interpolations == 'linear'
splines = calculate_linear_splines(bm_mod, tknots, knots[:])
-
+
return(splines)
@@ -322,10 +321,10 @@ def check_loops(loops, mapping, bm_mod):
stacked = False
break
if stacked:
- continue
+ continue
# passed all tests, loop is valid
valid_loops.append([loop, circular])
-
+
return(valid_loops)
@@ -338,7 +337,7 @@ def dict_edge_faces(bm):
continue
for key in face_edgekeys(face):
edge_faces[key].append(face.index)
-
+
return(edge_faces)
@@ -346,7 +345,7 @@ def dict_edge_faces(bm):
def dict_face_faces(bm, edge_faces=False):
if not edge_faces:
edge_faces = dict_edge_faces(bm)
-
+
connected_faces = dict([[face.index, []] for face in bm.faces if \
not face.hide])
for face in bm.faces:
@@ -357,7 +356,7 @@ def dict_face_faces(bm, edge_faces=False):
if connected_face == face.index:
continue
connected_faces[face.index].append(connected_face)
-
+
return(connected_faces)
@@ -370,7 +369,7 @@ def dict_vert_edges(bm):
ek = edgekey(edge)
for vert in ek:
vert_edges[vert].append(ek)
-
+
return(vert_edges)
@@ -381,7 +380,7 @@ def dict_vert_faces(bm):
if not face.hide:
for vert in face.verts:
vert_faces[vert.index].append(face.index)
-
+
return(vert_faces)
@@ -395,7 +394,7 @@ def dict_vert_verts(edge_keys):
vert_verts[ek[i]].append(ek[1-i])
else:
vert_verts[ek[i]] = [ek[1-i]]
-
+
return(vert_verts)
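dict_vert_verts above builds a plain adjacency mapping from edge keys; a compact equivalent for reference (standalone Python):

def vert_adjacency(edge_keys):
    vert_verts = {}
    for a, b in edge_keys:
        vert_verts.setdefault(a, []).append(b)
        vert_verts.setdefault(b, []).append(a)
    return vert_verts

vert_adjacency([(0, 1), (1, 2), (2, 0)])
# -> {0: [1, 2], 1: [0, 2], 2: [1, 0]}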
@@ -414,18 +413,18 @@ def face_edgekeys(face):
def get_connected_input(object, bm, scene, input):
# get mesh with modifiers applied
derived, bm_mod = get_derived_bmesh(object, bm, scene)
-
+
# calculate selected loops
edge_keys = [edgekey(edge) for edge in bm_mod.edges if \
edge.select and not edge.hide]
loops = get_connected_selections(edge_keys)
-
+
# if only selected loops are needed, we're done
if input == 'selected':
return(derived, bm_mod, loops)
- # elif input == 'all':
+ # elif input == 'all':
loops = get_parallel_loops(bm_mod, loops)
-
+
return(derived, bm_mod, loops)
@@ -433,14 +432,14 @@ def get_connected_input(object, bm, scene, input):
def get_connected_selections(edge_keys):
# create connection data
vert_verts = dict_vert_verts(edge_keys)
-
+
# find loops consisting of connected selected edges
loops = []
while len(vert_verts) > 0:
loop = [iter(vert_verts.keys()).__next__()]
growing = True
flipped = False
-
+
# extend loop
while growing:
# no more connection data for current vertex
@@ -474,7 +473,7 @@ def get_connected_selections(edge_keys):
# found both ends of the loop, stop growing
else:
growing = False
-
+
# check if loop is circular
if loop[0] in vert_verts:
if loop[-1] in vert_verts[loop[0]]:
@@ -494,9 +493,9 @@ def get_connected_selections(edge_keys):
else:
# not circular
loop = [loop, False]
-
+
loops.append(loop)
-
+
return(loops)
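A simplified sketch of the loop-growing idea above: keep extending from either end of the chain while an unused neighbour exists, then test the ends for circularity (LoopTools additionally flips the working end and prunes used entries from the adjacency dict; that bookkeeping is omitted here):

def grow_loop(vert_verts):
    start = next(iter(vert_verts))
    loop = [start]
    used = {start}
    grew = True
    while grew:
        grew = False
        for neighbour in vert_verts.get(loop[-1], []):
            if neighbour not in used:
                loop.append(neighbour)
                used.add(neighbour)
                grew = True
                break
        else:
            for neighbour in vert_verts.get(loop[0], []):
                if neighbour not in used:
                    loop.insert(0, neighbour)
                    used.add(neighbour)
                    grew = True
                    break
    circular = len(loop) > 2 and loop[0] in vert_verts.get(loop[-1], [])
    return loop, circular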
@@ -523,7 +522,7 @@ def get_derived_bmesh(object, bm, scene):
else:
derived = False
bm_mod = bm
-
+
return(derived, bm_mod)
@@ -531,12 +530,12 @@ def get_derived_bmesh(object, bm, scene):
def get_mapping(derived, bm, bm_mod, single_vertices, full_search, loops):
if not derived:
return(False)
-
+
if full_search:
verts = [v for v in bm.verts if not v.hide]
else:
verts = [v for v in bm.verts if v.select and not v.hide]
-
+
# non-selected vertices around single vertices also need to be mapped
if single_vertices:
mapping = dict([[vert, -1] for vert in single_vertices])
@@ -547,7 +546,7 @@ def get_mapping(derived, bm, bm_mod, single_vertices, full_search, loops):
mapping[v_mod.index] = v.index
break
real_singles = [v_real for v_real in mapping.values() if v_real>-1]
-
+
verts_indices = [vert.index for vert in verts]
for face in [face for face in bm.faces if not face.select \
and not face.hide]:
@@ -558,7 +557,7 @@ def get_mapping(derived, bm, bm_mod, single_vertices, full_search, loops):
if v not in verts:
verts.append(v)
break
-
+
# create mapping of derived indices to indices
mapping = dict([[vert, -1] for loop in loops for vert in loop[0]])
if single_vertices:
@@ -571,7 +570,7 @@ def get_mapping(derived, bm, bm_mod, single_vertices, full_search, loops):
mapping[v_mod.index] = v.index
verts_mod.remove(v_mod)
break
-
+
return(mapping)
@@ -593,7 +592,7 @@ def matrix_invert(m):
m[0][2]*m[1][0] - m[0][0]*m[1][2]),
(m[1][0]*m[2][1] - m[1][1]*m[2][0], m[0][1]*m[2][0] - m[0][0]*m[2][1],
m[0][0]*m[1][1] - m[0][1]*m[1][0])))
-
+
return (r * (1 / matrix_determinant(m)))
@@ -613,7 +612,7 @@ def get_parallel_loops(bm_mod, loops):
# variables to keep track while iterating
all_edgeloops = []
has_branches = False
-
+
for loop in edgeloops:
# initialise with original loop
all_edgeloops.append(loop[0])
@@ -624,7 +623,7 @@ def get_parallel_loops(bm_mod, loops):
verts_used.append(edge[0])
if edge[1] not in verts_used:
verts_used.append(edge[1])
-
+
# find parallel loops
while len(newloops) > 0:
side_a = []
@@ -663,18 +662,18 @@ def get_parallel_loops(bm_mod, loops):
break
forbidden_side = "b"
continue
-
+
if has_branches:
# weird input with branches
break
-
+
newloops.pop(-1)
sides = []
if side_a:
sides.append(side_a)
if side_b:
sides.append(side_b)
-
+
for side in sides:
extraloop = []
for fi in side:
@@ -690,11 +689,11 @@ def get_parallel_loops(bm_mod, loops):
verts_used.append(new_vert)
newloops.append(extraloop)
all_edgeloops.append(extraloop)
-
+
# input contains branches, only return selected loop
if has_branches:
return(loops)
-
+
# change edgeloops into normal loops
loops = []
for edgeloop in all_edgeloops:
@@ -723,7 +722,7 @@ def get_parallel_loops(bm_mod, loops):
else:
circular = False
loops.append([loop, circular])
-
+
return(loops)
@@ -737,7 +736,7 @@ def initialise():
bpy.ops.object.mode_set(mode='OBJECT')
bpy.ops.object.mode_set(mode='EDIT')
bm = bmesh.from_edit_mesh(object.data)
-
+
return(global_undo, object, bm)
@@ -759,7 +758,7 @@ def move_verts(object, bm, mapping, move, influence):
object.data.update()
-# load custom tool settings
+# load custom tool settings
def settings_load(self):
lt = bpy.context.window_manager.looptools
tool = self.name.split()[0].lower()
@@ -795,7 +794,7 @@ def terminate(global_undo):
def bridge_calculate_cubic_spline(bm, coordinates):
result = []
x = [0, 1, 2, 3]
-
+
for j in range(3):
a = []
for i in coordinates:
@@ -830,13 +829,13 @@ def bridge_calculate_cubic_spline(bm, coordinates):
return(spline)
-# return a list with new vertex location vectors, a list with face vertex
+# return a list with new vertex location vectors, a list with face vertex
# integers, and the highest vertex integer in the virtual mesh
def bridge_calculate_geometry(bm, lines, vertex_normals, segments,
interpolation, cubic_strength, min_width, max_vert_index):
new_verts = []
faces = []
-
+
# calculate location based on interpolation method
def get_location(line, segment, splines):
v1 = bm.verts[lines[line][0]].co
@@ -852,7 +851,7 @@ interpolation, cubic_strength, min_width, max_vert_index):
az,bz,cz,dz,tz = splines[line][2]
z = az+bz*m+cz*m**2+dz*m**3
return mathutils.Vector((x, y, z))
-
+
# no interpolation needed
if segments == 1:
for i, line in enumerate(lines):
@@ -872,7 +871,7 @@ interpolation, cubic_strength, min_width, max_vert_index):
v2+size*vertex_normals[line[1]]]))
else:
splines = False
-
+
# create starting situation
virtual_width = [(bm.verts[lines[i][0]].co -
bm.verts[lines[i+1][0]].co).length for i
@@ -881,13 +880,13 @@ interpolation, cubic_strength, min_width, max_vert_index):
segments)]
first_line_indices = [i for i in range(max_vert_index+1,
max_vert_index+segments)]
-
+
prev_verts = new_verts[:] # vertex locations of verts on previous line
prev_vert_indices = first_line_indices[:]
max_vert_index += segments - 1 # highest vertex index in virtual mesh
next_verts = [] # vertex locations of verts on current line
next_vert_indices = []
-
+
for i, line in enumerate(lines):
if i < len(lines)-1:
v1 = line[0]
@@ -924,12 +923,12 @@ interpolation, cubic_strength, min_width, max_vert_index):
next_vert_indices.append(max_vert_index)
if end_face:
faces.append([v1, v2, lines[i+1][1], line[1]])
-
+
prev_verts = next_verts[:]
prev_vert_indices = next_vert_indices[:]
next_verts = []
next_vert_indices = []
-
+
return(new_verts, faces, max_vert_index)
@@ -940,7 +939,7 @@ def bridge_calculate_lines(bm, loops, mode, twist, reverse):
loop1_circular, loop2_circular = [i[1] for i in loops]
circular = loop1_circular or loop2_circular
circle_full = False
-
+
# calculate loop centers
centers = []
for loop in [loop1, loop2]:
@@ -956,7 +955,7 @@ def bridge_calculate_lines(bm, loops, mode, twist, reverse):
centers[i] += mathutils.Vector((0.01, 0, 0))
break
center1, center2 = centers
-
+
# calculate the normals of the virtual planes that the loops are on
normals = []
normal_plurity = False
@@ -1012,7 +1011,7 @@ def bridge_calculate_lines(bm, loops, mode, twist, reverse):
if ((center2 + normals[1]) - center1).length > \
((center2 - normals[1]) - center1).length:
normals[1].negate()
-
+
# rotation matrix, representing the difference between the plane normals
axis = normals[0].cross(normals[1])
axis = mathutils.Vector([loc if abs(loc) > 1e-8 else 0 for loc in axis])
@@ -1020,14 +1019,14 @@ def bridge_calculate_lines(bm, loops, mode, twist, reverse):
axis.negate()
angle = normals[0].dot(normals[1])
rotation_matrix = mathutils.Matrix.Rotation(angle, 4, axis)
-
+
# if circular, rotate loops so they are aligned
if circular:
# make sure loop1 is the circular one (or both are circular)
if loop2_circular and not loop1_circular:
loop1_circular, loop2_circular = True, False
loop1, loop2 = loop2, loop1
-
+
# match start vertex of loop1 with loop2
target_vector = bm.verts[loop2[0]].co - center2
dif_angles = [[(rotation_matrix * (bm.verts[vertex].co - center1)
@@ -1041,7 +1040,7 @@ def bridge_calculate_lines(bm, loops, mode, twist, reverse):
angle, distance, index in dif_angles if angle <= angle_limit]
dif_angles.sort()
loop1 = loop1[dif_angles[0][2]:] + loop1[:dif_angles[0][2]]
-
+
# have both loops face the same way
if normal_plurity and not circular:
second_to_first, second_to_second, second_to_last = \
@@ -1071,7 +1070,7 @@ def bridge_calculate_lines(bm, loops, mode, twist, reverse):
loop1.reverse()
if circular:
loop1 = [loop1[-1]] + loop1[:-1]
-
+
# both loops have the same length
if len(loop1) == len(loop2):
# manual override
@@ -1080,25 +1079,25 @@ def bridge_calculate_lines(bm, loops, mode, twist, reverse):
loop1 = loop1[twist:]+loop1[:twist]
if reverse:
loop1.reverse()
-
+
lines.append([loop1[0], loop2[0]])
for i in range(1, len(loop1)):
lines.append([loop1[i], loop2[i]])
-
+
# loops of different lengths
else:
# make loop1 longest loop
if len(loop2) > len(loop1):
loop1, loop2 = loop2, loop1
loop1_circular, loop2_circular = loop2_circular, loop1_circular
-
+
# manual override
if twist:
if abs(twist) < len(loop1):
loop1 = loop1[twist:]+loop1[:twist]
if reverse:
loop1.reverse()
-
+
# shortest angle difference doesn't always give correct start vertex
if loop1_circular and not loop2_circular:
shifting = 1
@@ -1115,7 +1114,7 @@ def bridge_calculate_lines(bm, loops, mode, twist, reverse):
else:
shifting = False
break
-
+
# basic shortest side first
if mode == 'basic':
lines.append([loop1[0], loop2[0]])
@@ -1126,7 +1125,7 @@ def bridge_calculate_lines(bm, loops, mode, twist, reverse):
else:
# quads
lines.append([loop1[i], loop2[i]])
-
+
# shortest edge algorithm
else: # mode == 'shortest'
lines.append([loop1[0], loop2[0]])
@@ -1151,7 +1150,7 @@ def bridge_calculate_lines(bm, loops, mode, twist, reverse):
tri, quad = [(bm.verts[loop1[i+1]].co -
bm.verts[loop2[j]].co).length
for j in range(prev_vert2, prev_vert2+2)]
-
+
# triangle
if tri < quad:
lines.append([loop1[i+1], loop2[prev_vert2]])
@@ -1166,11 +1165,11 @@ def bridge_calculate_lines(bm, loops, mode, twist, reverse):
lines.append([loop1[i+1], loop2[0]])
prev_vert2 = 0
circle_full = True
-
+
# final face for circular loops
if loop1_circular and loop2_circular:
lines.append([loop1[0], loop2[0]])
-
+
return(lines)
@@ -1179,22 +1178,22 @@ def bridge_calculate_segments(bm, lines, loops, segments):
# return if amount of segments is set by user
if segments != 0:
return segments
-
+
# edge lengths
average_edge_length = [(bm.verts[vertex].co - \
bm.verts[loop[0][i+1]].co).length for loop in loops for \
i, vertex in enumerate(loop[0][:-1])]
# closing edges of circular loops
average_edge_length += [(bm.verts[loop[0][-1]].co - \
- bm.verts[loop[0][0]].co).length for loop in loops if loop[1]]
-
+ bm.verts[loop[0][0]].co).length for loop in loops if loop[1]]
+
# average lengths
average_edge_length = sum(average_edge_length) / len(average_edge_length)
average_bridge_length = sum([(bm.verts[v1].co - \
bm.verts[v2].co).length for v1, v2 in lines]) / len(lines)
-
+
segments = max(1, round(average_bridge_length / average_edge_length))
-
+
return(segments)
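A rough numeric check of the automatic segment count above: with loop edges averaging 0.25 units and bridging lines averaging 1.0 unit, the bridge gets four segments:

average_edge_length = 0.25
average_bridge_length = 1.0
segments = max(1, round(average_bridge_length / average_edge_length))   # -> 4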
@@ -1203,7 +1202,7 @@ def bridge_calculate_virtual_vertex_normals(bm, lines, loops, edge_faces,
edgekey_to_edge):
if not edge_faces: # interpolation isn't set to cubic
return False
-
+
# pity reduce() isn't one of the basic functions in python anymore
def average_vector_dictionary(dic):
for key, vectors in dic.items():
@@ -1215,7 +1214,7 @@ edgekey_to_edge):
average /= len(vectors)
dic[key] = [average]
return dic
-
+
# get all edges of the loop
edges = [[edgekey_to_edge[tuple(sorted([loops[j][0][i],
loops[j][0][i+1]]))] for i in range(len(loops[j][0])-1)] for \
@@ -1225,17 +1224,17 @@ edgekey_to_edge):
if loops[j][1]: # circular
edges.append(edgekey_to_edge[tuple(sorted([loops[j][0][0],
loops[j][0][-1]]))])
-
+
"""
calculation based on face topology (assign edge-normals to vertices)
-
+
edge_normal = face_normal x edge_vector
vertex_normal = average(edge_normals)
"""
vertex_normals = dict([(vertex, []) for vertex in loops[0][0]+loops[1][0]])
for edge in edges:
faces = edge_faces[edgekey(edge)] # valid faces connected to edge
-
+
if faces:
# get edge coordinates
v1, v2 = [bm.verts[edgekey(edge)[i]].co for i in [0,1]]
@@ -1244,7 +1243,7 @@ edgekey_to_edge):
# zero-length edge, vertices at same location
continue
edge_center = (v1 + v2) / 2
-
+
# average face coordinates, if connected to more than 1 valid face
if len(faces) > 1:
face_normal = mathutils.Vector()
@@ -1260,7 +1259,7 @@ edgekey_to_edge):
if face_normal.length < 1e-4:
# faces with a surface of 0 have no face normal
continue
-
+
# calculate virtual edge normal
edge_normal = edge_vector.cross(face_normal)
edge_normal.length = 0.01
@@ -1272,17 +1271,17 @@ edgekey_to_edge):
# add virtual edge normal as entry for both vertices it connects
for vertex in edgekey(edge):
vertex_normals[vertex].append(edge_normal)
-
- """
- calculation based on connection with other loop (vertex focused method)
+
+ """
+ calculation based on connection with other loop (vertex focused method)
- used for vertices that aren't connected to any valid faces
-
+
plane_normal = edge_vector x connection_vector
vertex_normal = plane_normal x edge_vector
"""
vertices = [vertex for vertex, normal in vertex_normals.items() if not \
normal]
-
+
if vertices:
# edge vectors connected to vertices
edge_vectors = dict([[vertex, []] for vertex in vertices])
@@ -1295,7 +1294,7 @@ edgekey_to_edge):
# zero-length edge, vertices at same location
continue
edge_vectors[v].append(edge_vector)
-
+
# connection vectors between vertices of both loops
connection_vectors = dict([[vertex, []] for vertex in vertices])
connections = dict([[vertex, []] for vertex in vertices])
@@ -1315,14 +1314,14 @@ edgekey_to_edge):
connection_vectors = average_vector_dictionary(connection_vectors)
connection_vectors = dict([[vertex, vector[0]] if vector else \
[vertex, []] for vertex, vector in connection_vectors.items()])
-
+
for vertex, values in edge_vectors.items():
# vertex normal doesn't matter, just assign a random vector to it
if not connection_vectors[vertex]:
vertex_normals[vertex] = [mathutils.Vector((1, 0, 0))]
continue
-
- # calculate to what location the vertex is connected,
+
+ # calculate to what location the vertex is connected,
# used to determine what way to flip the normal
connected_center = mathutils.Vector()
for v in connections[vertex]:
@@ -1333,7 +1332,7 @@ edgekey_to_edge):
# shouldn't be possible, but better safe than sorry
vertex_normals[vertex] = [mathutils.Vector((1, 0, 0))]
continue
-
+
# can't do proper calculations, because of zero-length vector
if not values:
if (connected_center - (bm.verts[vertex].co + \
@@ -1344,7 +1343,7 @@ edgekey_to_edge):
vertex_normals[vertex] = [connection_vectors[vertex].\
normalized()]
continue
-
+
# calculate vertex normals using edge-vectors,
# connection-vectors and the derived plane normal
for edge_vector in values:
@@ -1358,12 +1357,12 @@ edgekey_to_edge):
vertex_normal.negate()
vertex_normal.normalize()
vertex_normals[vertex].append(vertex_normal)
-
+
# average virtual vertex normals, based on all edges it's connected to
vertex_normals = average_vector_dictionary(vertex_normals)
vertex_normals = dict([[vertex, vector[0]] for vertex, vector in \
vertex_normals.items()])
-
+
return(vertex_normals)
@@ -1380,7 +1379,7 @@ def bridge_create_faces(object, bm, faces, twist):
[face.reverse() for face in faces]
faces = [face[2:]+face[:2] if face[0]==face[1] else face for \
face in faces]
-
+
# eekadoodle prevention
for i in range(len(faces)):
if not faces[i][-1]:
@@ -1391,7 +1390,7 @@ def bridge_create_faces(object, bm, faces, twist):
# result of converting from pre-bmesh period
if faces[i][-1] == faces[i][-2]:
faces[i] = faces[i][:-1]
-
+
new_faces = []
for i in range(len(faces)):
new_faces.append(bm.faces.new([bm.verts[v] for v in faces[i]]))
@@ -1413,12 +1412,12 @@ def bridge_get_input(bm):
else:
edge_count[ek] = 1
internal_edges = [ek for ek in edge_count if edge_count[ek] > 1]
-
+
# sort correct edges into loops
selected_edges = [edgekey(edge) for edge in bm.edges if edge.select \
and not edge.hide and edgekey(edge) not in internal_edges]
loops = get_connected_selections(selected_edges)
-
+
return(loops)
@@ -1441,18 +1440,18 @@ def bridge_initialise(bm, interpolation):
else:
edge_faces = False
edgekey_to_edge = False
-
+
# selected faces input
old_selected_faces = [face.index for face in bm.faces if face.select \
and not face.hide]
-
+
# find out if faces created by bridging should be smoothed
smooth = False
if bm.faces:
if sum([face.smooth for face in bm.faces])/len(bm.faces) \
>= 0.5:
smooth = True
-
+
return(edge_faces, edgekey_to_edge, old_selected_faces, smooth)
@@ -1466,7 +1465,7 @@ def bridge_input_method(loft, loft_loop):
method = "Loft no-loop"
else:
method = "Bridge"
-
+
return(method)
@@ -1483,7 +1482,7 @@ def bridge_match_loops(bm, loops):
center += bm.verts[vertex].co
normals.append(normal / len(vertices) / 10)
centers.append(center / len(vertices))
-
+
# possible matches if loop normals are faced towards the center
# of the other loop
matches = dict([[i, []] for i in range(len(loops))])
@@ -1504,7 +1503,7 @@ def bridge_match_loops(bm, loops):
matches[j].append([(centers[i] - centers[j]).length, j, i])
for key, value in matches.items():
value.sort()
-
+
# matches based on distance between centers and number of vertices in loops
new_order = []
for loop_index in range(len(loops)):
@@ -1523,11 +1522,11 @@ def bridge_match_loops(bm, loops):
if match[3] not in new_order:
new_order += [loop_index, match[3]]
break
-
+
# reorder loops based on matches
if len(new_order) >= 2:
loops = [loops[i] for i in new_order]
-
+
return(loops)
@@ -1538,7 +1537,7 @@ def bridge_remove_internal_faces(bm, old_selected_faces):
edges = collections.Counter([edge.index for face in remove_faces for \
edge in face.edges])
remove_edges = [bm.edges[edge] for edge in edges if edges[edge] > 1]
-
+
# remove internal faces and edges
for face in remove_faces:
bm.faces.remove(face)
@@ -1552,7 +1551,7 @@ def bridge_save_unused_faces(bm, old_selected_faces, loops):
vertex_to_face = dict([[i, []] for i in range(len(bm.verts))])
[[vertex_to_face[vertex.index].append(face) for vertex in \
bm.faces[face].verts] for face in old_selected_faces]
-
+
# group selected faces that are connected
groups = []
grouped_faces = []
@@ -1572,13 +1571,13 @@ def bridge_save_unused_faces(bm, old_selected_faces, loops):
group += vertex_face_group
new_faces.pop(0)
groups.append(group)
-
+
# key: vertex index, value: True/False (is it in a loop that is used)
used_vertices = dict([[i, 0] for i in range(len(bm.verts))])
for loop in loops:
for vertex in loop[0]:
used_vertices[vertex] = True
-
+
# check if group is bridged, if not remove faces from internal faces list
for group in groups:
used = False
@@ -1607,7 +1606,7 @@ def bridge_sort_loops(bm, loops, loft_loop):
x, y, z = [[sum([bm.verts[i].co[j] for i in loop[0]]) / \
len(loop[0]) for loop in loops] for j in range(3)]
nodes = [mathutils.Vector((x[i], y[i], z[i])) for i in range(len(loops))]
-
+
active_node = 0
open = [i for i in range(1, len(loops))]
path = [[0,0]]
@@ -1624,13 +1623,13 @@ def bridge_sort_loops(bm, loops, loft_loop):
path.reverse()
path = path[:-i] + temp
break
-
+
# reorder loops
loops = [loops[i[0]] for i in path]
# if requested, duplicate first loop at last position, so loft can loop
if loft_loop:
loops = loops + [loops[0]]
-
+
return(loops)
@@ -1641,10 +1640,10 @@ def bridge_update_old_selection(bm, old_selected_faces):
#for i, face in enumerate(bm.faces):
# if face.index in old_indices:
# old_selected_faces.append(i)
-
+
old_selected_faces = [i for i, face in enumerate(bm.faces) if face.index \
in old_selected_faces]
-
+
return(old_selected_faces)
@@ -1666,7 +1665,7 @@ def circle_3d_to_2d(bm_mod, loop, com, normal):
m = mathutils.Vector((normal[0], normal[1] + 1.0, normal[2]))
p = m - (m.dot(normal) * normal)
q = p.cross(normal)
-
+
# change to 2d coordinates using perpendicular projection
locs_2d = []
for loc, vert in verts_projected:
@@ -1674,7 +1673,7 @@ def circle_3d_to_2d(bm_mod, loop, com, normal):
x = p.dot(vloc) / p.dot(p)
y = q.dot(vloc) / q.dot(q)
locs_2d.append([x, y, vert])
-
+
return(locs_2d, p, q)
@@ -1684,7 +1683,7 @@ def circle_calculate_best_fit(locs_2d):
x0 = 0.0
y0 = 0.0
r = 1.0
-
+
# calculate center and radius (non-linear least squares solution)
for iter in range(500):
jmat = []
@@ -1720,7 +1719,7 @@ def circle_calculate_best_fit(locs_2d):
# stop iterating if we're close enough to optimal solution
if abs(dx0)<1e-6 and abs(dy0)<1e-6 and abs(dr)<1e-6:
break
-
+
# return center of circle and radius
return(x0, y0, r)
@@ -1733,7 +1732,7 @@ def circle_calculate_min_fit(locs_2d):
center = mathutils.Vector([x0, y0])
# radius of circle
r = min([(mathutils.Vector([i[0], i[1]])-center).length for i in locs_2d])
-
+
# return center of circle and radius
return(x0, y0, r)
@@ -1744,10 +1743,10 @@ def circle_calculate_verts(flatten, bm_mod, locs_2d, com, p, q, normal):
locs_3d = []
for loc in locs_2d:
locs_3d.append([loc[2], loc[0]*p + loc[1]*q + com])
-
+
if flatten: # flat circle
return(locs_3d)
-
+
else: # project the locations on the existing mesh
vert_edges = dict_vert_edges(bm_mod)
vert_faces = dict_vert_faces(bm_mod)
@@ -1828,7 +1827,7 @@ def circle_calculate_verts(flatten, bm_mod, locs_2d, com, p, q, normal):
# nothing to project on, remain at flat location
projection = loc[1]
new_locs.append([loc[0], projection])
-
+
# return new positions of projected circle
return(new_locs)
@@ -1873,7 +1872,7 @@ def circle_check_loops(single_loops, loops, mapping, bm_mod):
# passed all tests, loop is valid
valid_loops.append([loop, circular])
valid_single_loops[len(valid_loops)-1] = single_loops[i]
-
+
return(valid_single_loops, valid_loops)
@@ -1883,7 +1882,7 @@ def circle_flatten_singles(bm_mod, com, p, q, normal, single_loop):
for vert in single_loop:
loc = mathutils.Vector(bm_mod.verts[vert].co[:])
new_locs.append([vert, loc - (loc-com).dot(normal)*normal])
-
+
return(new_locs)
@@ -1891,7 +1890,7 @@ def circle_flatten_singles(bm_mod, com, p, q, normal, single_loop):
def circle_get_input(object, bm, scene):
# get mesh with modifiers applied
derived, bm_mod = get_derived_bmesh(object, bm, scene)
-
+
# create list of edge-keys based on selection state
faces = False
for face in bm.faces:
@@ -1914,7 +1913,7 @@ def circle_get_input(object, bm, scene):
# no faces, so no internal edges either
edge_keys = [edgekey(edge) for edge in bm_mod.edges if edge.select \
and not edge.hide]
-
+
# add edge-keys around single vertices
verts_connected = dict([[vert, 1] for edge in [edge for edge in \
bm_mod.edges if edge.select and not edge.hide] for vert in \
@@ -1922,7 +1921,7 @@ def circle_get_input(object, bm, scene):
single_vertices = [vert.index for vert in bm_mod.verts if \
vert.select and not vert.hide and not \
verts_connected.get(vert.index, False)]
-
+
if single_vertices and len(bm.faces)>0:
vert_to_single = dict([[v.index, []] for v in bm_mod.verts \
if not v.hide])
@@ -1939,10 +1938,10 @@ def circle_get_input(object, bm, scene):
if vert not in vert_to_single[ek[1]]:
vert_to_single[ek[1]].append(vert)
break
-
+
# sort edge-keys into loops
loops = get_connected_selections(edge_keys)
-
+
# find out to which loops the single vertices belong
single_loops = dict([[i, []] for i in range(len(loops))])
if single_vertices and len(bm.faces)>0:
@@ -1952,7 +1951,7 @@ def circle_get_input(object, bm, scene):
for single in vert_to_single[vert]:
if single not in single_loops[i]:
single_loops[i].append(single)
-
+
return(derived, bm_mod, single_vertices, single_loops, loops)
@@ -1964,7 +1963,7 @@ def circle_influence_locs(locs_2d, new_locs_2d, influence):
altx = newx*(influence/100)+ oldx*((100-influence)/100)
alty = newy*(influence/100)+ oldy*((100-influence)/100)
locs_2d[i] = [altx, alty, j]
-
+
return(locs_2d)
@@ -1975,7 +1974,7 @@ def circle_project_non_regular(locs_2d, x0, y0, r):
loc = mathutils.Vector([x-x0, y-y0])
loc.length = r
locs_2d[i] = [loc[0], loc[1], j]
-
+
return(locs_2d)
@@ -2001,7 +2000,7 @@ def circle_project_regular(locs_2d, x0, y0, r):
x = math.cos(t) * r
y = math.sin(t) * r
locs_2d[i] = [x, y, locs_2d[i][2]]
-
+
return(locs_2d)
@@ -2013,7 +2012,7 @@ def circle_shift_loop(bm_mod, loop, com):
distances.sort()
shift = distances[0][1]
loop = [verts[shift:] + verts[:shift], circular]
-
+
return(loop)
@@ -2094,7 +2093,7 @@ def curve_calculate_knots(loop, verts_selected):
knots.insert(0, loop[0][0])
if loop[0][-1] not in knots:
knots.append(loop[0][-1])
-
+
return(knots, points)
@@ -2103,7 +2102,7 @@ def curve_calculate_t(bm_mod, knots, points, pknots, regular, circular):
tpoints = []
loc_prev = False
len_total = 0
-
+
for p in points:
if p in knots:
loc = pknots[knots.index(p)] # use projected knot location
@@ -2120,7 +2119,7 @@ def curve_calculate_t(bm_mod, knots, points, pknots, regular, circular):
tknots.append(tpoints[points.index(p)])
if circular:
tknots[-1] = tpoints[-1]
-
+
# regular option
if regular:
tpoints_average = tpoints[-1] / (len(tpoints) - 1)
@@ -2130,8 +2129,8 @@ def curve_calculate_t(bm_mod, knots, points, pknots, regular, circular):
tknots[i] = tpoints[points.index(knots[i])]
if circular:
tknots[-1] = tpoints[-1]
-
-
+
+
return(tknots, tpoints)
@@ -2140,7 +2139,7 @@ def curve_calculate_vertices(bm_mod, knots, tknots, points, tpoints, splines,
interpolation, restriction):
newlocs = {}
move = []
-
+
for p in points:
if p in knots:
continue
@@ -2156,7 +2155,7 @@ interpolation, restriction):
n = len(splines) - 1
elif n < 0:
n = 0
-
+
if interpolation == 'cubic':
ax, bx, cx, dx, tx = splines[n][0]
x = ax + bx*(m-tx) + cx*(m-tx)**2 + dx*(m-tx)**3
@@ -2173,7 +2172,7 @@ interpolation, restriction):
newlocs[p] = newloc
else: # set the vertex to its new location
move.append([p, newloc])
-
+
if restriction != 'none': # vertex movement is restricted
for p in points:
if p in newlocs:
@@ -2212,7 +2211,7 @@ def curve_cut_boundaries(bm_mod, loops):
cut_loops.append([loop[first:], circular])
else:
cut_loops.append([loop[first:last], circular])
-
+
return(cut_loops)
@@ -2220,7 +2219,7 @@ def curve_cut_boundaries(bm_mod, loops):
def curve_get_input(object, bm, boundaries, scene):
# get mesh with modifiers applied
derived, bm_mod = get_derived_bmesh(object, bm, scene)
-
+
# vertices that still need a loop to run through it
verts_unsorted = [v.index for v in bm_mod.verts if \
v.select and not v.hide]
@@ -2228,13 +2227,13 @@ def curve_get_input(object, bm, boundaries, scene):
vert_edges = dict_vert_edges(bm_mod)
edge_faces = dict_edge_faces(bm_mod)
correct_loops = []
-
+
# find loops through each selected vertex
while len(verts_unsorted) > 0:
loops = curve_vertex_loops(bm_mod, verts_unsorted[0], vert_edges,
edge_faces)
verts_unsorted.pop(0)
-
+
# check if loop is fully selected
search_perpendicular = False
i = -1
@@ -2261,11 +2260,11 @@ def curve_get_input(object, bm, boundaries, scene):
else:
for loop, circular in loops:
correct_loops.append([loop, circular])
-
+
# boundaries option
if boundaries:
correct_loops = curve_cut_boundaries(bm_mod, correct_loops)
-
+
return(derived, bm_mod, correct_loops)
@@ -2282,7 +2281,7 @@ def curve_perpendicular_loops(bm_mod, start_loop, vert_edges, edge_faces):
continue
else:
perp_loops.append([loop, circular, loop.index(start_vert)])
-
+
# trim loops to same lengths
shortest = [[len(loop[0]), i] for i, loop in enumerate(perp_loops)\
if not loop[1]]
@@ -2320,7 +2319,7 @@ def curve_perpendicular_loops(bm_mod, start_loop, vert_edges, edge_faces):
start = max(0, loop[2] - before_start)
end = min(len(loop[0]), loop[2] + after_start + 1)
trimmed_loops.append([loop[0][start:end], False])
-
+
return(trimmed_loops)
@@ -2334,7 +2333,7 @@ def curve_project_knots(bm_mod, verts_selected, knots, points, circular):
v3 -= v1
p = v3.project(v2)
return(p + v1)
-
+
if circular: # project all knots
start = 0
end = len(knots)
@@ -2369,7 +2368,7 @@ def curve_project_knots(bm_mod, verts_selected, knots, points, circular):
pknots.append(mathutils.Vector(bm_mod.verts[knot].co[:]))
if not circular:
pknots.append(mathutils.Vector(bm_mod.verts[knots[-1]].co[:]))
-
+
return(pknots)
@@ -2377,7 +2376,7 @@ def curve_project_knots(bm_mod, verts_selected, knots, points, circular):
def curve_vertex_loops(bm_mod, start_vert, vert_edges, edge_faces):
edges_used = []
loops = []
-
+
for edge in vert_edges[start_vert]:
if edge in edges_used:
continue
@@ -2424,7 +2423,7 @@ def curve_vertex_loops(bm_mod, start_vert, vert_edges, edge_faces):
break
loop.reverse()
loops.append([loop, circular])
-
+
return(loops)
@@ -2437,11 +2436,11 @@ def flatten_get_input(bm):
vert_verts = dict_vert_verts([edgekey(edge) for edge in bm.edges \
if edge.select and not edge.hide])
verts = [v.index for v in bm.verts if v.select and not v.hide]
-
+
# no connected verts, consider all selected verts as a single input
if not vert_verts:
return([[verts, False]])
-
+
loops = []
while len(verts) > 0:
# start of loop
@@ -2462,7 +2461,7 @@ def flatten_get_input(bm):
to_grow += vert_verts[new_vert]
# add loop to loops
loops.append([loop, False])
-
+
return(loops)
@@ -2471,7 +2470,7 @@ def flatten_project(bm, loop, com, normal):
verts = [bm.verts[v] for v in loop[0]]
verts_projected = [[v.index, mathutils.Vector(v.co[:]) - \
(mathutils.Vector(v.co[:])-com).dot(normal)*normal] for v in verts]
-
+
return(verts_projected)
@@ -2492,32 +2491,32 @@ class gstretch_fake_stroke_point():
# flips loops, if necessary, to obtain maximum alignment to stroke
-def gstretch_align_pairs(ls_pairs, object, bm_mod, method):
+def gstretch_align_pairs(ls_pairs, object, bm_mod, method):
# returns total distance between all verts in loop and corresponding stroke
def distance_loop_stroke(loop, stroke, object, bm_mod, method):
stroke_lengths_cache = False
loop_length = len(loop[0])
total_distance = 0
-
+
if method != 'regular':
relative_lengths = gstretch_relative_lengths(loop, bm_mod)
-
+
for i, v_index in enumerate(loop[0]):
if method == 'regular':
relative_distance = i / (loop_length - 1)
else:
relative_distance = relative_lengths[i]
-
+
loc1 = object.matrix_world * bm_mod.verts[v_index].co
loc2, stroke_lengths_cache = gstretch_eval_stroke(stroke,
relative_distance, stroke_lengths_cache)
total_distance += (loc2 - loc1).length
-
+
return(total_distance)
-
+
if ls_pairs:
for (loop, stroke) in ls_pairs:
- distance_loop_stroke
+ distance_loop_stroke
total_dist = distance_loop_stroke(loop, stroke, object, bm_mod,
method)
loop[0].reverse()
@@ -2525,7 +2524,7 @@ def gstretch_align_pairs(ls_pairs, object, bm_mod, method):
method)
if total_dist_rev > total_dist:
loop[0].reverse()
-
+
return(ls_pairs)
@@ -2535,7 +2534,7 @@ def gstretch_calculate_verts(loop, stroke, object, bm_mod, method):
stroke_lengths_cache = False
loop_length = len(loop[0])
matrix_inverse = object.matrix_world.inverted()
-
+
# return intersection of line with stroke, or None
def intersect_line_stroke(vec1, vec2, stroke):
for i, p in enumerate(stroke.points[1:]):
@@ -2548,11 +2547,11 @@ def gstretch_calculate_verts(loop, stroke, object, bm_mod, method):
if -1 < dist < 1:
return(intersections[0])
return(None)
-
+
if method == 'project':
projection_vectors = []
vert_edges = dict_vert_edges(bm_mod)
-
+
for v_index in loop[0]:
for ek in vert_edges[v_index]:
v1, v2 = ek
@@ -2570,11 +2569,11 @@ def gstretch_calculate_verts(loop, stroke, object, bm_mod, method):
stroke)
if intersection:
move.append([v_index, matrix_inverse * intersection])
-
+
else:
if method == 'irregular':
relative_lengths = gstretch_relative_lengths(loop, bm_mod)
-
+
for i, v_index in enumerate(loop[0]):
if method == 'regular':
relative_distance = i / (loop_length - 1)
@@ -2584,7 +2583,7 @@ def gstretch_calculate_verts(loop, stroke, object, bm_mod, method):
relative_distance, stroke_lengths_cache)
loc = matrix_inverse * loc
move.append([v_index, loc])
-
+
return(move)
@@ -2595,7 +2594,7 @@ conversion_distance, conversion_max, conversion_min, conversion_vertices):
stroke_verts = []
mat_world = object.matrix_world.inverted()
singles = gstretch_match_single_verts(bm_mod, strokes, mat_world)
-
+
for stroke in strokes:
stroke_verts.append([stroke, []])
min_end_point = 0
@@ -2662,7 +2661,7 @@ conversion_distance, conversion_max, conversion_min, conversion_vertices):
if m_stroke != stroke:
continue
bm_mod.edges.new((vert, verts_seq[point]))
-
+
bmesh.update_edit_mesh(object.data)
return(move)
@@ -2704,7 +2703,7 @@ def gstretch_eval_stroke(stroke, distance, stroke_lengths_cache=False):
stroke_lengths_cache = [length / total_length for length in
lengths]
stroke_lengths = stroke_lengths_cache[:]
-
+
if distance in stroke_lengths:
loc = stroke.points[stroke_lengths.index(distance)].co
elif distance > stroke_lengths[-1]:
@@ -2722,7 +2721,7 @@ def gstretch_eval_stroke(stroke, distance, stroke_lengths_cache=False):
stroke.points[stroke_index-1].co
loc = stroke.points[stroke_index-1].co + \
distance_relative * interval_vector
-
+
return(loc, stroke_lengths_cache)
@@ -2751,7 +2750,7 @@ def gstretch_get_strokes(object):
strokes = frame.strokes
if len(strokes) < 1:
return(None)
-
+
return(strokes)
@@ -2759,7 +2758,7 @@ def gstretch_get_strokes(object):
def gstretch_match_loops_strokes(loops, strokes, object, bm_mod):
if not loops or not strokes:
return(None)
-
+
# calculate loop centers
loop_centers = []
for loop in loops:
@@ -2769,7 +2768,7 @@ def gstretch_match_loops_strokes(loops, strokes, object, bm_mod):
center /= len(loop[0])
center = object.matrix_world * center
loop_centers.append([center, loop])
-
+
# calculate stroke centers
stroke_centers = []
for stroke in strokes:
@@ -2778,7 +2777,7 @@ def gstretch_match_loops_strokes(loops, strokes, object, bm_mod):
center += p.co
center /= len(stroke.points)
stroke_centers.append([center, stroke, 0])
-
+
# match, first by stroke use count, then by distance
ls_pairs = []
for lc in loop_centers:
@@ -2789,7 +2788,7 @@ def gstretch_match_loops_strokes(loops, strokes, object, bm_mod):
best_stroke = distances[0][2]
ls_pairs.append([lc[1], stroke_centers[best_stroke][1]])
stroke_centers[best_stroke][2] += 1 # increase stroke use count
-
+
return(ls_pairs)
@@ -2801,7 +2800,7 @@ def gstretch_match_single_verts(bm_mod, strokes, mat_world):
for stroke in strokes:
endpoints.append((mat_world * stroke.points[0].co, stroke, 0))
endpoints.append((mat_world * stroke.points[-1].co, stroke, -1))
-
+
distances = []
# find single vertices (not connected to other selected verts)
for vert in bm_mod.verts:
@@ -2816,11 +2815,11 @@ def gstretch_match_single_verts(bm_mod, strokes, mat_world):
continue
# calculate distances from vertex to endpoints
distance = [((vert.co - loc).length, vert, stroke, stroke_point,
- endpoint_index) for endpoint_index, (loc, stroke, stroke_point) in
+ endpoint_index) for endpoint_index, (loc, stroke, stroke_point) in
enumerate(endpoints)]
distance.sort()
distances.append(distance[0])
-
+
# create matches, based on shortest distance first
singles = []
while distances:
@@ -2836,7 +2835,7 @@ def gstretch_match_single_verts(bm_mod, strokes, mat_world):
distance_new.sort()
distances_new.append(distance_new[0])
distances = distances_new
-
+
return(singles)
@@ -2849,7 +2848,7 @@ def gstretch_relative_lengths(loop, bm_mod):
total_length = max(lengths[-1], 1e-7)
relative_lengths = [length / total_length for length in
lengths]
-
+
return(relative_lengths)
@@ -2858,7 +2857,7 @@ def gstretch_safe_to_true_strokes(safe_strokes):
strokes = []
for safe_stroke in safe_strokes:
strokes.append(gstretch_fake_stroke(safe_stroke))
-
+
return(strokes)
@@ -2867,7 +2866,7 @@ def gstretch_true_to_safe_strokes(strokes):
safe_strokes = []
for stroke in strokes:
safe_strokes.append([p.co.copy() for p in stroke.points])
-
+
return(safe_strokes)
@@ -2940,7 +2939,7 @@ def relax_calculate_knots(loops):
all_knots.append(k)
for p in points:
all_points.append(p)
-
+
return(all_knots, all_points)
@@ -2978,7 +2977,7 @@ def relax_calculate_t(bm_mod, knots, points, regular):
tpoints.append((tknots[p] + tknots[p+1]) / 2)
all_tknots.append(tknots)
all_tpoints.append(tpoints)
-
+
return(all_tknots, all_tpoints)
@@ -3001,7 +3000,7 @@ points, splines):
n = len(splines[i]) - 1
elif n < 0:
n = 0
-
+
if interpolation == 'cubic':
ax, bx, cx, dx, tx = splines[i][n][0]
x = ax + bx*(m-tx) + cx*(m-tx)**2 + dx*(m-tx)**3
@@ -3017,7 +3016,7 @@ points, splines):
change.append([p, ((m-t)/u)*d + a])
for c in change:
move.append([c[0], (bm_mod.verts[c[0]].co + c[1]) / 2])
-
+
return(move)
@@ -3040,7 +3039,7 @@ def space_calculate_t(bm_mod, knots):
amount = len(knots)
t_per_segment = len_total / (amount - 1)
tpoints = [i * t_per_segment for i in range(amount)]
-
+
return(tknots, tpoints)
@@ -3061,7 +3060,7 @@ splines):
n = len(splines) - 1
elif n < 0:
n = 0
-
+
if interpolation == 'cubic':
ax, bx, cx, dx, tx = splines[n][0]
x = ax + bx*(m-tx) + cx*(m-tx)**2 + dx*(m-tx)**3
@@ -3073,7 +3072,7 @@ splines):
else: # interpolation == 'linear'
a, d, t, u = splines[n]
move.append([p, ((m-t)/u)*d + a])
-
+
return(move)
@@ -3087,7 +3086,7 @@ class Bridge(bpy.types.Operator):
bl_label = "Bridge / Loft"
bl_description = "Bridge two, or loft several, loops of vertices"
bl_options = {'REGISTER', 'UNDO'}
-
+
cubic_strength = bpy.props.FloatProperty(name = "Strength",
description = "Higher strength results in more fluid curves",
default = 1.0,
@@ -3135,16 +3134,16 @@ class Bridge(bpy.types.Operator):
twist = bpy.props.IntProperty(name = "Twist",
description = "Twist what vertices are connected to each other",
default = 0)
-
+
@classmethod
def poll(cls, context):
ob = context.active_object
return (ob and ob.type == 'MESH' and context.mode == 'EDIT_MESH')
-
+
def draw(self, context):
layout = self.layout
#layout.prop(self, "mode") # no cases yet where 'basic' mode is needed
-
+
# top row
col_top = layout.column(align=True)
row = col_top.row(align=True)
@@ -3164,26 +3163,26 @@ class Bridge(bpy.types.Operator):
col_top.prop(self, "remove_faces")
if self.loft:
col_top.prop(self, "loft_loop")
-
+
# override properties
col_top.separator()
row = layout.row(align = True)
row.prop(self, "twist")
row.prop(self, "reverse")
-
+
def invoke(self, context, event):
# load custom settings
context.window_manager.looptools.bridge_loft = self.loft
settings_load(self)
return self.execute(context)
-
+
def execute(self, context):
# initialise
global_undo, object, bm = initialise()
edge_faces, edgekey_to_edge, old_selected_faces, smooth = \
bridge_initialise(bm, self.interpolation)
settings_write(self)
-
+
# check cache to see if we can save time
input_method = bridge_input_method(self.loft, self.loft_loop)
cached, single_loops, loops, derived, mapping = cache_read("Bridge",
@@ -3198,7 +3197,7 @@ class Bridge(bpy.types.Operator):
loops = bridge_sort_loops(bm, loops, self.loft_loop)
else:
loops = bridge_match_loops(bm, loops)
-
+
# saving cache for faster execution next time
if not cached:
cache_write("Bridge", object, bm, input_method, False, False,
@@ -3248,10 +3247,10 @@ class Bridge(bpy.types.Operator):
bmesh.update_edit_mesh(object.data, tessface=False,
destructive=True)
bpy.ops.mesh.normals_make_consistent()
-
+
# cleaning up
terminate(global_undo)
-
+
return{'FINISHED'}
@@ -3261,7 +3260,7 @@ class Circle(bpy.types.Operator):
bl_label = "Circle"
bl_description = "Move selected vertices into a circle shape"
bl_options = {'REGISTER', 'UNDO'}
-
+
custom_radius = bpy.props.BoolProperty(name = "Radius",
description = "Force a custom radius",
default = False)
@@ -3290,19 +3289,19 @@ class Circle(bpy.types.Operator):
description = "Distribute vertices at constant distances along the " \
"circle",
default = True)
-
+
@classmethod
def poll(cls, context):
ob = context.active_object
return(ob and ob.type == 'MESH' and context.mode == 'EDIT_MESH')
-
+
def draw(self, context):
layout = self.layout
col = layout.column()
-
+
col.prop(self, "fit")
col.separator()
-
+
col.prop(self, "flatten")
row = col.row(align=True)
row.prop(self, "custom_radius")
@@ -3311,14 +3310,14 @@ class Circle(bpy.types.Operator):
row_right.prop(self, "radius", text="")
col.prop(self, "regular")
col.separator()
-
+
col.prop(self, "influence")
-
+
def invoke(self, context, event):
# load custom settings
settings_load(self)
return self.execute(context)
-
+
def execute(self, context):
# initialise
global_undo, object, bm = initialise()
@@ -3336,12 +3335,12 @@ class Circle(bpy.types.Operator):
False, loops)
single_loops, loops = circle_check_loops(single_loops, loops,
mapping, bm_mod)
-
+
# saving cache for faster execution next time
if not cached:
cache_write("Circle", object, bm, False, False, single_loops,
loops, derived, mapping)
-
+
move = []
for i, loop in enumerate(loops):
# best fitting flat plane
@@ -3374,15 +3373,15 @@ class Circle(bpy.types.Operator):
if self.flatten and single_loops:
move.append(circle_flatten_singles(bm_mod, com, p, q,
normal, single_loops[i]))
-
+
# move vertices to new locations
move_verts(object, bm, mapping, move, -1)
-
+
# cleaning up
if derived:
bm_mod.free()
terminate(global_undo)
-
+
return{'FINISHED'}
@@ -3392,7 +3391,7 @@ class Curve(bpy.types.Operator):
bl_label = "Curve"
bl_description = "Turn a loop into a smooth curve"
bl_options = {'REGISTER', 'UNDO'}
-
+
boundaries = bpy.props.BoolProperty(name = "Boundaries",
description = "Limit the tool to work within the boundaries of the "\
"selected vertices",
@@ -3421,29 +3420,29 @@ class Curve(bpy.types.Operator):
"extrusions)")),
description = "Restrictions on how the vertices can be moved",
default = 'none')
-
+
@classmethod
def poll(cls, context):
ob = context.active_object
return(ob and ob.type == 'MESH' and context.mode == 'EDIT_MESH')
-
+
def draw(self, context):
layout = self.layout
col = layout.column()
-
+
col.prop(self, "interpolation")
col.prop(self, "restriction")
col.prop(self, "boundaries")
col.prop(self, "regular")
col.separator()
-
+
col.prop(self, "influence")
-
+
def invoke(self, context, event):
# load custom settings
settings_load(self)
return self.execute(context)
-
+
def execute(self, context):
# initialise
global_undo, object, bm = initialise()
@@ -3461,12 +3460,12 @@ class Curve(bpy.types.Operator):
loops = check_loops(loops, mapping, bm_mod)
verts_selected = [v.index for v in bm_mod.verts if v.select \
and not v.hide]
-
+
# saving cache for faster execution next time
if not cached:
cache_write("Curve", object, bm, False, self.boundaries, False,
loops, derived, mapping)
-
+
move = []
for loop in loops:
knots, points = curve_calculate_knots(loop, verts_selected)
@@ -3479,11 +3478,11 @@ class Curve(bpy.types.Operator):
move.append(curve_calculate_vertices(bm_mod, knots, tknots,
points, tpoints, splines, self.interpolation,
self.restriction))
-
+
# move vertices to new locations
move_verts(object, bm, mapping, move, self.influence)
-
- # cleaning up
+
+ # cleaning up
if derived:
bm_mod.free()
terminate(global_undo)
@@ -3497,7 +3496,7 @@ class Flatten(bpy.types.Operator):
bl_label = "Flatten"
bl_description = "Flatten vertices on a best-fitting plane"
bl_options = {'REGISTER', 'UNDO'}
-
+
influence = bpy.props.FloatProperty(name = "Influence",
description = "Force of the tool",
default = 100.0,
@@ -3519,27 +3518,27 @@ class Flatten(bpy.types.Operator):
"movement inside the bounding box of the selection")),
description = "Restrictions on how the vertices can be moved",
default = 'none')
-
+
@classmethod
def poll(cls, context):
ob = context.active_object
return(ob and ob.type == 'MESH' and context.mode == 'EDIT_MESH')
-
+
def draw(self, context):
layout = self.layout
col = layout.column()
-
+
col.prop(self, "plane")
#col.prop(self, "restriction")
col.separator()
-
+
col.prop(self, "influence")
-
+
def invoke(self, context, event):
# load custom settings
settings_load(self)
return self.execute(context)
-
+
def execute(self, context):
# initialise
global_undo, object, bm = initialise()
@@ -3551,12 +3550,12 @@ class Flatten(bpy.types.Operator):
# order input into virtual loops
loops = flatten_get_input(bm)
loops = check_loops(loops, mapping, bm)
-
+
# saving cache for faster execution next time
if not cached:
cache_write("Flatten", object, bm, False, False, False, loops,
False, False)
-
+
move = []
for loop in loops:
# calculate plane and position of vertices on them
@@ -3568,10 +3567,10 @@ class Flatten(bpy.types.Operator):
else:
move.append(to_move)
move_verts(object, bm, False, move, self.influence)
-
+
# cleaning up
terminate(global_undo)
-
+
return{'FINISHED'}
@@ -3581,7 +3580,7 @@ class GStretch(bpy.types.Operator):
bl_label = "Gstretch"
bl_description = "Stretch selected vertices to Grease Pencil stroke"
bl_options = {'REGISTER', 'UNDO'}
-
+
conversion = bpy.props.EnumProperty(name = "Conversion",
items = (("distance", "Distance", "Set the distance between vertices "\
"of the converted grease pencil stroke"),
@@ -3626,7 +3625,7 @@ class GStretch(bpy.types.Operator):
delete_strokes = bpy.props.BoolProperty(name="Delete strokes",
description = "Remove Grease Pencil strokes if they have been used "\
"for Gstretch. WARNING: DOES NOT SUPPORT UNDO",
- default = False)
+ default = False)
influence = bpy.props.FloatProperty(name = "Influence",
description = "Force of the tool",
default = 100.0,
@@ -3644,20 +3643,20 @@ class GStretch(bpy.types.Operator):
description = "Method of distributing the vertices over the Grease "\
"Pencil stroke",
default = 'regular')
-
+
@classmethod
def poll(cls, context):
ob = context.active_object
return(ob and ob.type == 'MESH' and context.mode == 'EDIT_MESH')
-
+
def draw(self, context):
layout = self.layout
col = layout.column()
-
+
col.prop(self, "method")
col.prop(self, "delete_strokes")
col.separator()
-
+
col_conv = col.column(align=True)
col_conv.prop(self, "conversion", text="")
if self.conversion == 'distance':
@@ -3669,9 +3668,9 @@ class GStretch(bpy.types.Operator):
elif self.conversion == 'vertices':
col_conv.prop(self, "conversion_vertices")
col.separator()
-
+
col.prop(self, "influence")
-
+
def invoke(self, context, event):
# flush cached strokes
if 'Gstretch' in looptools_cache:
@@ -3679,12 +3678,12 @@ class GStretch(bpy.types.Operator):
# load custom settings
settings_load(self)
return self.execute(context)
-
+
def execute(self, context):
# initialise
global_undo, object, bm = initialise()
settings_write(self)
-
+
# check cache to see if we can save time
cached, safe_strokes, loops, derived, mapping = cache_read("Gstretch",
object, bm, False, False)
@@ -3709,7 +3708,7 @@ class GStretch(bpy.types.Operator):
strokes = gstretch_get_strokes(object)
else:
strokes = gstretch_get_fake_strokes(object, bm_mod, loops)
-
+
# saving cache for faster execution next time
if not cached:
if strokes:
@@ -3722,7 +3721,7 @@ class GStretch(bpy.types.Operator):
# pair loops and strokes
ls_pairs = gstretch_match_loops_strokes(loops, strokes, object, bm_mod)
ls_pairs = gstretch_align_pairs(ls_pairs, object, bm_mod, self.method)
-
+
move = []
if not loops:
# no selected geometry, convert GP to verts
@@ -3756,12 +3755,12 @@ class GStretch(bpy.types.Operator):
bmesh.update_edit_mesh(object.data, tessface=True,
destructive=True)
move_verts(object, bm, mapping, move, self.influence)
-
- # cleaning up
+
+ # cleaning up
if derived:
bm_mod.free()
terminate(global_undo)
-
+
return{'FINISHED'}
@@ -3771,7 +3770,7 @@ class Relax(bpy.types.Operator):
bl_label = "Relax"
bl_description = "Relax the loop, so it is smoother"
bl_options = {'REGISTER', 'UNDO'}
-
+
input = bpy.props.EnumProperty(name = "Input",
items = (("all", "Parallel (all)", "Also use non-selected "\
"parallel loops as input"),
@@ -3795,26 +3794,26 @@ class Relax(bpy.types.Operator):
description = "Distribute vertices at constant distances along the" \
"loop",
default = True)
-
+
@classmethod
def poll(cls, context):
ob = context.active_object
return(ob and ob.type == 'MESH' and context.mode == 'EDIT_MESH')
-
+
def draw(self, context):
layout = self.layout
col = layout.column()
-
+
col.prop(self, "interpolation")
col.prop(self, "input")
col.prop(self, "iterations")
col.prop(self, "regular")
-
+
def invoke(self, context, event):
# load custom settings
settings_load(self)
return self.execute(context)
-
+
def execute(self, context):
# initialise
global_undo, object, bm = initialise()
@@ -3831,12 +3830,12 @@ class Relax(bpy.types.Operator):
mapping = get_mapping(derived, bm, bm_mod, False, False, loops)
loops = check_loops(loops, mapping, bm_mod)
knots, points = relax_calculate_knots(loops)
-
+
# saving cache for faster execution next time
if not cached:
cache_write("Relax", object, bm, self.input, False, False, loops,
derived, mapping)
-
+
for iteration in range(int(self.iterations)):
# calculate splines and new positions
tknots, tpoints = relax_calculate_t(bm_mod, knots, points,
@@ -3848,12 +3847,12 @@ class Relax(bpy.types.Operator):
move = [relax_calculate_verts(bm_mod, self.interpolation,
tknots, knots, tpoints, points, splines)]
move_verts(object, bm, mapping, move, -1)
-
+
# cleaning up
if derived:
bm_mod.free()
terminate(global_undo)
-
+
return{'FINISHED'}
@@ -3863,7 +3862,7 @@ class Space(bpy.types.Operator):
bl_label = "Space"
bl_description = "Space the vertices in a regular distrubtion on the loop"
bl_options = {'REGISTER', 'UNDO'}
-
+
influence = bpy.props.FloatProperty(name = "Influence",
description = "Force of the tool",
default = 100.0,
@@ -3882,27 +3881,27 @@ class Space(bpy.types.Operator):
("linear", "Linear", "Vertices are projected on existing edges")),
description = "Algorithm used for interpolation",
default = 'cubic')
-
+
@classmethod
def poll(cls, context):
ob = context.active_object
return(ob and ob.type == 'MESH' and context.mode == 'EDIT_MESH')
-
+
def draw(self, context):
layout = self.layout
col = layout.column()
-
+
col.prop(self, "interpolation")
col.prop(self, "input")
col.separator()
-
+
col.prop(self, "influence")
-
+
def invoke(self, context, event):
# load custom settings
settings_load(self)
return self.execute(context)
-
+
def execute(self, context):
# initialise
global_undo, object, bm = initialise()
@@ -3918,12 +3917,12 @@ class Space(bpy.types.Operator):
context.scene, self.input)
mapping = get_mapping(derived, bm, bm_mod, False, False, loops)
loops = check_loops(loops, mapping, bm_mod)
-
+
# saving cache for faster execution next time
if not cached:
cache_write("Space", object, bm, self.input, False, False, loops,
derived, mapping)
-
+
move = []
for loop in loops:
# calculate splines and new positions
@@ -3936,12 +3935,12 @@ class Space(bpy.types.Operator):
tknots, tpoints, loop[0][:-1], splines))
# move vertices to new locations
move_verts(object, bm, mapping, move, self.influence)
-
+
# cleaning up
if derived:
bm_mod.free()
terminate(global_undo)
-
+
return{'FINISHED'}
@@ -3952,10 +3951,10 @@ class Space(bpy.types.Operator):
# menu containing all tools
class VIEW3D_MT_edit_mesh_looptools(bpy.types.Menu):
bl_label = "LoopTools"
-
+
def draw(self, context):
layout = self.layout
-
+
layout.operator("mesh.looptools_bridge", text="Bridge").loft = False
layout.operator("mesh.looptools_circle")
layout.operator("mesh.looptools_curve")
@@ -3979,7 +3978,7 @@ class VIEW3D_PT_tools_looptools(bpy.types.Panel):
layout = self.layout
col = layout.column(align=True)
lt = context.window_manager.looptools
-
+
# bridge - first line
split = col.split(percentage=0.15, align=True)
if lt.display_bridge:
@@ -3991,7 +3990,7 @@ class VIEW3D_PT_tools_looptools(bpy.types.Panel):
if lt.display_bridge:
box = col.column(align=True).box().column()
#box.prop(self, "mode")
-
+
# top row
col_top = box.column(align=True)
row = col_top.row(align=True)
@@ -4009,13 +4008,13 @@ class VIEW3D_PT_tools_looptools(bpy.types.Panel):
bottom_right.prop(lt, "bridge_cubic_strength")
# boolean properties
col_top.prop(lt, "bridge_remove_faces")
-
+
# override properties
col_top.separator()
row = box.row(align = True)
row.prop(lt, "bridge_twist")
row.prop(lt, "bridge_reverse")
-
+
# circle - first line
split = col.split(percentage=0.15, align=True)
if lt.display_circle:
@@ -4028,7 +4027,7 @@ class VIEW3D_PT_tools_looptools(bpy.types.Panel):
box = col.column(align=True).box().column()
box.prop(lt, "circle_fit")
box.separator()
-
+
box.prop(lt, "circle_flatten")
row = box.row(align=True)
row.prop(lt, "circle_custom_radius")
@@ -4037,9 +4036,9 @@ class VIEW3D_PT_tools_looptools(bpy.types.Panel):
row_right.prop(lt, "circle_radius", text="")
box.prop(lt, "circle_regular")
box.separator()
-
+
box.prop(lt, "circle_influence")
-
+
# curve - first line
split = col.split(percentage=0.15, align=True)
if lt.display_curve:
@@ -4055,9 +4054,9 @@ class VIEW3D_PT_tools_looptools(bpy.types.Panel):
box.prop(lt, "curve_boundaries")
box.prop(lt, "curve_regular")
box.separator()
-
+
box.prop(lt, "curve_influence")
-
+
# flatten - first line
split = col.split(percentage=0.15, align=True)
if lt.display_flatten:
@@ -4071,9 +4070,9 @@ class VIEW3D_PT_tools_looptools(bpy.types.Panel):
box.prop(lt, "flatten_plane")
#box.prop(lt, "flatten_restriction")
box.separator()
-
+
box.prop(lt, "flatten_influence")
-
+
# gstretch - first line
split = col.split(percentage=0.15, align=True)
if lt.display_gstretch:
@@ -4087,7 +4086,7 @@ class VIEW3D_PT_tools_looptools(bpy.types.Panel):
box.prop(lt, "gstretch_method")
box.prop(lt, "gstretch_delete_strokes")
box.separator()
-
+
col_conv = box.column(align=True)
col_conv.prop(lt, "gstretch_conversion", text="")
if lt.gstretch_conversion == 'distance':
@@ -4099,9 +4098,9 @@ class VIEW3D_PT_tools_looptools(bpy.types.Panel):
elif lt.gstretch_conversion == 'vertices':
col_conv.prop(lt, "gstretch_conversion_vertices")
box.separator()
-
+
box.prop(lt, "gstretch_influence")
-
+
# loft - first line
split = col.split(percentage=0.15, align=True)
if lt.display_loft:
@@ -4113,7 +4112,7 @@ class VIEW3D_PT_tools_looptools(bpy.types.Panel):
if lt.display_loft:
box = col.column(align=True).box().column()
#box.prop(self, "mode")
-
+
# top row
col_top = box.column(align=True)
row = col_top.row(align=True)
@@ -4132,13 +4131,13 @@ class VIEW3D_PT_tools_looptools(bpy.types.Panel):
# boolean properties
col_top.prop(lt, "bridge_remove_faces")
col_top.prop(lt, "bridge_loft_loop")
-
+
# override properties
col_top.separator()
row = box.row(align = True)
row.prop(lt, "bridge_twist")
row.prop(lt, "bridge_reverse")
-
+
# relax - first line
split = col.split(percentage=0.15, align=True)
if lt.display_relax:
@@ -4153,7 +4152,7 @@ class VIEW3D_PT_tools_looptools(bpy.types.Panel):
box.prop(lt, "relax_input")
box.prop(lt, "relax_iterations")
box.prop(lt, "relax_regular")
-
+
# space - first line
split = col.split(percentage=0.15, align=True)
if lt.display_space:
@@ -4167,7 +4166,7 @@ class VIEW3D_PT_tools_looptools(bpy.types.Panel):
box.prop(lt, "space_interpolation")
box.prop(lt, "space_input")
box.separator()
-
+
box.prop(lt, "space_influence")
@@ -4177,7 +4176,7 @@ class LoopToolsProps(bpy.types.PropertyGroup):
Fake module like class
bpy.context.window_manager.looptools
"""
-
+
# general display properties
display_bridge = bpy.props.BoolProperty(name = "Bridge settings",
description = "Display settings of the Bridge tool",
@@ -4203,7 +4202,7 @@ class LoopToolsProps(bpy.types.PropertyGroup):
display_space = bpy.props.BoolProperty(name = "Space settings",
description = "Display settings of the Space tool",
default = False)
-
+
# bridge properties
bridge_cubic_strength = bpy.props.FloatProperty(name = "Strength",
description = "Higher strength results in more fluid curves",
@@ -4253,7 +4252,7 @@ class LoopToolsProps(bpy.types.PropertyGroup):
bridge_twist = bpy.props.IntProperty(name = "Twist",
description = "Twist what vertices are connected to each other",
default = 0)
-
+
# circle properties
circle_custom_radius = bpy.props.BoolProperty(name = "Radius",
description = "Force a custom radius",
@@ -4283,7 +4282,7 @@ class LoopToolsProps(bpy.types.PropertyGroup):
description = "Distribute vertices at constant distances along the " \
"circle",
default = True)
-
+
# curve properties
curve_boundaries = bpy.props.BoolProperty(name = "Boundaries",
description = "Limit the tool to work within the boundaries of the "\
@@ -4313,7 +4312,7 @@ class LoopToolsProps(bpy.types.PropertyGroup):
"extrusions)")),
description = "Restrictions on how the vertices can be moved",
default = 'none')
-
+
# flatten properties
flatten_influence = bpy.props.FloatProperty(name = "Influence",
description = "Force of the tool",
@@ -4336,7 +4335,7 @@ class LoopToolsProps(bpy.types.PropertyGroup):
"movement inside the bounding box of the selection")),
description = "Restrictions on how the vertices can be moved",
default = 'none')
-
+
# gstretch properties
gstretch_conversion = bpy.props.EnumProperty(name = "Conversion",
items = (("distance", "Distance", "Set the distance between vertices "\
@@ -4382,7 +4381,7 @@ class LoopToolsProps(bpy.types.PropertyGroup):
gstretch_delete_strokes = bpy.props.BoolProperty(name="Delete strokes",
description = "Remove Grease Pencil strokes if they have been used "\
"for Gstretch. WARNING: DOES NOT SUPPORT UNDO",
- default = False)
+ default = False)
gstretch_influence = bpy.props.FloatProperty(name = "Influence",
description = "Force of the tool",
default = 100.0,
@@ -4400,7 +4399,7 @@ class LoopToolsProps(bpy.types.PropertyGroup):
description = "Method of distributing the vertices over the Grease "\
"Pencil stroke",
default = 'regular')
-
+
# relax properties
relax_input = bpy.props.EnumProperty(name = "Input",
items = (("all", "Parallel (all)", "Also use non-selected "\
@@ -4425,7 +4424,7 @@ class LoopToolsProps(bpy.types.PropertyGroup):
description = "Distribute vertices at constant distances along the" \
"loop",
default = True)
-
+
# space properties
space_influence = bpy.props.FloatProperty(name = "Influence",
description = "Force of the tool",
diff --git a/mesh_relax.py b/mesh_relax.py
index 579cce25..87ba4bd5 100644
--- a/mesh_relax.py
+++ b/mesh_relax.py
@@ -24,15 +24,14 @@
bl_info = {
"name": "Relax",
"author": "Fabian Fricke",
- "version": (1,1),
+ "version": (1, 1),
"blender": (2, 57, 0),
"location": "View3D > Specials > Relax ",
"description": "Relax the selected verts while retaining the shape",
"warning": "",
- "wiki_url": "http://wiki.blender.org/index.php/Extensions:2.6/Py/"\
+ "wiki_url": "http://wiki.blender.org/index.php/Extensions:2.6/Py/"
"Scripts/Modeling/Relax",
- "tracker_url": "https://projects.blender.org/tracker/index.php?"\
- "func=detail&aid=21421",
+ "tracker_url": "https://developer.blender.org/T21421",
"category": "Mesh"}
"""
@@ -70,10 +69,10 @@ def relax_mesh(context):
target.modifiers.remove(target.modifiers[0])
context.scene.objects.active = obj
-
+
sw = obj.modifiers.new(type='SHRINKWRAP', name='relax_target')
sw.target = target
-
+
# run smooth operator to relax the mesh
bpy.ops.object.mode_set(mode='EDIT')
bpy.ops.mesh.vertices_smooth()
@@ -81,12 +80,12 @@ def relax_mesh(context):
# apply the modifier
bpy.ops.object.modifier_apply(modifier='relax_target')
-
+
# delete the target object
obj.select = False
target.select = True
bpy.ops.object.delete()
-
+
# go back to initial state
obj.select = True
bpy.ops.object.mode_set(mode='EDIT')
diff --git a/mocap/__init__.py b/mocap/__init__.py
index 92dd8332..68ca2bb2 100644
--- a/mocap/__init__.py
+++ b/mocap/__init__.py
@@ -22,11 +22,12 @@ bl_info = {
"name": "Motion Capture Tools",
"author": "Benjy Cook",
"blender": (2, 62, 0),
- "location": "Object UI -> Mocap tools",
+ "location": "Object UI > Mocap tools",
"description": "Various tools for working with motion capture animation",
"warning": "",
- "wiki_url": "http://wiki.blender.org/index.php/Extensions:2.6/Py/Scripts/Animation/Motion_Capture_Tools",
- "tracker_url": "http://projects.blender.org/tracker/index.php?func=detail&aid=28321",
+ "wiki_url": "http://wiki.blender.org/index.php/Extensions:2.6/Py/"
+ "Scripts/Animation/Motion_Capture_Tools",
+ "tracker_url": "https://developer.blender.org/T28321",
"support": 'OFFICIAL',
"category": "Animation"}
diff --git a/netrender/__init__.py b/netrender/__init__.py
index 60c87739..71bf1a33 100644
--- a/netrender/__init__.py
+++ b/netrender/__init__.py
@@ -26,10 +26,10 @@ bl_info = {
"location": "Render > Engine > Network Render",
"description": "Distributed rendering for Blender",
"warning": "Stable but still work in progress",
- "wiki_url": "http://wiki.blender.org/index.php/Doc:2.5/Manual/Render/Engines/Netrender",
- "tracker_url": "https://projects.blender.org/tracker/?func=detail&atid=469&aid=36513&group_id=153",
- "category": "Render",
-}
+ "wiki_url": "http://wiki.blender.org/index.php/Doc:2.5/Manual/"
+ "Render/Engines/Netrender",
+ "tracker_url": "https://developer.blender.org/T36513",
+ "category": "Render"}
# To support reload properly, try to access a package var, if it's there, reload everything
diff --git a/node_efficiency_tools.py b/node_efficiency_tools.py
index 84c85788..3f2aeeb3 100644
--- a/node_efficiency_tools.py
+++ b/node_efficiency_tools.py
@@ -24,8 +24,9 @@ bl_info = {
'location': "Node Editor Properties Panel or Ctrl-SPACE",
'description': "Various tools to enhance and speed up node-based workflow",
'warning': "",
- 'wiki_url': "http://wiki.blender.org/index.php/Extensions:2.6/Py/Scripts/Nodes/Nodes_Efficiency_Tools",
- 'tracker_url': "http://projects.blender.org/tracker/index.php?func=detail&aid=33543&group_id=153&atid=469",
+ 'wiki_url': "http://wiki.blender.org/index.php/Extensions:2.6/Py/"
+ "Scripts/Nodes/Nodes_Efficiency_Tools",
+ 'tracker_url': "https://developer.blender.org/T33543",
'category': "Node",
}
diff --git a/object_add_chain.py b/object_add_chain.py
index fc5f3bd2..336346e3 100644
--- a/object_add_chain.py
+++ b/object_add_chain.py
@@ -24,10 +24,9 @@ bl_info = {
"location": "View3D > Add > Mesh",
"description": "Adds Chain with curve guide for easy creation",
"warning": "",
- "wiki_url": "http://wiki.blender.org/index.php/Extensions:2.6/Py/"\
+ "wiki_url": "http://wiki.blender.org/index.php/Extensions:2.6/Py/"
"Scripts/Object/Add_Chain",
- "tracker_url": "https://projects.blender.org/tracker/index.php?"\
- "func=detail&aid=22203",
+ "tracker_url": "https://developer.blender.org/T22203",
"category": "Object"}
import bpy
@@ -38,7 +37,7 @@ def Add_Chain():
bpy.ops.object.add(type='EMPTY',
view_align=False,
enter_editmode=False,
- location=(0, 0, 0),
+ location=(0, 0, 0),
rotation=(0, 0, 0),
)
@@ -71,10 +70,10 @@ def Add_Chain():
##Positions Torus primitive to center of scene
bpy.context.active_object.location = 0.0, 0.0, 0.0
-
+
##Reseting Torus rotation in case of 'Align to view' option enabled
bpy.context.active_object.rotation_euler = 0.0, 0.0, 0.0
-
+
##Changes Torus name to chain adds variable tor
tor = bpy.context.object
@@ -89,7 +88,7 @@ def Add_Chain():
##Smooths tor
bpy.ops.object.shade_smooth()
- ##Select curv
+ ##Select curv
sce = bpy.context.scene
sce.objects.active = curv
diff --git a/object_animrenderbake.py b/object_animrenderbake.py
index c43b4f83..4670a304 100644
--- a/object_animrenderbake.py
+++ b/object_animrenderbake.py
@@ -26,19 +26,7 @@ bl_info = {
"category": "Object",
"wiki_url": "http://wiki.blender.org/index.php/Extensions:2.6/Py/"
"Scripts/Object/Animated_Render_Baker",
- "tracker_url": "https://projects.blender.org/tracker/index.php?"
- "func=detail&aid=24836"}
-
-import bpy
-from bpy.props import IntProperty
-
-class OBJECT_OT_animrenderbake(bpy.types.Operator):
- bl_label = "Animated Render Bake"
- bl_description= "Bake animated image textures of selected objects"
- bl_idname = "object.anim_bake_image"
- bl_register = True
-
- def framefile(self, filepath, frame):
+ "tracker_url": "https://developer.blender.org/T24836", filepath, frame):
"""
Set frame number to file name image.png -> image0013.png
"""
diff --git a/object_cloud_gen.py b/object_cloud_gen.py
index 7ab5231e..e618c5da 100644
--- a/object_cloud_gen.py
+++ b/object_cloud_gen.py
@@ -27,8 +27,7 @@ bl_info = {
"description": "Creates Volumetric Clouds",
"wiki_url": "http://wiki.blender.org/index.php/Extensions:2.6/Py/"
"Scripts/Object/Cloud_Gen",
- "tracker_url": "https://projects.blender.org/tracker/index.php?"
- "func=detail&aid=22015",
+ "tracker_url": "https://developer.blender.org/T22015",
"category": "Object"}
import bpy
diff --git a/object_edit_linked.py b/object_edit_linked.py
index d5ea8399..cbb75654 100644
--- a/object_edit_linked.py
+++ b/object_edit_linked.py
@@ -25,7 +25,7 @@ bl_info = {
"location": "View3D > Toolshelf > Edit Linked Library",
"description": "Allows editing of objects linked from a .blend library.",
"wiki_url": "http://wiki.blender.org/index.php?title=Extensions:2.6/Py/Scripts/Object/Edit_Linked_Library",
- "tracker_url": "http://projects.blender.org/tracker/index.php?func=detail&aid=29630",
+ "tracker_url": "https://developer.blender.org/T29630",
"category": "Object"}
diff --git a/object_fracture/__init__.py b/object_fracture/__init__.py
index 84da8b72..5b0d7a65 100644
--- a/object_fracture/__init__.py
+++ b/object_fracture/__init__.py
@@ -21,13 +21,12 @@ bl_info = {
"author": "pildanovak",
"version": (2, 0),
"blender": (2, 57, 0),
- "location": "Search > Fracture Object & Add -> Fracture Helper Objects",
+ "location": "Search > Fracture Object & Add > Fracture Helper Objects",
"description": "Fractured Object, Bomb, Projectile, Recorder",
"warning": "",
- "wiki_url": "http://wiki.blender.org/index.php/Extensions:2.6/Py/"\
+ "wiki_url": "http://wiki.blender.org/index.php/Extensions:2.6/Py/"
"Scripts/Object/Fracture",
- "tracker_url": "https://projects.blender.org/tracker/index.php?"\
- "func=detail&aid=21793",
+ "tracker_url": "https://developer.blender.org/T21793",
"category": "Object"}
diff --git a/object_grease_scatter.py b/object_grease_scatter.py
index 8e9d6556..a54df839 100644
--- a/object_grease_scatter.py
+++ b/object_grease_scatter.py
@@ -31,8 +31,7 @@ bl_info = {
"warning": "",
"wiki_url": "http://wiki.blender.org/index.php/Extensions:2.6/Py/"
"Scripts/Object/Grease_Scatter",
- "tracker_url": "https://projects.blender.org/tracker/index.php?"
- "func=detail&aid=TODO",
+ "tracker_url": "https://developer.blender.org",
"support": 'OFFICIAL',
"category": "Object"}
diff --git a/paint_palette.py b/paint_palette.py
index 92de64b0..c8c6caba 100644
--- a/paint_palette.py
+++ b/paint_palette.py
@@ -29,7 +29,7 @@ bl_info = {
"description": "Palettes for color and weight paint modes",
"warning": "",
"wiki_url": "http://wiki.blender.org/index.php/Extensions:2.6/Py/Scripts/Paint/Palettes",
- "tracker_url": "http://projects.blender.org/tracker/index.php?func=detail&aid=25908",
+ "tracker_url": "https://developer.blender.org/T25908",
"category": "Paint"}
"""
diff --git a/render_copy_settings/__init__.py b/render_copy_settings/__init__.py
index 9b42123a..38895c5e 100644
--- a/render_copy_settings/__init__.py
+++ b/render_copy_settings/__init__.py
@@ -24,11 +24,13 @@ bl_info = {
"version": (0, 1, 5),
"blender": (2, 65, 9),
"location": "Render buttons (Properties window)",
- "description": "Allows to copy a selection of render settings from current scene to others.",
- "wiki_url": "http://wiki.blender.org/index.php/Extensions:2.6/Py/Scripts/Render/Copy Settings",
- "tracker_url": "http://projects.blender.org/tracker/index.php?func=detail&aid=25832",
- "category": "Render",
-}
+ "description": "Allows to copy a selection of render settings "
+ "from current scene to others.",
+ "wiki_url": "http://wiki.blender.org/index.php/Extensions:2.6/Py/"
+ "Scripts/Render/Copy Settings",
+ "tracker_url": "https://developer.blender.org/T25832",
+ "category": "Render"}
+
if "bpy" in locals():
import imp
diff --git a/render_povray/__init__.py b/render_povray/__init__.py
index 16a1e5da..1aa72f17 100644
--- a/render_povray/__init__.py
+++ b/render_povray/__init__.py
@@ -20,18 +20,19 @@
bl_info = {
"name": "POV-Ray 3.7",
- "author": "Campbell Barton, Silvio Falcinelli, Maurice Raybaud, Constantin Rahn, Bastien Montagne",
+ "author": "Campbell Barton, Silvio Falcinelli, Maurice Raybaud, "
+ "Constantin Rahn, Bastien Montagne",
"version": (0, 0, 9),
"blender": (2, 57, 0),
"location": "Render > Engine > POV-Ray 3.7",
"description": "Basic POV-Ray 3.7 integration for blender",
"warning": "both POV-Ray 3.7 and this script are beta",
"wiki_url": "http://wiki.blender.org/index.php/Extensions:2.6/Py/"
- "Scripts/Render/POV-Ray",
- "tracker_url": "https://projects.blender.org/tracker/index.php?"
- "func=detail&aid=23145",
+ "Scripts/Render/POV-Ray",
+ "tracker_url": "https://developer.blender.org/T23145",
"category": "Render"}
+
if "bpy" in locals():
import imp
imp.reload(ui)
@@ -54,7 +55,7 @@ else:
from . import ui
from . import render
from . import update_files
-
+
###############################################################################
diff --git a/render_renderfarmfi/__init__.py b/render_renderfarmfi/__init__.py
index 702593f4..d09892ec 100644
--- a/render_renderfarmfi/__init__.py
+++ b/render_renderfarmfi/__init__.py
@@ -18,16 +18,16 @@
bl_info = {
"name": "Renderfarm.fi",
- "author": "Nathan Letwory <nathan@letworyinteractive.com>, Jesse Kaukonen <jesse.kaukonen@gmail.com>",
+ "author": "Nathan Letwory <nathan@letworyinteractive.com>, "
+ "Jesse Kaukonen <jesse.kaukonen@gmail.com>",
"version": (23,),
"blender": (2, 63, 0),
"location": "Render > Engine > Renderfarm.fi",
"description": "Send .blend as session to http://www.renderfarm.fi to render",
"warning": "",
- "wiki_url": "http://wiki.blender.org/index.php/Extensions:2.5/Py/"\
+ "wiki_url": "http://wiki.blender.org/index.php/Extensions:2.5/Py/"
"Scripts/Render/Renderfarm.fi",
- "tracker_url": "https://projects.blender.org/tracker/index.php?"\
- "func=detail&aid=22927",
+ "tracker_url": "https://developer.blender.org/T22927",
"category": "Render"}
"""
diff --git a/space_view3d_3d_navigation.py b/space_view3d_3d_navigation.py
index 8b229ca5..93c6afc7 100644
--- a/space_view3d_3d_navigation.py
+++ b/space_view3d_3d_navigation.py
@@ -1,6 +1,6 @@
# 3D NAVIGATION TOOLBAR v1.2 - 3Dview Addon - Blender 2.5x
#
-# THIS SCRIPT IS LICENSED UNDER GPL,
+# THIS SCRIPT IS LICENSED UNDER GPL,
# please read the license block.
#
# ##### BEGIN GPL LICENSE BLOCK #####
@@ -29,10 +29,9 @@ bl_info = {
"location": "View3D > Tool Shelf > 3D Nav",
"description": "Navigate the Camera & 3D View from the Toolshelf",
"warning": "",
- "wiki_url": "http://wiki.blender.org/index.php/Extensions:2.6/Py/"\
+ "wiki_url": "http://wiki.blender.org/index.php/Extensions:2.6/Py/"
"Scripts/3D_interaction/3D_Navigation",
- "tracker_url": "http://projects.blender.org/tracker/index.php?"\
- "func=detail&aid=23530",
+ "tracker_url": "https://developer.blender.org/T23530",
"category": "3D View"}
# import the basic library
@@ -49,11 +48,11 @@ class VIEW3D_PT_3dnavigationPanel(bpy.types.Panel):
layout = self.layout
view = context.space_data
-# Triple boutons
+# Triple boutons
col = layout.column(align=True)
col.operator("view3d.viewnumpad", text="View Camera", icon='CAMERA_DATA').type='CAMERA'
col.operator("view3d.localview", text="View Global/Local")
- col.operator("view3d.view_persportho", text="View Persp/Ortho")
+ col.operator("view3d.view_persportho", text="View Persp/Ortho")
# group of 6 buttons
col = layout.column(align=True)
@@ -72,27 +71,27 @@ class VIEW3D_PT_3dnavigationPanel(bpy.types.Panel):
col = layout.column(align=True)
col.label(text="View to Object:")
col.prop(view, "lock_object", text="")
- col.operator("view3d.view_selected", text="View to Selected")
-
+ col.operator("view3d.view_selected", text="View to Selected")
+
col = layout.column(align=True)
col.label(text="Cursor:")
-
+
row = col.row()
row.operator("view3d.snap_cursor_to_center", text="Center")
row.operator("view3d.view_center_cursor", text="View")
-
+
col.operator("view3d.snap_cursor_to_selected", text="Cursor to Selected")
# register the class
def register():
bpy.utils.register_module(__name__)
-
- pass
+
+ pass
def unregister():
bpy.utils.unregister_module(__name__)
-
- pass
-if __name__ == "__main__":
+ pass
+
+if __name__ == "__main__":
register()
diff --git a/space_view3d_copy_attributes.py b/space_view3d_copy_attributes.py
index 49bc1b22..d5139383 100644
--- a/space_view3d_copy_attributes.py
+++ b/space_view3d_copy_attributes.py
@@ -27,8 +27,7 @@ bl_info = {
"description": "Copy Attributes Menu from Blender 2.4",
"wiki_url": "http://wiki.blender.org/index.php/Extensions:2.6/Py/"
"Scripts/3D_interaction/Copy_Attributes_Menu",
- "tracker_url": "https://projects.blender.org/tracker/index.php?"
- "func=detail&aid=22588",
+ "tracker_url": "https://developer.blender.org/T22588",
"category": "3D View"}
import bpy
diff --git a/space_view3d_materials_utils.py b/space_view3d_materials_utils.py
index 8d594650..9d275f97 100644
--- a/space_view3d_materials_utils.py
+++ b/space_view3d_materials_utils.py
@@ -30,8 +30,7 @@ bl_info = {
"warning": "Buggy, Broken in Cycles mode",
"wiki_url": "http://wiki.blender.org/index.php/Extensions:2.6/Py/"
"Scripts/3D interaction/Materials Utils",
- "tracker_url": "https://projects.blender.org/tracker/index.php?"
- "func=detail&aid=22140",
+ "tracker_url": "https://developer.blender.org/T22140",
"category": "Material"}
"""
diff --git a/space_view3d_math_vis/__init__.py b/space_view3d_math_vis/__init__.py
index 56828951..226eafb0 100644
--- a/space_view3d_math_vis/__init__.py
+++ b/space_view3d_math_vis/__init__.py
@@ -26,12 +26,12 @@ bl_info = {
"location": "View3D > Tool Shelf or Console",
"description": "Display console defined mathutils variables in the 3D view",
"wiki_url": "http://wiki.blender.org/index.php/Extensions:2.6/Py/"
- "Scripts/3D_interaction/Math_Viz",
- "tracker_url": "http://projects.blender.org/tracker/index.php?"
- "func=detail&aid=25545",
+ "Scripts/3D_interaction/Math_Viz",
+ "tracker_url": "https://developer.blender.org/T25545",
"support": "OFFICIAL",
"category": "3D View"}
+
if "bpy" in locals():
import imp
imp.reload(utils)
diff --git a/space_view3d_panel_measure.py b/space_view3d_panel_measure.py
index 6897871a..008cd108 100644
--- a/space_view3d_panel_measure.py
+++ b/space_view3d_panel_measure.py
@@ -34,8 +34,7 @@ bl_info = {
"warning": "Script needs repairs",
"wiki_url": "http://wiki.blender.org/index.php/Extensions:2.6/Py/" \
"Scripts/3D_interaction/Panel_Measure",
- "tracker_url": "https://projects.blender.org/tracker/index.php?" \
- "func=detail&aid=21445",
+ "tracker_url": "https://developer.blender.org/T21445",
"category": "3D View"}
"""
diff --git a/space_view3d_screencast_keys.py b/space_view3d_screencast_keys.py
index f568c31a..a7637afc 100644
--- a/space_view3d_screencast_keys.py
+++ b/space_view3d_screencast_keys.py
@@ -26,11 +26,10 @@ bl_info = {
"location": "3D View > Properties Panel > Screencast Keys",
"warning": "",
"description": "Display keys pressed in the 3D View, "
- "useful for screencasts.",
- "wiki_url": "http://wiki.blender.org/index.php/Extensions:2.6/"
- "Py/Scripts/3D_interaction/Screencast_Key_Status_Tool",
- "tracker_url": "http://projects.blender.org/tracker/index.php?"
- "func=detail&aid=21612",
+ "useful for screencasts.",
+ "wiki_url": "http://wiki.blender.org/index.php/Extensions:2.6/Py/"
+ "Scripts/3D_interaction/Screencast_Key_Status_Tool",
+ "tracker_url": "https://developer.blender.org/T21612",
"category": "3D View"}
import bgl
@@ -98,7 +97,7 @@ def draw_callback_px_text(self, context):
shift = 0
# we want to make sure we can shift vertically the text if the mouse is big,
- # but don't care if aligned to right
+ # but don't care if aligned to right
if mouse_size > font_size*row_count and not sc.screencast_keys_mouse_position == 'right':
shift = (mouse_size - font_size*row_count) / 2
@@ -126,7 +125,7 @@ def draw_callback_px_text(self, context):
else:
break
- # remove blurriness
+ # remove blurriness
# disable shadows so they don't appear all over blender
blf.blur(0,0)
diff --git a/space_view3d_spacebar_menu.py b/space_view3d_spacebar_menu.py
index 6bff10d0..2d8c5991 100644
--- a/space_view3d_spacebar_menu.py
+++ b/space_view3d_spacebar_menu.py
@@ -26,10 +26,9 @@ bl_info = {
"location": "View3D > Spacebar Key",
"description": "Context Sensitive Spacebar Menu",
"warning": "",
- "wiki_url": "http://wiki.blender.org/index.php/Extensions:2.6/Py/"\
+ "wiki_url": "http://wiki.blender.org/index.php/Extensions:2.6/Py/"
"Scripts/3D_interaction/Dynamic_Spacebar_Menu",
- "tracker_url": "https://projects.blender.org/tracker/index.php?"\
- "func=detail&aid=22060",
+ "tracker_url": "https://developer.blender.org/T22060",
"category": "3D View"}
import bpy
diff --git a/system_blend_info.py b/system_blend_info.py
index 8e7ecc33..f6487d02 100644
--- a/system_blend_info.py
+++ b/system_blend_info.py
@@ -30,8 +30,7 @@ bl_info = {
"warning": "",
"wiki_url": 'http://wiki.blender.org/index.php/Extensions:2.6/Py/' \
'Scripts/System/Blend Info',
- "tracker_url": "https://projects.blender.org/tracker/index.php?" \
- "func=detail&aid=22102",
+ "tracker_url": "https://developer.blender.org/T22102",
"category": "System"}
import bpy
diff --git a/system_property_chart.py b/system_property_chart.py
index 4ed28905..34de6d82 100644
--- a/system_property_chart.py
+++ b/system_property_chart.py
@@ -29,8 +29,7 @@ bl_info = {
"warning": "",
"wiki_url": ("http://wiki.blender.org/index.php/Extensions:2.6/Py/"
"Scripts/System/Object Property Chart"),
- "tracker_url": ("https://projects.blender.org/tracker/index.php?"
- "func=detail&aid=22701"),
+ "tracker_url": "https://developer.blender.org/T22701",
"category": "System"}
"""List properties of selected objects"""
diff --git a/texture_paint_layer_manager.py b/texture_paint_layer_manager.py
index 0a4e23a5..06ff1100 100644
--- a/texture_paint_layer_manager.py
+++ b/texture_paint_layer_manager.py
@@ -7,7 +7,7 @@ bl_info = {
"description": "Adds a layer manager for image based texture slots in paint and quick add layer tools",
"warning": "",
"wiki_url": "http://wiki.blender.org/index.php/Extensions:2.6/Py/Scripts/3D_interaction/Texture_paint_layers",
- "tracker_url": "http://projects.blender.org/tracker/index.php?func=detail&aid=26789",
+ "tracker_url": "https://developer.blender.org/T26789",
"category": "Paint"}
diff --git a/ui_translate/__init__.py b/ui_translate/__init__.py
index 99f608b4..71c42185 100644
--- a/ui_translate/__init__.py
+++ b/ui_translate/__init__.py
@@ -24,11 +24,11 @@ bl_info = {
"version": (1, 1, 1),
"blender": (2, 66, 6),
"location": "Main \"File\" menu, text editor, any UI control",
- "description": "Allow to manage UI translations directly from Blender (update main po files, "
- "update scripts' translations, etc.)",
+ "description": "Allow to manage UI translations directly from Blender "
+ "(update main po files, update scripts' translations, etc.)",
"warning": "Still in development, not all features are fully implemented yet!",
"wiki_url": "http://wiki.blender.org/index.php/Dev:Doc/How_to/Translate_Blender",
- "tracker_url": "http://projects.blender.org/tracker/?atid=498&group_id=9&func=browse",
+ "tracker_url": "",
"support": 'OFFICIAL',
"category": "System"}
diff --git a/ui_translate/edit_translation.py b/ui_translate/edit_translation.py
index 03c42d00..b4d02a60 100644
--- a/ui_translate/edit_translation.py
+++ b/ui_translate/edit_translation.py
@@ -1,4 +1,4 @@
-# ##### BEGIN GPL LICENSE BLOCK #####
+# ##### BEGIN GPL LICENSE BLOCK #####
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
@@ -85,7 +85,7 @@ class UI_OT_i18n_edittranslation_update_mo(bpy.types.Operator):
class UI_OT_i18n_edittranslation(bpy.types.Operator):
- """Translate the label and tool tip of the property defined by given 'parameters'"""
+ """Translate the label and tooltip of the property defined by given 'parameters'"""
bl_idname = "ui.edittranslation"
bl_label = "Edit Translation"
diff --git a/uv_bake_texture_to_vcols.py b/uv_bake_texture_to_vcols.py
index 8a9a5aea..3bb84af2 100644
--- a/uv_bake_texture_to_vcols.py
+++ b/uv_bake_texture_to_vcols.py
@@ -22,24 +22,25 @@
Bake UV-Texture to Vertex Colors Addon
Contact: p_boelens@msn.com
-Information: http://projects.blender.org/tracker/index.php?func=detail&aid=28211
+Information: https://developer.blender.org/T28211
Contributor(s): Patrick Boelens, CoDEmanX.
-
+
All rights reserved.
"""
bl_info = {
"name": "Bake UV-Texture to Vertex Colors",
- "description": "Bakes the colors of the active UV Texture to a Vertex Color layer. ",
+ "description": "Bakes the colors of the active UV Texture "
+ "to a Vertex Color layer.",
"author": "Patrick Boelens, CoDEmanX",
"version": (0, 6),
"blender": (2, 63, 0),
"location": "3D View > Vertex Paint > Toolshelf > Bake",
"warning": "Requires image texture, generated textures aren't supported.",
- "wiki_url": "http://wiki.blender.org/index.php?title=Extensions:2.6/"
- "Py/Scripts/UV/Bake_Texture_to_Vertex_Colors",
- "tracker_url": "http://projects.blender.org/tracker/index.php?func=detail&aid=28211",
+ "wiki_url": "http://wiki.blender.org/index.php?title=Extensions:2.6/Py/"
+ "Scripts/UV/Bake_Texture_to_Vertex_Colors",
+ "tracker_url": "https://developer.blender.org/T28211",
"category": "UV"}
import bpy
@@ -50,23 +51,26 @@ from colorsys import rgb_to_hsv, hsv_to_rgb
class UV_OT_bake_texture_to_vcols(bpy.types.Operator):
bl_idname = "uv.bake_texture_to_vcols"
bl_label = "Bake UV-Texture to Vertex Colors"
- bl_description = "Bake active UV-Texture to new Vertex Color layer (requires image texture)"
+ bl_description = "Bake active UV-Texture to new Vertex Color layer "\
+ "(requires image texture)"
bl_options = {'REGISTER', 'UNDO'}
-
- replace_active_layer = BoolProperty(name="Replace layer",
- description="Overwrite active Vertex Color layer",
- default=True)
-
- mappingModes = [("CLIP", "Clip", "Don't affect vertices who's UV-coordinates are out of bounds."),
- ("REPEAT", "Repeat", "Tile the image so that each vertex is accounted for."),
- ("EXTEND", "Extend", "Extends the edges of the image to the UV-coordinates.")
- ]
-
- mappingMode = EnumProperty(items=mappingModes,
- default="CLIP",
- name="Mapping",
- description="The mode to use for baking vertices who's UV-coordinates are out of bounds.")
-
+
+ replace_active_layer = BoolProperty(
+ name="Replace layer",
+ description="Overwrite active Vertex Color layer",
+ default=True)
+
+ mappingModes = [
+ ("CLIP", "Clip", "Don't affect vertices who's UV-coordinates are out of bounds."),
+ ("REPEAT", "Repeat", "Tile the image so that each vertex is accounted for."),
+ ("EXTEND", "Extend", "Extends the edges of the image to the UV-coordinates.")]
+
+ mappingMode = EnumProperty(
+ items=mappingModes,
+ default="CLIP",
+ name="Mapping",
+ description="The mode to use for baking vertices who's UV-coordinates are out of bounds.")
+
blendingModes = [("MIX", "Mix", ""),
("ADD", "Add", ""),
("SUBTRACT", "Subtract", ""),
@@ -84,19 +88,20 @@ class UV_OT_bake_texture_to_vcols(bpy.types.Operator):
("SOFT_LIGHT", "Soft Light", ""),
("LINEAR_LIGHT", "Linear Light", "")
]
-
- blendingMode = EnumProperty(items=blendingModes,
- default="MULTIPLY",
- name="Blend Type",
- description="The blending mode to use when baking")
-
+
+ blendingMode = EnumProperty(
+ items=blendingModes,
+ default="MULTIPLY",
+ name="Blend Type",
+ description="The blending mode to use when baking")
+
mirror_x = BoolProperty(name="Mirror X", description="Mirror the image on the X-axis.")
mirror_y = BoolProperty(name="Mirror Y", description="Mirror the image on the Y-axis.")
-
+
@classmethod
def poll(self, context):
return (context.object and
- context.object.type == 'MESH' and
+ context.object.type == 'MESH' and
context.mode != 'EDIT_MESH' and
context.object.data.uv_layers.active and
context.object.data.uv_textures.active)
@@ -108,39 +113,45 @@ class UV_OT_bake_texture_to_vcols(bpy.types.Operator):
vertex_colors = obdata.vertex_colors.active
else:
vertex_colors = obdata.vertex_colors.new(name="Baked UV texture")
-
+
if not vertex_colors:
+
# Can't add more than 17 VCol layers
- self.report({'ERROR'}, "Couldn't add another Vertex Color layer,\n"
- "Please remove an existing layer or replace active.")
+ self.report({'ERROR'},
+ "Couldn't add another Vertex Color layer,\n"
+ "Please remove an existing layer or replace active.")
+
return {'CANCELLED'}
-
+
obdata.vertex_colors.active = vertex_colors
-
+
uv_images = {}
for uv_tex in obdata.uv_textures.active.data:
- if uv_tex.image and uv_tex.image.name not in uv_images and uv_tex.image.pixels:
-
- uv_images[uv_tex.image.name] = (uv_tex.image.size[0],
- uv_tex.image.size[1],
- uv_tex.image.pixels[:]
- # Accessing pixels directly is far too slow.
- # Copied to new array for massive performance-gain.
- )
-
+ if (uv_tex.image and
+ uv_tex.image.name not in uv_images and
+ uv_tex.image.pixels):
+
+ uv_images[uv_tex.image.name] = (
+ uv_tex.image.size[0],
+ uv_tex.image.size[1],
+ uv_tex.image.pixels[:]
+ # Accessing pixels directly is far too slow.
+ # Copied to new array for massive performance-gain.
+ )
+
for p in obdata.polygons:
img = obdata.uv_textures.active.data[p.index].image
if not img:
continue
-
+
image_size_x, image_size_y, uv_pixels = uv_images[img.name]
-
+
for loop in p.loop_indices:
-
+
co = obdata.uv_layers.active.data[loop].uv
x_co = round(co[0] * (image_size_x - 1))
y_co = round(co[1] * (image_size_y - 1))
-
+
if x_co < 0 or x_co >= image_size_x or y_co < 0 or y_co >= image_size_y:
if self.mappingMode == 'CLIP':
continue
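Editor's note on the pixel cache built in the hunk above and the flat indexing used a few hunks below: Image.pixels is a flat RGBA float sequence, so pixel (x, y) of a width-wide image starts at index (width * y + x) * 4, and copying the sequence into a plain tuple once avoids the slow per-element property access the inline comment mentions. A small sketch under those assumptions, not part of the patch:

    def sample_pixel(pixels, width, x, y):
        """Return (r, g, b, a) for pixel (x, y) from a flat RGBA float sequence."""
        i = (width * y + x) * 4
        return pixels[i], pixels[i + 1], pixels[i + 2], pixels[i + 3]

    # cached = image.pixels[:]              # one slow copy out of Blender's RNA
    # r, g, b, a = sample_pixel(cached, image.size[0], x_co, y_co)  # fast lookups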
@@ -158,47 +169,47 @@ class UV_OT_bake_texture_to_vcols(bpy.types.Operator):
y_co = image_size_y - 1
if y_co < 0:
y_co = 0
-
+
if self.mirror_x:
x_co = image_size_x -1 - x_co
-
+
if self.mirror_y:
y_co = image_size_y -1 - y_co
-
+
col_out = vertex_colors.data[loop].color
-
+
pixelNumber = (image_size_x * y_co) + x_co
r = uv_pixels[pixelNumber*4]
g = uv_pixels[pixelNumber*4 + 1]
b = uv_pixels[pixelNumber*4 + 2]
a = uv_pixels[pixelNumber*4 + 3]
-
+
col_in = r, g, b # texture-color
col_result = [r,g,b] # existing / 'base' color
-
+
if self.blendingMode == 'MIX':
col_result = col_in
-
+
elif self.blendingMode == 'ADD':
col_result[0] = col_in[0] + col_out[0]
col_result[1] = col_in[1] + col_out[1]
col_result[2] = col_in[2] + col_out[2]
-
+
elif self.blendingMode == 'SUBTRACT':
col_result[0] = col_in[0] - col_out[0]
col_result[1] = col_in[1] - col_out[1]
col_result[2] = col_in[2] - col_out[2]
-
+
elif self.blendingMode == 'MULTIPLY':
col_result[0] = col_in[0] * col_out[0]
col_result[1] = col_in[1] * col_out[1]
col_result[2] = col_in[2] * col_out[2]
-
+
elif self.blendingMode == 'SCREEN':
col_result[0] = 1 - (1.0 - col_in[0]) * (1.0 - col_out[0])
col_result[1] = 1 - (1.0 - col_in[1]) * (1.0 - col_out[1])
col_result[2] = 1 - (1.0 - col_in[2]) * (1.0 - col_out[2])
-
+
elif self.blendingMode == 'OVERLAY':
if col_out[0] < 0.5:
col_result[0] = col_out[0] * (2.0 * col_in[0])
@@ -212,12 +223,12 @@ class UV_OT_bake_texture_to_vcols(bpy.types.Operator):
col_result[2] = col_out[2] * (2.0 * col_in[2])
else:
col_result[2] = 1.0 - (2.0 * (1.0 - col_in[2])) * (1.0 - col_out[2])
-
+
elif self.blendingMode == 'DIFFERENCE':
col_result[0] = fabs(col_in[0] - col_out[0])
col_result[1] = fabs(col_in[1] - col_out[1])
col_result[2] = fabs(col_in[2] - col_out[2])
-
+
elif self.blendingMode == 'DIVIDE':
if(col_in[0] != 0.0):
col_result[0] = col_out[0] / col_in[0]
@@ -225,7 +236,7 @@ class UV_OT_bake_texture_to_vcols(bpy.types.Operator):
col_result[0] = col_out[1] / col_in[1]
if(col_in[2] != 0.0):
col_result[2] = col_out[2] / col_in[2]
-
+
elif self.blendingMode == 'DARKEN':
if col_in[0] < col_out[0]:
col_result[0] = col_in[0]
@@ -239,8 +250,8 @@ class UV_OT_bake_texture_to_vcols(bpy.types.Operator):
col_result[2] = col_in[2]
else:
col_result[2] = col_out[2]
-
-
+
+
elif self.blendingMode == 'LIGHTEN':
if col_in[0] > col_out[0]:
col_result[0] = col_in[0]
@@ -254,42 +265,42 @@ class UV_OT_bake_texture_to_vcols(bpy.types.Operator):
col_result[2] = col_in[2]
else:
col_result[2] = col_out[2]
-
+
elif self.blendingMode == 'HUE':
hsv_in = rgb_to_hsv(col_in[0], col_in[1], col_in[2])
hsv_out = rgb_to_hsv(col_out[0], col_out[1], col_out[2])
hue = hsv_in[0]
col_result = hsv_to_rgb(hue, hsv_out[1], hsv_out[2])
-
+
elif self.blendingMode == 'SATURATION':
hsv_in = rgb_to_hsv(col_in[0], col_in[1], col_in[2])
hsv_out = rgb_to_hsv(col_out[0], col_out[1], col_out[2])
sat = hsv_in[1]
col_result = hsv_to_rgb(hsv_out[0], sat, hsv_out[2])
-
+
elif self.blendingMode == 'VALUE':
hsv_in = rgb_to_hsv(col_in[0], col_in[1], col_in[2])
hsv_out = rgb_to_hsv(col_out[0], col_out[1], col_out[2])
val = hsv_in[2]
col_result = hsv_to_rgb(hsv_out[0], hsv_out[1], val)
-
+
elif self.blendingMode == 'COLOR':
hsv_in = rgb_to_hsv(col_in[0], col_in[1], col_in[2])
hsv_out = rgb_to_hsv(col_out[0], col_out[1], col_out[2])
hue = hsv_in[0]
sat = hsv_in[1]
col_result = hsv_to_rgb(hue, sat, hsv_out[2])
-
+
elif self.blendingMode == 'SOFT_LIGHT':
scr = 1 - (1.0 - col_in[0]) * (1.0 - col_out[0])
scg = 1 - (1.0 - col_in[1]) * (1.0 - col_out[1])
scb = 1 - (1.0 - col_in[2]) * (1.0 - col_out[2])
-
+
col_result[0] = (1.0 - col_out[0]) * (col_in[0] * col_out[0]) + (col_out[0] * scr)
col_result[1] = (1.0 - col_out[1]) * (col_in[1] * col_out[1]) + (col_out[1] * scg)
col_result[2] = (1.0 - col_out[2]) * (col_in[2] * col_out[2]) + (col_out[2] * scb)
-
-
+
+
elif self.blendingMode == 'LINEAR_LIGHT':
if col_in[0] > 0.5:
col_result[0] = col_out[0] + 2.0 * (col_in[0] - 0.5)
@@ -303,16 +314,16 @@ class UV_OT_bake_texture_to_vcols(bpy.types.Operator):
col_result[2] = col_out[2] + 2.0 * (col_in[2] - 0.5)
else:
col_result[2] = col_out[2] + 2.0 * (col_in[2] - 1.0)
-
+
# Add alpha color
a_inverted = 1 - a
alpha_color = context.scene.uv_bake_alpha_color
col_result = (col_result[0] * a + alpha_color[0] * a_inverted,
col_result[1] * a + alpha_color[1] * a_inverted,
col_result[2] * a + alpha_color[2] * a_inverted)
-
+
vertex_colors.data[loop].color = col_result
-
+
return {'FINISHED'}
class VIEW3D_PT_tools_uv_bake_texture_to_vcols(bpy.types.Panel):
@@ -320,11 +331,11 @@ class VIEW3D_PT_tools_uv_bake_texture_to_vcols(bpy.types.Panel):
bl_space_type = "VIEW_3D"
bl_region_type = "TOOLS"
bl_options = {'DEFAULT_CLOSED'}
-
+
@classmethod
def poll(self, context):
return(context.mode == 'PAINT_VERTEX')
-
+
def draw(self, context):
layout = self.layout
col = layout.column()
@@ -334,7 +345,8 @@ class VIEW3D_PT_tools_uv_bake_texture_to_vcols(bpy.types.Panel):
def register():
bpy.utils.register_module(__name__)
- bpy.types.Scene.uv_bake_alpha_color = FloatVectorProperty(name="Alpha Color",
+ bpy.types.Scene.uv_bake_alpha_color = FloatVectorProperty(
+ name="Alpha Color",
description="Color to be used for transparency",
subtype='COLOR',
min=0.0,
@@ -345,4 +357,4 @@ def unregister():
del bpy.types.Scene.uv_bake_alpha_color
if __name__ == "__main__":
- register()
\ No newline at end of file
+ register()
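Editor's note: execute() above spells out the per-channel blend math inline for every mode; extracted as standalone helpers (floats in 0..1, col_out being the existing base colour and col_in the sampled texture colour), the less obvious cases look roughly like the sketch below, which is illustrative rather than part of the commit. Incidentally, the DIVIDE branch visible in one hunk assigns the green-channel result to col_result[0]; that looks like a pre-existing typo (likely meant col_result[1]) and is not something this clean-up touches.

    def blend_screen(base, blend):
        return 1.0 - (1.0 - blend) * (1.0 - base)

    def blend_overlay(base, blend):
        # dark base channels are multiplied, bright ones are screened
        if base < 0.5:
            return base * (2.0 * blend)
        return 1.0 - (2.0 * (1.0 - blend)) * (1.0 - base)

    def apply_alpha(col_result, alpha_color, a):
        # final step in execute(): mix with the scene's uv_bake_alpha_color
        return tuple(c * a + ac * (1.0 - a)
                     for c, ac in zip(col_result, alpha_color))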
diff --git a/uv_texture_atlas.py b/uv_texture_atlas.py
index 8bdc1b4a..1fc34c37 100644
--- a/uv_texture_atlas.py
+++ b/uv_texture_atlas.py
@@ -25,7 +25,7 @@ bl_info = {
"location": "Properties > Render",
"description": "A simple Texture Atlas for unwrapping many objects. It creates additional UV",
"wiki_url": "http://wiki.blender.org/index.php/Extensions:2.6/Py/Scripts/UV/TextureAtlas",
- "tracker_url": "http://projects.blender.org/scm/viewvc.php/trunk/py/scripts/addons/uv_texture_atlas.py?view=log&root=bf-extensions",
+ "tracker_url": "https://developer.blender.org/T32494",
"category": "UV"}
import bpy