git.blender.org/blender.git
author     Campbell Barton <ideasman42@gmail.com>    2006-05-21 03:24:26 +0400
committer  Campbell Barton <ideasman42@gmail.com>    2006-05-21 03:24:26 +0400
commit     55755954ed2419399e16f23672848939c78ec524 (patch)
tree       c3f83a6d9b8d686e5d7871302c6e7dd088cf9bcb /release/scripts/bpymodules/BPyMesh_redux.py
parent     f3ae4d0f4d0e89d07c7609cf38a7fbf11451cc90 (diff)
Optional reuse list for meshCalcNormals, which makes decimation about 5% faster.
Workaround for a problem where badly predicted positions are further than half the edge length; in these cases, just collapse to the weighted middle of the edge. Added docs for "PolyReduce" (uses BPyMesh_Redux) and WIP docs for AutoTex Layout. http://mediawiki.blender.org/index.php/Manual/PartXIII/Modelling_Scripts
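Below is a minimal, self-contained sketch of the collapse placement this commit changes: predict a collapse point from the two vertex normals, then fall back to the edge midpoint when the prediction lands further than half the edge length from that midpoint. This is not the actual BPyMesh_redux.py code and does not use Blender's Mathutils; vectors are plain 3-tuples and the helper names (predict_collapse_loc and friends) are hypothetical. The fallback here uses the plain midpoint, as in the added lines of the diff.

    import math

    def sub(a, b):   return (a[0] - b[0], a[1] - b[1], a[2] - b[2])
    def add(a, b):   return (a[0] + b[0], a[1] + b[1], a[2] + b[2])
    def scale(a, s): return (a[0] * s, a[1] * s, a[2] * s)
    def cross(a, b):
        return (a[1] * b[2] - a[2] * b[1],
                a[2] * b[0] - a[0] * b[2],
                a[0] * b[1] - a[1] * b[0])
    def length(a):   return math.sqrt(a[0] ** 2 + a[1] ** 2 + a[2] ** 2)
    def normalized(a):
        l = length(a)
        return scale(a, 1.0 / l) if l else a

    def predict_collapse_loc(v1co, v2co, v1no, v2no):
        """Predicted collapse location for the edge (v1co, v2co),
        with the midpoint fallback for badly predicted positions."""
        edge_len = length(sub(v1co, v2co))
        between = scale(add(v1co, v2co), 0.5)

        # Lines at right angles to each vertex normal; blending them
        # predicts where the surface "wants" the collapsed vertex to sit.
        cv1 = cross(v1no, cross(v1no, sub(v1co, v2co)))
        cv2 = cross(v2no, cross(v2no, sub(v2co, v1co)))
        cv1 = scale(normalized(cv1), edge_len * 0.4)
        cv2 = scale(normalized(cv2), edge_len * 0.4)

        smart_offset_loc = add(between, add(cv1, cv2))

        # Workaround from this commit: if the prediction drifts further
        # than half the edge length, just use the midpoint instead.
        if length(sub(smart_offset_loc, between)) > edge_len / 2.0:
            return between
        return smart_offset_loc

In the real script, the vertex weights w1/w2 then blend this result toward one of the edge's vertices; that step is omitted here for brevity.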
Diffstat (limited to 'release/scripts/bpymodules/BPyMesh_redux.py')
-rw-r--r--  release/scripts/bpymodules/BPyMesh_redux.py  53
1 file changed, 30 insertions(+), 23 deletions(-)
diff --git a/release/scripts/bpymodules/BPyMesh_redux.py b/release/scripts/bpymodules/BPyMesh_redux.py
index 56a4f749872..3f05ba29439 100644
--- a/release/scripts/bpymodules/BPyMesh_redux.py
+++ b/release/scripts/bpymodules/BPyMesh_redux.py
@@ -146,8 +146,11 @@ def redux(ob, REDUX=0.5, BOUNDRY_WEIGHT=5.0, FACE_AREA_WEIGHT=1.0, FACE_TRIANGUL
collapse_edges= collapse_faces= None
+ # So meshCalcNormals can avoid making a new list all the time.
+ reuse_vertNormals= [ Vector() for v in xrange(len(me.verts)) ]
+
while target_face_count <= len(me.faces):
- BPyMesh.meshCalcNormals(me)
+ BPyMesh.meshCalcNormals(me, reuse_vertNormals)
if DO_WEIGHTS:
groupNames, vWeightDict= BPyMesh.meshWeight2Dict(me)
@@ -279,6 +282,10 @@ def redux(ob, REDUX=0.5, BOUNDRY_WEIGHT=5.0, FACE_AREA_WEIGHT=1.0, FACE_TRIANGUL
v1no= ced.v1.co
v2no= ced.v2.co
+ # Basic operation, works fine but not as good as predicting the best place.
+ #between= ((v1co*w1) + (v2co*w2))
+ #ced.collapse_loc= between
+
# Use the vertex weights to bias the new location.
w1= vert_weights[ced.v1.index]
w2= vert_weights[ced.v2.index]
@@ -292,31 +299,31 @@ def redux(ob, REDUX=0.5, BOUNDRY_WEIGHT=5.0, FACE_AREA_WEIGHT=1.0, FACE_TRIANGUL
w2/=wscale
length= ced.length
+ between= (v1co+v2co) * 0.5
+
+ # Collapse
+ # new_location = between # Replace tricky code below. this code predicts the best collapse location.
+
+ # Make lines at right angles to the normals- these 2 lines will intersect and be
+ # the point of collapsing.
+
+ # Enlarge so we know they intersect: ced.length*2
+ cv1= CrossVecs(v1no, CrossVecs(v1no, v1co-v2co))
+ cv2= CrossVecs(v2no, CrossVecs(v2no, v2co-v1co))
+
+ # Scale to be less then the edge lengths.
+ cv1.normalize()
+ cv2.normalize()
+ cv1 = cv1 * (length* 0.4)
+ cv2 = cv2 * (length* 0.4)
+
+ smart_offset_loc= between + (cv1 + cv2)
- if 0:
- between= ((v1co*w1) + (v2co*w2))
+
+ if (smart_offset_loc-between).length > length/2:
+ # New collapse loc is way out, just use midpoint.
ced.collapse_loc= between
else:
- between= (v1co+v2co) * 0.5
-
- # Collapse
- # new_location = between # Replace tricky code below. this code predicts the best collapse location.
-
- # Make lines at right angles to the normals- these 2 lines will intersect and be
- # the point of collapsing.
-
- # Enlarge so we know they intersect: ced.length*2
- cv1= CrossVecs(v1no, CrossVecs(v1no, v1co-v2co))
- cv2= CrossVecs(v2no, CrossVecs(v2no, v2co-v1co))
-
- # Scale to be less then the edge lengths.
- cv1.normalize()
- cv2.normalize()
- cv1 = cv1 * (length* 0.4)
- cv2 = cv2 * (length* 0.4)
-
- smart_offset_loc= between + (cv1 + cv2)
-
# Now we need to blend between smart_offset_loc and w1/w2
# you see were blending between a vert and the edges midpoint, so we cant use a normal weighted blend.
if w1 > 0.5: # between v1 and smart_offset_loc