Welcome to mirror list, hosted at ThFree Co, Russian Federation.

git.blender.org/blender.git - Unnamed repository; edit this file 'description' to name the repository.
summaryrefslogtreecommitdiff
diff options
context:
space:
mode:
authorTamito Kajiyama <rd6t-kjym@asahi-net.or.jp>2014-07-24 06:08:04 +0400
committerTamito Kajiyama <rd6t-kjym@asahi-net.or.jp>2014-07-24 09:29:35 +0400
commitb408d8af31c9fba5898e353c97f95f7ce8dc19c1 (patch)
tree45310106f7f6e59c9f6310e1a58ab5b1d1cadb46 /release
parent4eedec86819ff91f1c96405603650e2833c77b9a (diff)
Freestyle Python API: Updates and speedups for the parameter editor
In addition to D319, this patch updates the parameter editor, the UI of Freestyle. Using new API functionality and experience gained in making D319, this patch provides a quite noticeable speedup for commonly-used Freestyle linestyle modifiers. As this patch touches a lot of code (and mainly the foundations) it is likely that mistakes are made. The patch has been tested with a regression suite for Freestyle (https://github.com/folkertdev/freestyle-regression-tests/tree/master), but testing with scenes used in production is very much appreciated. Differential revision: https://developer.blender.org/D623 Author: flokkievids (Folkert de Vries) Reviewed by: kjym3 (Tamito Kajiyama)
Diffstat (limited to 'release')
-rw-r--r--release/scripts/freestyle/modules/freestyle/functions.py33
-rw-r--r--release/scripts/freestyle/modules/freestyle/predicates.py61
-rw-r--r--release/scripts/freestyle/modules/freestyle/utils.py300
-rw-r--r--release/scripts/freestyle/modules/parameter_editor.py1012
4 files changed, 567 insertions, 839 deletions
diff --git a/release/scripts/freestyle/modules/freestyle/functions.py b/release/scripts/freestyle/modules/freestyle/functions.py
index 9e03f8f5dbb..8511989309d 100644
--- a/release/scripts/freestyle/modules/freestyle/functions.py
+++ b/release/scripts/freestyle/modules/freestyle/functions.py
@@ -91,28 +91,26 @@ from freestyle.utils import integrate
from mathutils import Vector
-
# -- Functions for 0D elements (vertices) -- #
class CurveMaterialF0D(UnaryFunction0DMaterial):
"""
A replacement of the built-in MaterialF0D for stroke creation.
- MaterialF0D does not work with Curves and Strokes. Line color
+ MaterialF0D does not work with Curves and Strokes. Line color
priority is used to pick one of the two materials at material
boundaries.
+
+ Note: expects instances of CurvePoint to be iterated over
"""
def __call__(self, inter):
- cp = inter.object
- assert(isinstance(cp, CurvePoint))
- fe = cp.first_svertex.get_fedge(cp.second_svertex)
+ fe = inter.object.fedge
assert(fe is not None), "CurveMaterialF0D: fe is None"
if fe.is_smooth:
return fe.material
- elif fe.material_right.priority > fe.material_left.priority:
- return fe.material_right
else:
- return fe.material_left
+ right, left = fe.material_right, fe.material_left
+ return right if (right.priority > left.priority) else left
class pyInverseCurvature2DAngleF0D(UnaryFunction0DDouble):
@@ -131,7 +129,7 @@ class pyCurvilinearLengthF0D(UnaryFunction0DDouble):
class pyDensityAnisotropyF0D(UnaryFunction0DDouble):
"""Estimates the anisotropy of density"""
- def __init__(self,level):
+ def __init__(self, level):
UnaryFunction0DDouble.__init__(self)
self.IsoDensity = ReadCompleteViewMapPixelF0D(level)
self.d0Density = ReadSteerableViewMapPixelF0D(0, level)
@@ -145,9 +143,9 @@ class pyDensityAnisotropyF0D(UnaryFunction0DDouble):
c_1 = self.d1Density(inter)
c_2 = self.d2Density(inter)
c_3 = self.d3Density(inter)
- cMax = max(max(c_0,c_1), max(c_2,c_3))
- cMin = min(min(c_0,c_1), min(c_2,c_3))
- return 0 if (c_iso == 0) else (cMax-cMin) / c_iso
+ cMax = max(max(c_0, c_1), max(c_2, c_3))
+ cMin = min(min(c_0, c_1), min(c_2, c_3))
+ return 0 if (c_iso == 0) else (cMax - cMin) / c_iso
class pyViewMapGradientVectorF0D(UnaryFunction0DVec2f):
@@ -163,9 +161,9 @@ class pyViewMapGradientVectorF0D(UnaryFunction0DVec2f):
def __call__(self, iter):
p = iter.object.point_2d
- gx = CF.read_complete_view_map_pixel(self._l, int(p.x+self._step), int(p.y)) - \
+ gx = CF.read_complete_view_map_pixel(self._l, int(p.x + self._step), int(p.y)) - \
CF.read_complete_view_map_pixel(self._l, int(p.x), int(p.y))
- gy = CF.read_complete_view_map_pixel(self._l, int(p.x), int(p.y+self._step)) - \
+ gy = CF.read_complete_view_map_pixel(self._l, int(p.x), int(p.y + self._step)) - \
CF.read_complete_view_map_pixel(self._l, int(p.x), int(p.y))
return Vector((gx, gy))
@@ -184,7 +182,6 @@ class pyViewMapGradientNormF0D(UnaryFunction0DDouble):
CF.read_complete_view_map_pixel(self._l, int(p.x), int(p.y))
return Vector((gx, gy)).length
-
# -- Functions for 1D elements (curves) -- #
@@ -199,11 +196,11 @@ class pyGetSquareInverseProjectedZF1D(UnaryFunction1DDouble):
def __call__(self, inter):
func = GetProjectedZF1D()
z = func(inter)
- return (1.0 - z*z)
+ return (1.0 - pow(z, 2))
class pyDensityAnisotropyF1D(UnaryFunction1DDouble):
- def __init__(self,level, integrationType=IntegrationType.MEAN, sampling=2.0):
+ def __init__(self, level, integrationType=IntegrationType.MEAN, sampling=2.0):
UnaryFunction1DDouble.__init__(self, integrationType)
self._func = pyDensityAnisotropyF0D(level)
self._integration = integrationType
@@ -215,7 +212,7 @@ class pyDensityAnisotropyF1D(UnaryFunction1DDouble):
class pyViewMapGradientNormF1D(UnaryFunction1DDouble):
- def __init__(self,l, integrationType, sampling=2.0):
+ def __init__(self, l, integrationType, sampling=2.0):
UnaryFunction1DDouble.__init__(self, integrationType)
self._func = pyViewMapGradientNormF0D(l)
self._integration = integrationType
diff --git a/release/scripts/freestyle/modules/freestyle/predicates.py b/release/scripts/freestyle/modules/freestyle/predicates.py
index fede3e3e2da..e15820bfdcc 100644
--- a/release/scripts/freestyle/modules/freestyle/predicates.py
+++ b/release/scripts/freestyle/modules/freestyle/predicates.py
@@ -52,6 +52,7 @@ from freestyle.types import (
UnaryPredicate0D,
UnaryPredicate1D,
Id,
+ Interface0DIterator,
)
from freestyle.functions import (
Curvature2DAngleF0D,
@@ -82,11 +83,10 @@ class pyHigherCurvature2DAngleUP0D(UnaryPredicate0D):
def __init__(self, a):
UnaryPredicate0D.__init__(self)
self._a = a
+ self.func = Curvature2DAngleF0D()
def __call__(self, inter):
- func = Curvature2DAngleF0D()
- a = func(inter)
- return (a > self._a)
+ return (self.func(inter) > self._a)
class pyUEqualsUP0D(UnaryPredicate0D):
@@ -102,7 +102,7 @@ class pyUEqualsUP0D(UnaryPredicate0D):
class pyVertexNatureUP0D(UnaryPredicate0D):
- def __init__(self,nature):
+ def __init__(self, nature):
UnaryPredicate0D.__init__(self)
self._nature = nature
@@ -127,7 +127,7 @@ class pyBackTVertexUP0D(UnaryPredicate0D):
class pyParameterUP0DGoodOne(UnaryPredicate0D):
- def __init__(self,pmin,pmax):
+ def __init__(self, pmin, pmax):
UnaryPredicate0D.__init__(self)
self._m = pmin
self._M = pmax
@@ -138,7 +138,7 @@ class pyParameterUP0DGoodOne(UnaryPredicate0D):
class pyParameterUP0D(UnaryPredicate0D):
- def __init__(self,pmin,pmax):
+ def __init__(self, pmin, pmax):
UnaryPredicate0D.__init__(self)
self._m = pmin
self._M = pmax
@@ -154,13 +154,13 @@ class pyParameterUP0D(UnaryPredicate0D):
# -- Unary predicates for 1D elements (curves) -- #
-
class AndUP1D(UnaryPredicate1D):
def __init__(self, *predicates):
UnaryPredicate1D.__init__(self)
self.predicates = predicates
- if len(self.predicates) < 2:
- raise ValueError("Expected two or more UnaryPredicate1D")
+ # there are cases in which only one predicate is supplied (in the parameter editor)
+ if len(self.predicates) < 1:
+ raise ValueError("Expected one or more UnaryPredicate1D, got ", len(predicates))
def __call__(self, inter):
return all(pred(inter) for pred in self.predicates)
@@ -170,8 +170,9 @@ class OrUP1D(UnaryPredicate1D):
def __init__(self, *predicates):
UnaryPredicate1D.__init__(self)
self.predicates = predicates
- if len(self.predicates) < 2:
- raise ValueError("Expected two or more UnaryPredicate1D")
+ # there are cases in which only one predicate is supplied (in the parameter editor)
+ if len(self.predicates) < 1:
+ raise ValueError("Expected one or more UnaryPredicate1D, got ", len(predicates))
def __call__(self, inter):
return any(pred(inter) for pred in self.predicates)
@@ -230,7 +231,7 @@ class pyHigherLengthUP1D(UnaryPredicate1D):
class pyNatureUP1D(UnaryPredicate1D):
- def __init__(self,nature):
+ def __init__(self, nature):
UnaryPredicate1D.__init__(self)
self._nature = nature
self._getNature = CurveNatureF1D()
@@ -244,12 +245,14 @@ class pyHigherNumberOfTurnsUP1D(UnaryPredicate1D):
UnaryPredicate1D.__init__(self)
self._n = n
self._a = a
+ self.func = Curvature2DAngleF0D()
def __call__(self, inter):
- func = Curvature2DAngleF0D()
- it = inter.vertices_begin()
+ it = Interface0DIterator(inter)
# sum the turns, check against n
- return sum(1 for ve in it if func(it) > self._a) > self._n
+ return sum(1 for _ in it if self.func(it) > self._a) > self._n
+ # interesting fact, the line above is 70% faster than:
+ # return sum(self.func(it) > self._a for _ in it) > self._n
class pyDensityUP1D(UnaryPredicate1D):
@@ -345,7 +348,7 @@ class pyZSmallerUP1D(UnaryPredicate1D):
class pyIsOccludedByUP1D(UnaryPredicate1D):
- def __init__(self,id):
+ def __init__(self, id):
UnaryPredicate1D.__init__(self)
if not isinstance(id, Id):
raise TypeError("pyIsOccludedByUP1D expected freestyle.types.Id, not " + type(id).__name__)
@@ -376,7 +379,7 @@ class pyIsOccludedByUP1D(UnaryPredicate1D):
class pyIsInOccludersListUP1D(UnaryPredicate1D):
- def __init__(self,id):
+ def __init__(self, id):
UnaryPredicate1D.__init__(self)
self._id = id
@@ -409,7 +412,7 @@ class pyIsOccludedByIdListUP1D(UnaryPredicate1D):
class pyShapeIdListUP1D(UnaryPredicate1D):
- def __init__(self,idlist):
+ def __init__(self, idlist):
UnaryPredicate1D.__init__(self)
self._funcs = tuple(ShapeUP1D(_id, 0) for _id in idlist)
@@ -417,7 +420,7 @@ class pyShapeIdListUP1D(UnaryPredicate1D):
return any(func(inter) for func in self._funcs)
-## deprecated
+# DEPRECATED
class pyShapeIdUP1D(UnaryPredicate1D):
def __init__(self, _id):
UnaryPredicate1D.__init__(self)
@@ -429,7 +432,7 @@ class pyShapeIdUP1D(UnaryPredicate1D):
class pyHighDensityAnisotropyUP1D(UnaryPredicate1D):
- def __init__(self,threshold, level, sampling=2.0):
+ def __init__(self, threshold, level, sampling=2.0):
UnaryPredicate1D.__init__(self)
self._l = threshold
self.func = pyDensityAnisotropyF1D(level, IntegrationType.MEAN, sampling)
@@ -439,7 +442,7 @@ class pyHighDensityAnisotropyUP1D(UnaryPredicate1D):
class pyHighViewMapGradientNormUP1D(UnaryPredicate1D):
- def __init__(self,threshold, l, sampling=2.0):
+ def __init__(self, threshold, l, sampling=2.0):
UnaryPredicate1D.__init__(self)
self._threshold = threshold
self._GetGradient = pyViewMapGradientNormF1D(l, IntegrationType.MEAN)
@@ -463,8 +466,9 @@ class pyDensityVariableSigmaUP1D(UnaryPredicate1D):
self._sampling = sampling
def __call__(self, inter):
- sigma = (self._sigmaMax-self._sigmaMin)/(self._lmax-self._lmin)*(self._functor(inter)-self._lmin) + self._sigmaMin
- t = (self._tmax-self._tmin)/(self._lmax-self._lmin)*(self._functor(inter)-self._lmin) + self._tmin
+ result = self._functor(inter) - self._lmin
+ sigma = (self._sigmaMax - self._sigmaMin) / (self._lmax - self._lmin) * result + self._sigmaMin
+ t = (self._tmax - self._tmin) / (self._lmax - self._lmin) * result + self._tmin
sigma = max(sigma, self._sigmaMin)
self._func = DensityF1D(sigma, self._integration, self._sampling)
return (self._func(inter) < t)
@@ -480,13 +484,12 @@ class pyClosedCurveUP1D(UnaryPredicate1D):
# -- Binary predicates for 1D elements (curves) -- #
-
class AndBP1D(BinaryPredicate1D):
def __init__(self, *predicates):
BinaryPredicate1D.__init__(self)
self._predicates = predicates
if len(self.predicates) < 2:
- raise ValueError("Expected two or more BinaryPredicate1D")
+ raise ValueError("Expected two or more BinaryPredicate1D, got ", len(predicates))
def __call__(self, i1, i2):
return all(pred(i1, i2) for pred in self._predicates)
@@ -497,7 +500,7 @@ class OrBP1D(BinaryPredicate1D):
BinaryPredicate1D.__init__(self)
self._predicates = predicates
if len(self.predicates) < 2:
- raise ValueError("Expected two or more BinaryPredicate1D")
+ raise ValueError("Expected two or more BinaryPredicate1D, got ", len(predicates))
def __call__(self, i1, i2):
return any(pred(i1, i2) for pred in self._predicates)
@@ -551,11 +554,11 @@ class pyNatureBP1D(BinaryPredicate1D):
class pyViewMapGradientNormBP1D(BinaryPredicate1D):
- def __init__(self,l, sampling=2.0):
+ def __init__(self, l, sampling=2.0):
BinaryPredicate1D.__init__(self)
self._GetGradient = pyViewMapGradientNormF1D(l, IntegrationType.MEAN)
- def __call__(self, i1,i2):
+ def __call__(self, i1, i2):
return (self._GetGradient(i1) > self._GetGradient(i2))
@@ -565,4 +568,4 @@ class pyShuffleBP1D(BinaryPredicate1D):
random.seed = 1
def __call__(self, inter1, inter2):
- return (random.uniform(0,1) < random.uniform(0,1))
+ return (random.uniform(0, 1) < random.uniform(0, 1))
diff --git a/release/scripts/freestyle/modules/freestyle/utils.py b/release/scripts/freestyle/modules/freestyle/utils.py
index 1b576791e9b..b7f4020e997 100644
--- a/release/scripts/freestyle/modules/freestyle/utils.py
+++ b/release/scripts/freestyle/modules/freestyle/utils.py
@@ -27,14 +27,21 @@ from _freestyle import (
integrate,
)
+from freestyle.types import (
+ Interface0DIterator,
+ Stroke,
+ StrokeVertexIterator,
+ )
+
+
from mathutils import Vector
-from functools import lru_cache
+from functools import lru_cache, namedtuple
from math import cos, sin, pi
+from itertools import tee
# -- real utility functions -- #
-
def rgb_to_bw(r, g, b):
""" Method to convert rgb to a bw intensity value. """
return 0.35 * r + 0.45 * g + 0.2 * b
@@ -55,7 +62,6 @@ def bounding_box(stroke):
x, y = zip(*(svert.point for svert in stroke))
return (Vector((min(x), min(y))), Vector((max(x), max(y))))
-
# -- General helper functions -- #
@@ -72,10 +78,12 @@ def phase_to_direction(length):
results.append((phase, Vector((cos(2 * pi * phase), sin(2 * pi * phase)))))
return results
+# A named tuple primitive used for storing data that
+# has an upper and lower bound (eg. thickness, range and certain values)
+BoundedProperty = namedtuple("BoundedProperty", ["min", "max", "delta"])
# -- helper functions for chaining -- #
-
def get_chain_length(ve, orientation):
"""Returns the 2d length of a given ViewEdge """
from freestyle.chainingiterators import pyChainSilhouetteGenericIterator
@@ -112,156 +120,112 @@ def get_chain_length(ve, orientation):
def find_matching_vertex(id, it):
- """Finds the matching vertexn, or returns None """
+ """Finds the matching vertex, or returns None """
return next((ve for ve in it if ve.id == id), None)
-
# -- helper functions for iterating -- #
+def pairwise(iterable, types={Stroke, StrokeVertexIterator}):
+ """Yields a tuple containing the previous and current object """
+ # use .incremented() for types that support it
+ if type(iterable) in types:
+ it = iter(iterable)
+ return zip(it, it.incremented())
+ else:
+ a, b = tee(iterable)
+ next(b, None)
+ return zip(a, b)
-def iter_current_previous(stroke):
- """
- iterates over the given iterator. yields a tuple of the form
- (it, prev, current)
- """
- prev = stroke[0]
- it = Interface0DIterator(stroke)
- for current in it:
- yield (it, prev, current)
+
+def tripplewise(iterable):
+ """Yields a tuple containing the current object and its immediate neighbors """
+ a, b, c = tee(iterable)
+ next(b, None)
+ next(c, None)
+ return zip(a, b, c)
def iter_t2d_along_stroke(stroke):
- """
- Yields the distance between two stroke vertices
- relative to the total stroke length.
- """
+ """ Yields the progress along the stroke """
total = stroke.length_2d
distance = 0.0
- for it, prev, svert in iter_current_previous(stroke):
+ # yield for the comparison from the first vertex to itself
+ yield 0.0
+ for prev, svert in pairwise(stroke):
distance += (prev.point - svert.point).length
- t = min(distance / total, 1.0) if total > 0.0 else 0.0
- yield (it, t)
+ yield min(distance / total, 1.0) if total != 0.0 else 0.0
-def iter_distance_from_camera(stroke, range_min, range_max):
+def iter_distance_from_camera(stroke, range_min, range_max, normfac):
"""
Yields the distance to the camera relative to the maximum
possible distance for every stroke vertex, constrained by
given minimum and maximum values.
"""
- normfac = range_max - range_min # normalization factor
- it = Interface0DIterator(stroke)
- for svert in it:
- distance = svert.point_3d.length # in the camera coordinate
- if distance < range_min:
- t = 0.0
- elif distance > range_max:
- t = 1.0
+ for svert in stroke:
+ # length in the camera coordinate
+ distance = svert.point_3d.length
+ if range_min < distance < range_max:
+ yield (svert, (distance - range_min) / normfac)
else:
- t = (distance - range_min) / normfac
- yield (it, t)
+ yield (svert, 0.0) if range_min > distance else (svert, 1.0)
-def iter_distance_from_object(stroke, object, range_min, range_max):
+def iter_distance_from_object(stroke, location, range_min, range_max, normfac):
"""
yields the distance to the given object relative to the maximum
possible distance for every stroke vertex, constrained by
given minimum and maximum values.
"""
- scene = getCurrentScene()
- mv = scene.camera.matrix_world.copy().inverted() # model-view matrix
- loc = mv * object.location # loc in the camera coordinate
- normfac = range_max - range_min # normalization factor
- it = Interface0DIterator(stroke)
- for svert in it:
- distance = (svert.point_3d - loc).length # in the camera coordinate
- if distance < range_min:
- t = 0.0
- elif distance > range_max:
- t = 1.0
- else:
- t = (distance - range_min) / normfac
- yield (it, t)
-
-
-def iter_material_color(stroke, material_attribute):
- """
- yields the specified material attribute for every stroke vertex.
- the material is taken from the object behind the vertex.
- """
- func = CurveMaterialF0D()
- it = Interface0DIterator(stroke)
- for inter in it:
- material = func(it)
- if material_attribute == 'DIFF':
- color = material.diffuse[0:3]
- elif material_attribute == 'SPEC':
- color = material.specular[0:3]
+ for svert in stroke:
+ distance = (svert.point_3d - location).length # in the camera coordinate
+ if range_min < distance < range_max:
+ yield (svert, (distance - range_min) / normfac)
else:
- raise ValueError("unexpected material attribute: " + material_attribute)
- yield (it, color)
+ yield (svert, 0.0) if distance < range_min else (svert, 1.0)
-def iter_material_value(stroke, material_attribute):
- """
- yields a specific material attribute
- from the vertex' underlying material.
- """
- func = CurveMaterialF0D()
+def iter_material_value(stroke, func, attribute):
+ "Yields a specific material attribute from the vertex' underlying material. "
it = Interface0DIterator(stroke)
for svert in it:
material = func(it)
- if material_attribute == 'DIFF':
- t = rgb_to_bw(*material.diffuse[0:3])
- elif material_attribute == 'DIFF_R':
- t = material.diffuse[0]
- elif material_attribute == 'DIFF_G':
- t = material.diffuse[1]
- elif material_attribute == 'DIFF_B':
- t = material.diffuse[2]
- elif material_attribute == 'SPEC':
- t = rgb_to_bw(*material.specular[0:3])
- elif material_attribute == 'SPEC_R':
- t = material.specular[0]
- elif material_attribute == 'SPEC_G':
- t = material.specular[1]
- elif material_attribute == 'SPEC_B':
- t = material.specular[2]
- elif material_attribute == 'SPEC_HARDNESS':
- t = material.shininess
- elif material_attribute == 'ALPHA':
- t = material.diffuse[3]
+ # main
+ if attribute == 'DIFF':
+ value = rgb_to_bw(*material.diffuse[0:3])
+ elif attribute == 'ALPHA':
+ value = material.diffuse[3]
+ elif attribute == 'SPEC':
+ value = rgb_to_bw(*material.specular[0:3])
+ # diffuse separate
+ elif attribute == 'DIFF_R':
+ value = material.diffuse[0]
+ elif attribute == 'DIFF_G':
+ value = material.diffuse[1]
+ elif attribute == 'DIFF_B':
+ value = material.diffuse[2]
+ # specular separate
+ elif attribute == 'SPEC_R':
+ value = material.specular[0]
+ elif attribute == 'SPEC_G':
+ value = material.specular[1]
+ elif attribute == 'SPEC_B':
+ value = material.specular[2]
+ elif attribute == 'SPEC_HARDNESS':
+ value = material.shininess
else:
- raise ValueError("unexpected material attribute: " + material_attribute)
- yield (it, t)
-
+ raise ValueError("unexpected material attribute: " + attribute)
+ yield (svert, value)
def iter_distance_along_stroke(stroke):
- """
- yields the absolute distance between
- the current and preceding vertex.
- """
+ "Yields the absolute distance along the stroke up to the current vertex."
distance = 0.0
- prev = stroke[0]
- it = Interface0DIterator(stroke)
- for svert in it:
- p = svert.point
- distance += (prev - p).length
- prev = p.copy() # need a copy because the point can be altered
- yield it, distance
-
-
-def iter_triplet(it):
- """
- Iterates over it, yielding a tuple containing
- the current vertex and its immediate neighbors
- """
- prev = next(it)
- current = next(it)
- for succ in it:
- yield prev, current, succ
- prev, current = current, succ
-
+ # the positions need to be copied, because they are changed in the calling function
+ points = tuple(svert.point.copy() for svert in stroke)
+ yield distance
+ for prev, curr in pairwise(points):
+ distance += (prev - curr).length
+ yield distance
# -- mathmatical operations -- #
@@ -272,55 +236,73 @@ def stroke_curvature(it):
K = 1 / R
where R is the radius of the circle going through the current vertex and its neighbors
"""
+ for _ in it:
+ if (it.is_begin or it.is_end):
+ yield 0.0
+ continue
+ else:
+ it.decrement()
+ prev, current, succ = it.object.point.copy(), next(it).point.copy(), next(it).point.copy()
+ # return the iterator in an unchanged state
+ it.decrement()
- if it.is_end or it.is_begin:
- return 0.0
-
- next = it.incremented().point
- prev = it.decremented().point
- current = it.object.point
-
-
- ab = (current - prev)
- bc = (next - current)
- ac = (prev - next)
-
- a, b, c = ab.length, bc.length, ac.length
-
- try:
- area = 0.5 * ab.cross(ac)
- K = (4 * area) / (a * b * c)
- K = bound(0.0, K, 1.0)
+ ab = (current - prev)
+ bc = (succ - current)
+ ac = (prev - succ)
- except ZeroDivisionError:
- K = 0.0
+ a, b, c = ab.length, bc.length, ac.length
- return K
+ try:
+ area = 0.5 * ab.cross(ac)
+ K = (4 * area) / (a * b * c)
+ except ZeroDivisionError:
+ K = 0.0
+ yield abs(K)
-def stroke_normal(it):
+def stroke_normal(stroke):
"""
Compute the 2D normal at the stroke vertex pointed by the iterator
'it'. It is noted that Normal2DF0D computes normals based on
underlying FEdges instead, which is inappropriate for strokes when
they have already been modified by stroke geometry modifiers.
+
+ The returned normals are dynamic: they update when the
+ vertex position (and therefore the vertex normal) changes.
+ for use in geometry modifiers it is advised to
+ cast this generator function to a tuple or list
"""
- # first stroke segment
- it_next = it.incremented()
- if it.is_begin:
- e = it_next.object.point_2d - it.object.point_2d
- n = Vector((e[1], -e[0]))
- return n.normalized()
- # last stroke segment
- it_prev = it.decremented()
- if it_next.is_end:
- e = it.object.point_2d - it_prev.object.point_2d
- n = Vector((e[1], -e[0]))
- return n.normalized()
- # two subsequent stroke segments
- e1 = it_next.object.point_2d - it.object.point_2d
- e2 = it.object.point_2d - it_prev.object.point_2d
- n1 = Vector((e1[1], -e1[0])).normalized()
- n2 = Vector((e2[1], -e2[0])).normalized()
- n = (n1 + n2)
- return n.normalized()
+ n = len(stroke) - 1
+
+ for i, svert in enumerate(stroke):
+ if i == 0:
+ e = stroke[i + 1].point - svert.point
+ yield Vector((e[1], -e[0])).normalized()
+ elif i == n:
+ e = svert.point - stroke[i - 1].point
+ yield Vector((e[1], -e[0])).normalized()
+ else:
+ e1 = stroke[i + 1].point - svert.point
+ e2 = svert.point - stroke[i - 1].point
+ n1 = Vector((e1[1], -e1[0])).normalized()
+ n2 = Vector((e2[1], -e2[0])).normalized()
+ yield (n1 + n2).normalized()
+
+def get_test_stroke():
+ """Returns a static stroke object for testing """
+ from freestyle.types import Stroke, Interface0DIterator, StrokeVertexIterator, SVertex, Id, StrokeVertex
+ # points for our fake stroke
+ points = (Vector((1.0, 5.0, 3.0)), Vector((1.0, 2.0, 9.0)),
+ Vector((6.0, 2.0, 3.0)), Vector((7.0, 2.0, 3.0)),
+ Vector((2.0, 6.0, 3.0)), Vector((2.0, 8.0, 3.0)))
+ ids = (Id(0, 0), Id(1, 1), Id(2, 2), Id(3, 3), Id(4, 4), Id(5, 5))
+
+ stroke = Stroke()
+ it = iter(stroke)
+
+ for svert in map(SVertex, points, ids):
+ stroke.insert_vertex(StrokeVertex(svert), it)
+ it = iter(stroke)
+
+ stroke.update_length()
+ return stroke
diff --git a/release/scripts/freestyle/modules/parameter_editor.py b/release/scripts/freestyle/modules/parameter_editor.py
index 3529221c5b5..92814e87b1c 100644
--- a/release/scripts/freestyle/modules/parameter_editor.py
+++ b/release/scripts/freestyle/modules/parameter_editor.py
@@ -32,6 +32,8 @@ from freestyle.types import (
UnaryPredicate0D,
UnaryPredicate1D,
TVertex,
+ Material,
+ ViewEdge,
)
from freestyle.chainingiterators import (
ChainPredicateIterator,
@@ -41,10 +43,10 @@ from freestyle.chainingiterators import (
)
from freestyle.functions import (
Curvature2DAngleF0D,
- CurveMaterialF0D,
Normal2DF0D,
QuantitativeInvisibilityF1D,
VertexOrientation2DF0D,
+ CurveMaterialF0D,
)
from freestyle.predicates import (
AndUP1D,
@@ -78,70 +80,78 @@ from freestyle.shaders import (
pyBluePrintCirclesShader,
pyBluePrintEllipsesShader,
pyBluePrintSquaresShader,
+ RoundCapShader,
+ SquareCapShader,
)
from freestyle.utils import (
ContextFunctions,
getCurrentScene,
+ iter_distance_along_stroke,
+ iter_t2d_along_stroke,
+ iter_distance_from_camera,
+ iter_distance_from_object,
+ iter_material_value,
stroke_normal,
+ bound,
+ pairwise,
+ BoundedProperty
)
from _freestyle import (
blendRamp,
evaluateColorRamp,
evaluateCurveMappingF,
)
-import math
-import mathutils
+
import time
+from mathutils import Vector
+from math import pi, sin, cos, acos, radians
+from itertools import cycle, tee
class ColorRampModifier(StrokeShader):
+ """Primitive for the color modifiers """
def __init__(self, blend, influence, ramp):
StrokeShader.__init__(self)
- self.__blend = blend
- self.__influence = influence
- self.__ramp = ramp
+ self.blend = blend
+ self.influence = influence
+ self.ramp = ramp
def evaluate(self, t):
- col = evaluateColorRamp(self.__ramp, t)
- col = col.xyz # omit alpha
- return col
+ col = evaluateColorRamp(self.ramp, t)
+ return col.xyz # omit alpha
def blend_ramp(self, a, b):
- return blendRamp(self.__blend, a, self.__influence, b)
+ return blendRamp(self.blend, a, self.influence, b)
class ScalarBlendModifier(StrokeShader):
- def __init__(self, blend, influence):
+ """Primitive for alpha and thickness modifiers """
+ def __init__(self, blend_type, influence):
StrokeShader.__init__(self)
- self.__blend = blend
- self.__influence = influence
+ self.blend_type = blend_type
+ self.influence = influence
def blend(self, v1, v2):
- fac = self.__influence
+ fac = self.influence
facm = 1.0 - fac
- if self.__blend == 'MIX':
+ if self.blend_type == 'MIX':
v1 = facm * v1 + fac * v2
- elif self.__blend == 'ADD':
+ elif self.blend_type == 'ADD':
v1 += fac * v2
- elif self.__blend == 'MULTIPLY':
+ elif self.blend_type == 'MULTIPLY':
v1 *= facm + fac * v2
- elif self.__blend == 'SUBTRACT':
+ elif self.blend_type == 'SUBTRACT':
v1 -= fac * v2
- elif self.__blend == 'DIVIDE':
- if v2 != 0.0:
- v1 = facm * v1 + fac * v1 / v2
- elif self.__blend == 'DIFFERENCE':
+ elif self.blend_type == 'DIVIDE':
+ v1 = facm * v1 + fac * v1 / v2 if v2 != 0.0 else v1
+ elif self.blend_type == 'DIFFERENCE':
v1 = facm * v1 + fac * abs(v1 - v2)
- elif self.__blend == 'MININUM':
- tmp = fac * v2
- if v1 > tmp:
- v1 = tmp
- elif self.__blend == 'MAXIMUM':
- tmp = fac * v2
- if v1 < tmp:
- v1 = tmp
+ elif self.blend_type == 'MININUM':
+ v1 = min(fac * v2, v1)
+ elif self.blend_type == 'MAXIMUM':
+ v1 = max(fac * v2, v1)
else:
- raise ValueError("unknown curve blend type: " + self.__blend)
+ raise ValueError("unknown curve blend type: " + self.blend_type)
return v1
@@ -149,34 +159,28 @@ class CurveMappingModifier(ScalarBlendModifier):
def __init__(self, blend, influence, mapping, invert, curve):
ScalarBlendModifier.__init__(self, blend, influence)
assert mapping in {'LINEAR', 'CURVE'}
- self.__mapping = getattr(self, mapping)
- self.__invert = invert
- self.__curve = curve
+ self.evaluate = getattr(self, mapping)
+ self.invert = invert
+ self.curve = curve
def LINEAR(self, t):
- if self.__invert:
- return 1.0 - t
- return t
+ return (1.0 - t) if self.invert else t
def CURVE(self, t):
- return evaluateCurveMappingF(self.__curve, 0, t)
-
- def evaluate(self, t):
- return self.__mapping(t)
+ return evaluateCurveMappingF(self.curve, 0, t)
class ThicknessModifierMixIn:
def __init__(self):
scene = getCurrentScene()
- self.__persp_camera = (scene.camera.data.type == 'PERSP')
+ self.persp_camera = (scene.camera.data.type == 'PERSP')
def set_thickness(self, sv, outer, inner):
- fe = sv.first_svertex.get_fedge(sv.second_svertex)
+ fe = sv.fedge
nature = fe.nature
if (nature & Nature.BORDER):
- if self.__persp_camera:
- point = -sv.point_3d.copy()
- point.normalize()
+ if self.persp_camera:
+ point = -sv.point_3d.normalized()
dir = point.dot(fe.normal_left)
else:
dir = fe.normal_left.z
@@ -193,30 +197,42 @@ class ThicknessModifierMixIn:
class ThicknessBlenderMixIn(ThicknessModifierMixIn):
def __init__(self, position, ratio):
ThicknessModifierMixIn.__init__(self)
- self.__position = position
- self.__ratio = ratio
+ self.position = position
+ self.ratio = ratio
- def blend_thickness(self, outer, inner, v):
+ def blend_thickness(self, svert, v):
+ """ Blends and sets the thickness."""
+ outer, inner = svert.attribute.thickness
+ fe = svert.fedge
v = self.blend(outer + inner, v)
- if self.__position == 'CENTER':
- outer = v * 0.5
- inner = v - outer
- elif self.__position == 'INSIDE':
- outer = 0
- inner = v
- elif self.__position == 'OUTSIDE':
- outer = v
- inner = 0
- elif self.__position == 'RELATIVE':
- outer = v * self.__ratio
- inner = v - outer
- else:
- raise ValueError("unknown thickness position: " + self.__position)
- return outer, inner
+ # Part 1: blend
+ if self.position == "CENTER":
+ outer = inner = v * 0.5
+ elif self.position == "INSIDE":
+ outer, inner = 0, v
+ elif self.position == "OUTSIDE":
+ outer, inner = v, 0
+ elif self.position == "RELATIVE":
+ outer, inner = v * self.ratio, v - (v * self.ratio)
+ else:
+ raise ValueError("unknown thickness position: " + self.position)
-class BaseColorShader(ConstantColorShader):
- pass
+ # Part 2: set
+ if (fe.nature & Nature.BORDER):
+ if self.persp_camera:
+ point = -svert.point_3d.normalized()
+ dir = point.dot(fe.normal_left)
+ else:
+ dir = fe.normal_left.z
+ if dir < 0.0: # the back side is visible
+ outer, inner = inner, outer
+ elif (fe.nature & Nature.SILHOUETTE):
+ if fe.is_smooth: # TODO more tests needed
+ outer, inner = inner, outer
+ else:
+ outer = inner = (outer + inner) / 2
+ svert.attribute.thickness = (outer, inner)
class BaseThicknessShader(StrokeShader, ThicknessModifierMixIn):
@@ -224,520 +240,378 @@ class BaseThicknessShader(StrokeShader, ThicknessModifierMixIn):
StrokeShader.__init__(self)
ThicknessModifierMixIn.__init__(self)
if position == 'CENTER':
- self.__outer = thickness * 0.5
- self.__inner = thickness - self.__outer
+ self.outer = thickness * 0.5
+ self.inner = thickness - self.outer
elif position == 'INSIDE':
- self.__outer = 0
- self.__inner = thickness
+ self.outer = 0
+ self.inner = thickness
elif position == 'OUTSIDE':
- self.__outer = thickness
- self.__inner = 0
+ self.outer = thickness
+ self.inner = 0
elif position == 'RELATIVE':
- self.__outer = thickness * ratio
- self.__inner = thickness - self.__outer
+ self.outer = thickness * ratio
+ self.inner = thickness - self.outer
else:
- raise ValueError("unknown thickness position: " + self.position)
+ raise ValueError("unknown thickness position: " + position)
def shade(self, stroke):
- it = stroke.stroke_vertices_begin()
- while not it.is_end:
- sv = it.object
- self.set_thickness(sv, self.__outer, self.__inner)
- it.increment()
+ for svert in stroke:
+ self.set_thickness(svert, self.outer, self.inner)
# Along Stroke modifiers
-def iter_t2d_along_stroke(stroke):
- total = stroke.length_2d
- distance = 0.0
- it = stroke.stroke_vertices_begin()
- prev = it.object.point
- while not it.is_end:
- p = it.object.point
- distance += (prev - p).length
- prev = p.copy() # need a copy because the point can be altered
- t = min(distance / total, 1.0) if total > 0.0 else 0.0
- yield it, t
- it.increment()
-
-
class ColorAlongStrokeShader(ColorRampModifier):
+ """Maps a ramp to the color of the stroke, using the curvilinear abscissa (t) """
def shade(self, stroke):
- for it, t in iter_t2d_along_stroke(stroke):
- sv = it.object
- a = sv.attribute.color
+ for svert, t in zip(stroke, iter_t2d_along_stroke(stroke)):
+ a = svert.attribute.color
b = self.evaluate(t)
- sv.attribute.color = self.blend_ramp(a, b)
+ svert.attribute.color = self.blend_ramp(a, b)
class AlphaAlongStrokeShader(CurveMappingModifier):
+    """Maps a curve to the alpha/transparency of the stroke, using the curvilinear abscissa (t) """
def shade(self, stroke):
- for it, t in iter_t2d_along_stroke(stroke):
- sv = it.object
- a = sv.attribute.alpha
+ for svert, t in zip(stroke, iter_t2d_along_stroke(stroke)):
+ a = svert.attribute.alpha
b = self.evaluate(t)
- sv.attribute.alpha = self.blend(a, b)
+ svert.attribute.alpha = self.blend(a, b)
class ThicknessAlongStrokeShader(ThicknessBlenderMixIn, CurveMappingModifier):
+ """Maps a curve to the thickness of the stroke, using the curvilinear abscissa (t) """
def __init__(self, thickness_position, thickness_ratio,
blend, influence, mapping, invert, curve, value_min, value_max):
ThicknessBlenderMixIn.__init__(self, thickness_position, thickness_ratio)
CurveMappingModifier.__init__(self, blend, influence, mapping, invert, curve)
- self.__value_min = value_min
- self.__value_max = value_max
+ self.value = BoundedProperty(value_min, value_max, value_max - value_min)
def shade(self, stroke):
- for it, t in iter_t2d_along_stroke(stroke):
- sv = it.object
- a = sv.attribute.thickness
- b = self.__value_min + self.evaluate(t) * (self.__value_max - self.__value_min)
- c = self.blend_thickness(a[0], a[1], b)
- self.set_thickness(sv, c[0], c[1])
-
-
-# Distance from Camera modifiers
-
-def iter_distance_from_camera(stroke, range_min, range_max):
- normfac = range_max - range_min # normalization factor
- it = stroke.stroke_vertices_begin()
- while not it.is_end:
- p = it.object.point_3d # in the camera coordinate
- distance = p.length
- if distance < range_min:
- t = 0.0
- elif distance > range_max:
- t = 1.0
- else:
- t = (distance - range_min) / normfac
- yield it, t
- it.increment()
+ for svert, t in zip(stroke, iter_t2d_along_stroke(stroke)):
+ b = self.value.min + self.evaluate(t) * self.value.delta
+ self.blend_thickness(svert, b)
+# -- Distance from Camera modifiers -- #
+
class ColorDistanceFromCameraShader(ColorRampModifier):
+    """Picks a color value from a ramp based on the vertex's distance from the camera """
def __init__(self, blend, influence, ramp, range_min, range_max):
ColorRampModifier.__init__(self, blend, influence, ramp)
- self.__range_min = range_min
- self.__range_max = range_max
+ self.range = BoundedProperty(range_min, range_max, range_max - range_min)
def shade(self, stroke):
- for it, t in iter_distance_from_camera(stroke, self.__range_min, self.__range_max):
- sv = it.object
- a = sv.attribute.color
+ it = iter_distance_from_camera(stroke, *self.range)
+ for svert, t in it:
+ a = svert.attribute.color
b = self.evaluate(t)
- sv.attribute.color = self.blend_ramp(a, b)
+ svert.attribute.color = self.blend_ramp(a, b)
class AlphaDistanceFromCameraShader(CurveMappingModifier):
+    """Picks an alpha value from a curve based on the vertex's distance from the camera """
def __init__(self, blend, influence, mapping, invert, curve, range_min, range_max):
CurveMappingModifier.__init__(self, blend, influence, mapping, invert, curve)
- self.__range_min = range_min
- self.__range_max = range_max
+ self.range = BoundedProperty(range_min, range_max, range_max - range_min)
def shade(self, stroke):
- for it, t in iter_distance_from_camera(stroke, self.__range_min, self.__range_max):
- sv = it.object
- a = sv.attribute.alpha
+ it = iter_distance_from_camera(stroke, *self.range)
+ for svert, t in it:
+ a = svert.attribute.alpha
b = self.evaluate(t)
- sv.attribute.alpha = self.blend(a, b)
+ svert.attribute.alpha = self.blend(a, b)
class ThicknessDistanceFromCameraShader(ThicknessBlenderMixIn, CurveMappingModifier):
+    """Picks a thickness value from a curve based on the vertex's distance from the camera """
def __init__(self, thickness_position, thickness_ratio,
blend, influence, mapping, invert, curve, range_min, range_max, value_min, value_max):
ThicknessBlenderMixIn.__init__(self, thickness_position, thickness_ratio)
CurveMappingModifier.__init__(self, blend, influence, mapping, invert, curve)
- self.__range_min = range_min
- self.__range_max = range_max
- self.__value_min = value_min
- self.__value_max = value_max
+ self.range = BoundedProperty(range_min, range_max, range_max - range_min)
+ self.value = BoundedProperty(value_min, value_max, value_max - value_min)
def shade(self, stroke):
- for it, t in iter_distance_from_camera(stroke, self.__range_min, self.__range_max):
- sv = it.object
- a = sv.attribute.thickness
- b = self.__value_min + self.evaluate(t) * (self.__value_max - self.__value_min)
- c = self.blend_thickness(a[0], a[1], b)
- self.set_thickness(sv, c[0], c[1])
+ for (svert, t) in iter_distance_from_camera(stroke, *self.range):
+ b = self.value.min + self.evaluate(t) * self.value.delta
+ self.blend_thickness(svert, b)
# Distance from Object modifiers
-def iter_distance_from_object(stroke, object, range_min, range_max):
- scene = getCurrentScene()
- mv = scene.camera.matrix_world.copy() # model-view matrix
- mv.invert()
- loc = mv * object.location # loc in the camera coordinate
- normfac = range_max - range_min # normalization factor
- it = stroke.stroke_vertices_begin()
- while not it.is_end:
- p = it.object.point_3d # in the camera coordinate
- distance = (p - loc).length
- if distance < range_min:
- t = 0.0
- elif distance > range_max:
- t = 1.0
- else:
- t = (distance - range_min) / normfac
- yield it, t
- it.increment()
-
-
class ColorDistanceFromObjectShader(ColorRampModifier):
+    """Picks a color value from a ramp based on the vertex's distance from a given object """
def __init__(self, blend, influence, ramp, target, range_min, range_max):
ColorRampModifier.__init__(self, blend, influence, ramp)
- self.__target = target
- self.__range_min = range_min
- self.__range_max = range_max
+ if target is None:
+ raise ValueError("ColorDistanceFromObjectShader: target can't be None ")
+ self.range = BoundedProperty(range_min, range_max, range_max - range_min)
+ # construct a model-view matrix
+ matrix = getCurrentScene().camera.matrix_world.inverted()
+ # get the object location in the camera coordinate
+ self.loc = matrix * target.location
def shade(self, stroke):
- if self.__target is None:
- return
- for it, t in iter_distance_from_object(stroke, self.__target, self.__range_min, self.__range_max):
- sv = it.object
- a = sv.attribute.color
+ it = iter_distance_from_object(stroke, self.loc, *self.range)
+ for svert, t in it:
+ a = svert.attribute.color
b = self.evaluate(t)
- sv.attribute.color = self.blend_ramp(a, b)
+ svert.attribute.color = self.blend_ramp(a, b)
class AlphaDistanceFromObjectShader(CurveMappingModifier):
+    """Picks an alpha value from a curve based on the vertex's distance from a given object """
def __init__(self, blend, influence, mapping, invert, curve, target, range_min, range_max):
CurveMappingModifier.__init__(self, blend, influence, mapping, invert, curve)
- self.__target = target
- self.__range_min = range_min
- self.__range_max = range_max
+ if target is None:
+ raise ValueError("AlphaDistanceFromObjectShader: target can't be None ")
+ self.range = BoundedProperty(range_min, range_max, range_max - range_min)
+ # construct a model-view matrix
+ matrix = getCurrentScene().camera.matrix_world.inverted()
+ # get the object location in the camera coordinate
+ self.loc = matrix * target.location
def shade(self, stroke):
- if self.__target is None:
- return
- for it, t in iter_distance_from_object(stroke, self.__target, self.__range_min, self.__range_max):
- sv = it.object
- a = sv.attribute.alpha
+ it = iter_distance_from_object(stroke, self.loc, *self.range)
+ for svert, t in it:
+ a = svert.attribute.alpha
b = self.evaluate(t)
- sv.attribute.alpha = self.blend(a, b)
+ svert.attribute.alpha = self.blend(a, b)
class ThicknessDistanceFromObjectShader(ThicknessBlenderMixIn, CurveMappingModifier):
+    """Picks a thickness value from a curve based on the vertex's distance from a given object """
def __init__(self, thickness_position, thickness_ratio,
blend, influence, mapping, invert, curve, target, range_min, range_max, value_min, value_max):
ThicknessBlenderMixIn.__init__(self, thickness_position, thickness_ratio)
CurveMappingModifier.__init__(self, blend, influence, mapping, invert, curve)
- self.__target = target
- self.__range_min = range_min
- self.__range_max = range_max
- self.__value_min = value_min
- self.__value_max = value_max
+ if target is None:
+ raise ValueError("ThicknessDistanceFromObjectShader: target can't be None ")
+ self.range = BoundedProperty(range_min, range_max, range_max - range_min)
+ self.value = BoundedProperty(value_min, value_max, value_max - value_min)
+ # construct a model-view matrix
+ matrix = getCurrentScene().camera.matrix_world.inverted()
+ # get the object location in the camera coordinate
+ self.loc = matrix * target.location
def shade(self, stroke):
- if self.__target is None:
- return
- for it, t in iter_distance_from_object(stroke, self.__target, self.__range_min, self.__range_max):
- sv = it.object
- a = sv.attribute.thickness
- b = self.__value_min + self.evaluate(t) * (self.__value_max - self.__value_min)
- c = self.blend_thickness(a[0], a[1], b)
- self.set_thickness(sv, c[0], c[1])
-
+ it = iter_distance_from_object(stroke, self.loc, *self.range)
+ for svert, t in it:
+ b = self.value.min + self.evaluate(t) * self.value.delta
+ self.blend_thickness(svert, b)
# Material modifiers
-
-def iter_material_color(stroke, material_attribute):
- func = CurveMaterialF0D()
- it = stroke.stroke_vertices_begin()
- while not it.is_end:
- material = func(Interface0DIterator(it))
- if material_attribute == 'LINE':
- color = material.line[0:3]
- elif material_attribute == 'DIFF':
- color = material.diffuse[0:3]
- elif material_attribute == 'SPEC':
- color = material.specular[0:3]
- else:
- raise ValueError("unexpected material attribute: " + material_attribute)
- yield it, color
- it.increment()
-
-
-def iter_material_value(stroke, material_attribute):
- func = CurveMaterialF0D()
- it = stroke.stroke_vertices_begin()
- while not it.is_end:
- material = func(Interface0DIterator(it))
- if material_attribute == 'LINE':
- r, g, b = material.line[0:3]
- t = 0.35 * r + 0.45 * g + 0.2 * b
- elif material_attribute == 'LINE_R':
- t = material.line[0]
- elif material_attribute == 'LINE_G':
- t = material.line[1]
- elif material_attribute == 'LINE_B':
- t = material.line[2]
- elif material_attribute == 'ALPHA':
- t = material.line[3]
- elif material_attribute == 'DIFF':
- r, g, b = material.diffuse[0:3]
- t = 0.35 * r + 0.45 * g + 0.2 * b
- elif material_attribute == 'DIFF_R':
- t = material.diffuse[0]
- elif material_attribute == 'DIFF_G':
- t = material.diffuse[1]
- elif material_attribute == 'DIFF_B':
- t = material.diffuse[2]
- elif material_attribute == 'SPEC':
- r, g, b = material.specular[0:3]
- t = 0.35 * r + 0.45 * g + 0.2 * b
- elif material_attribute == 'SPEC_R':
- t = material.specular[0]
- elif material_attribute == 'SPEC_G':
- t = material.specular[1]
- elif material_attribute == 'SPEC_B':
- t = material.specular[2]
- elif material_attribute == 'SPEC_HARDNESS':
- t = material.shininess
- else:
- raise ValueError("unexpected material attribute: " + material_attribute)
- yield it, t
- it.increment()
-
-
class ColorMaterialShader(ColorRampModifier):
+ """ Assigns a color to the vertices based on their underlying material """
def __init__(self, blend, influence, ramp, material_attribute, use_ramp):
ColorRampModifier.__init__(self, blend, influence, ramp)
- self.__material_attribute = material_attribute
- self.__use_ramp = use_ramp
-
- def shade(self, stroke):
- if self.__material_attribute in {'LINE', 'DIFF', 'SPEC'} and not self.__use_ramp:
- for it, b in iter_material_color(stroke, self.__material_attribute):
- sv = it.object
- a = sv.attribute.color
- sv.attribute.color = self.blend_ramp(a, b)
+ self.attribute = material_attribute
+ self.use_ramp = use_ramp
+ self.func = CurveMaterialF0D()
+
+ def shade(self, stroke, attributes={'DIFF', 'SPEC', 'LINE'}):
+ it = Interface0DIterator(stroke)
+ if not self.use_ramp and self.attribute in attributes:
+ for svert in it:
+ material = self.func(it)
+ if self.attribute == 'DIFF':
+ b = material.diffuse[0:3]
+ elif self.attribute == 'LINE':
+ b = material.line[0:3]
+ else:
+ b = material.specular[0:3]
+ a = svert.attribute.color
+ svert.attribute.color = self.blend_ramp(a, b)
else:
- for it, t in iter_material_value(stroke, self.__material_attribute):
- sv = it.object
- a = sv.attribute.color
- b = self.evaluate(t)
- sv.attribute.color = self.blend_ramp(a, b)
-
+ for svert, value in iter_material_value(stroke, self.func, self.attribute):
+ a = svert.attribute.color
+ b = self.evaluate(value)
+ svert.attribute.color = self.blend_ramp(a, b)
class AlphaMaterialShader(CurveMappingModifier):
+ """ Assigns an alpha value to the vertices based on their underlying material """
def __init__(self, blend, influence, mapping, invert, curve, material_attribute):
CurveMappingModifier.__init__(self, blend, influence, mapping, invert, curve)
- self.__material_attribute = material_attribute
+ self.attribute = material_attribute
+ self.func = CurveMaterialF0D()
def shade(self, stroke):
- for it, t in iter_material_value(stroke, self.__material_attribute):
- sv = it.object
- a = sv.attribute.alpha
- b = self.evaluate(t)
- sv.attribute.alpha = self.blend(a, b)
+ for svert, value in iter_material_value(stroke, self.func, self.attribute):
+ a = svert.attribute.alpha
+ b = self.evaluate(value)
+ svert.attribute.alpha = self.blend(a, b)
class ThicknessMaterialShader(ThicknessBlenderMixIn, CurveMappingModifier):
+ """ Assigns a thickness value to the vertices based on their underlying material """
def __init__(self, thickness_position, thickness_ratio,
blend, influence, mapping, invert, curve, material_attribute, value_min, value_max):
ThicknessBlenderMixIn.__init__(self, thickness_position, thickness_ratio)
CurveMappingModifier.__init__(self, blend, influence, mapping, invert, curve)
- self.__material_attribute = material_attribute
- self.__value_min = value_min
- self.__value_max = value_max
+ self.attribute = material_attribute
+ self.value = BoundedProperty(value_min, value_max, value_max - value_min)
+ self.func = CurveMaterialF0D()
def shade(self, stroke):
- for it, t in iter_material_value(stroke, self.__material_attribute):
- sv = it.object
- a = sv.attribute.thickness
- b = self.__value_min + self.evaluate(t) * (self.__value_max - self.__value_min)
- c = self.blend_thickness(a[0], a[1], b)
- self.set_thickness(sv, c[0], c[1])
+ for svert, value in iter_material_value(stroke, self.func, self.attribute):
+ b = self.value.min + self.evaluate(value) * self.value.delta
+ self.blend_thickness(svert, b)
# Calligraphic thickness modifier
+
class CalligraphicThicknessShader(ThicknessBlenderMixIn, ScalarBlendModifier):
+ """Thickness modifier for achieving a calligraphy-like effect """
def __init__(self, thickness_position, thickness_ratio,
- blend, influence, orientation, thickness_min, thickness_max):
+ blend_type, influence, orientation, thickness_min, thickness_max):
ThicknessBlenderMixIn.__init__(self, thickness_position, thickness_ratio)
- ScalarBlendModifier.__init__(self, blend, influence)
- self.__orientation = mathutils.Vector((math.cos(orientation), math.sin(orientation)))
- self.__thickness_min = thickness_min
- self.__thickness_max = thickness_max
+ ScalarBlendModifier.__init__(self, blend_type, influence)
+ self.orientation = Vector((cos(orientation), sin(orientation)))
+ self.thickness = BoundedProperty(thickness_min, thickness_max, thickness_max - thickness_min)
+ self.func = VertexOrientation2DF0D()
def shade(self, stroke):
- func = VertexOrientation2DF0D()
- it = stroke.stroke_vertices_begin()
- while not it.is_end:
- dir = func(Interface0DIterator(it))
- orthDir = mathutils.Vector((-dir.y, dir.x))
- orthDir.normalize()
- fac = abs(orthDir * self.__orientation)
- sv = it.object
- a = sv.attribute.thickness
- b = self.__thickness_min + fac * (self.__thickness_max - self.__thickness_min)
- b = max(b, 0.0)
- c = self.blend_thickness(a[0], a[1], b)
- self.set_thickness(sv, c[0], c[1])
- it.increment()
+ it = Interface0DIterator(stroke)
+ for svert in it:
+ dir = self.func(it)
+ if dir.length != 0.0:
+ dir.normalize()
+ fac = abs(dir.orthogonal() * self.orientation)
+ b = self.thickness.min + fac * self.thickness.delta
+ else:
+ b = self.thickness.min
+ self.blend_thickness(svert, b)
# Geometry modifiers
-def iter_distance_along_stroke(stroke):
- distance = 0.0
- it = stroke.stroke_vertices_begin()
- prev = it.object.point
- while not it.is_end:
- p = it.object.point
- distance += (prev - p).length
- prev = p.copy() # need a copy because the point can be altered
- yield it, distance
- it.increment()
-
-
class SinusDisplacementShader(StrokeShader):
+ """Displaces the stroke in a sinewave-like shape """
def __init__(self, wavelength, amplitude, phase):
StrokeShader.__init__(self)
- self._wavelength = wavelength
- self._amplitude = amplitude
- self._phase = phase / wavelength * 2 * math.pi
+ self.wavelength = wavelength
+ self.amplitude = amplitude
+ self.phase = phase / wavelength * 2 * pi
def shade(self, stroke):
- # separately iterate over stroke vertices to compute normals
- buf = []
- for it, distance in iter_distance_along_stroke(stroke):
- buf.append((it.object, distance, stroke_normal(it)))
- # iterate over the vertices again to displace them
- for v, distance, normal in buf:
- n = normal * self._amplitude * math.cos(distance / self._wavelength * 2 * math.pi + self._phase)
- v.point = v.point + n
+ # normals are stored in a tuple, so they don't update when we reposition vertices.
+ normals = tuple(stroke_normal(stroke))
+ distances = iter_distance_along_stroke(stroke)
+ coeff = 1 / self.wavelength * 2 * pi
+ for svert, distance, normal in zip(stroke, distances, normals):
+ n = normal * self.amplitude * cos(distance * coeff + self.phase)
+ svert.point += n
stroke.update_length()
class PerlinNoise1DShader(StrokeShader):
- def __init__(self, freq=10, amp=10, oct=4, angle=math.radians(45), seed=-1):
+ """
+ Displaces the stroke using the curvilinear abscissa. This means
+ that lines with the same length and sampling interval will be
+    identically distorted
+ """
+ def __init__(self, freq=10, amp=10, oct=4, angle=radians(45), seed=-1):
StrokeShader.__init__(self)
- self.__noise = Noise(seed)
- self.__freq = freq
- self.__amp = amp
- self.__oct = oct
- self.__dir = mathutils.Vector((math.cos(angle), math.sin(angle)))
+ self.noise = Noise(seed)
+ self.freq = freq
+ self.amp = amp
+ self.oct = oct
+ self.dir = Vector((cos(angle), sin(angle)))
def shade(self, stroke):
length = stroke.length_2d
- it = stroke.stroke_vertices_begin()
- while not it.is_end:
- v = it.object
- nres = self.__noise.turbulence1(length * v.u, self.__freq, self.__amp, self.__oct)
- v.point = v.point + nres * self.__dir
- it.increment()
+ for svert in stroke:
+ nres = self.noise.turbulence1(length * svert.u, self.freq, self.amp, self.oct)
+ svert.point += nres * self.dir
stroke.update_length()
class PerlinNoise2DShader(StrokeShader):
- def __init__(self, freq=10, amp=10, oct=4, angle=math.radians(45), seed=-1):
+ """
+ Displaces the stroke using the strokes coordinates. This means
+    that in a scene no strokes will be distorted identically
+
+ More information on the noise shaders can be found at
+ freestyleintegration.wordpress.com/2011/09/25/development-updates-on-september-25/
+ """
+ def __init__(self, freq=10, amp=10, oct=4, angle=radians(45), seed=-1):
StrokeShader.__init__(self)
- self.__noise = Noise(seed)
- self.__freq = freq
- self.__amp = amp
- self.__oct = oct
- self.__dir = mathutils.Vector((math.cos(angle), math.sin(angle)))
+ self.noise = Noise(seed)
+ self.freq = freq
+ self.amp = amp
+ self.oct = oct
+ self.dir = Vector((cos(angle), sin(angle)))
def shade(self, stroke):
- it = stroke.stroke_vertices_begin()
- while not it.is_end:
- v = it.object
- vec = mathutils.Vector((v.projected_x, v.projected_y))
- nres = self.__noise.turbulence2(vec, self.__freq, self.__amp, self.__oct)
- v.point = v.point + nres * self.__dir
- it.increment()
+ for svert in stroke:
+ projected = Vector((svert.projected_x, svert.projected_y))
+ nres = self.noise.turbulence2(projected, self.freq, self.amp, self.oct)
+ svert.point += nres * self.dir
stroke.update_length()
class Offset2DShader(StrokeShader):
+ """Offsets the stroke by a given amount """
def __init__(self, start, end, x, y):
StrokeShader.__init__(self)
- self.__start = start
- self.__end = end
- self.__xy = mathutils.Vector((x, y))
+ self.start = start
+ self.end = end
+ self.xy = Vector((x, y))
def shade(self, stroke):
- # first iterate over stroke vertices to compute normals
- buf = []
- it = stroke.stroke_vertices_begin()
- while not it.is_end:
- buf.append((it.object, stroke_normal(it)))
- it.increment()
- # again iterate over the vertices to add displacement
- for v, n in buf:
- a = self.__start + v.u * (self.__end - self.__start)
- n = n * a
- v.point = v.point + n + self.__xy
+ # normals are stored in a tuple, so they don't update when we reposition vertices.
+ normals = tuple(stroke_normal(stroke))
+ for svert, normal in zip(stroke, normals):
+ a = self.start + svert.u * (self.end - self.start)
+ svert.point += (normal * a) + self.xy
stroke.update_length()
class Transform2DShader(StrokeShader):
+ """Transforms the stroke (scale, rotation, location) around a given pivot point """
def __init__(self, pivot, scale_x, scale_y, angle, pivot_u, pivot_x, pivot_y):
StrokeShader.__init__(self)
- self.__pivot = pivot
- self.__scale_x = scale_x
- self.__scale_y = scale_y
- self.__angle = angle
- self.__pivot_u = pivot_u
- self.__pivot_x = pivot_x
- self.__pivot_y = pivot_y
+ self.pivot = pivot
+ self.scale = Vector((scale_x, scale_y))
+ self.cos_theta = cos(angle)
+ self.sin_theta = sin(angle)
+ self.pivot_u = pivot_u
+ self.pivot_x = pivot_x
+ self.pivot_y = pivot_y
+ if pivot not in {'START', 'END', 'CENTER', 'ABSOLUTE', 'PARAM'}:
+            raise ValueError("expected pivot in {'START', 'END', 'CENTER', 'ABSOLUTE', 'PARAM'}, not " + pivot)
def shade(self, stroke):
# determine the pivot of scaling and rotation operations
- if self.__pivot == 'START':
- it = stroke.stroke_vertices_begin()
- pivot = it.object.point
- elif self.__pivot == 'END':
- it = stroke.stroke_vertices_end()
- it.decrement()
- pivot = it.object.point
- elif self.__pivot == 'PARAM':
- p = None
- it = stroke.stroke_vertices_begin()
- while not it.is_end:
- prev = p
- v = it.object
- p = v.point
- u = v.u
- if self.__pivot_u < u:
- break
- it.increment()
- if prev is None:
- pivot = p
+ if self.pivot == 'START':
+ pivot = stroke[0].point
+ elif self.pivot == 'END':
+ pivot = stroke[-1].point
+ elif self.pivot == 'CENTER':
+ # minor rounding errors here, because
+ # given v = Vector(a, b), then (v / n) != Vector(v.x / n, v.y / n)
+ pivot = (1 / len(stroke)) * sum((svert.point for svert in stroke), Vector((0.0, 0.0)))
+ elif self.pivot == 'ABSOLUTE':
+ pivot = Vector((self.pivot_x, self.pivot_y))
+ elif self.pivot == 'PARAM':
+ if self.pivot_u < stroke[0].u:
+ pivot = stroke[0].point
else:
- delta = u - self.__pivot_u
- pivot = p + delta * (prev - p)
- elif self.__pivot == 'CENTER':
- pivot = mathutils.Vector((0.0, 0.0))
- n = 0
- it = stroke.stroke_vertices_begin()
- while not it.is_end:
- p = it.object.point
- pivot = pivot + p
- n += 1
- it.increment()
- pivot.x = pivot.x / n
- pivot.y = pivot.y / n
- elif self.__pivot == 'ABSOLUTE':
- pivot = mathutils.Vector((self.__pivot_x, self.__pivot_y))
+ for prev, svert in pairwise(stroke):
+ if self.pivot_u < svert.u:
+ break
+ pivot = svert.point + (svert.u - self.pivot_u) * (prev.point - svert.point)
+
# apply scaling and rotation operations
- cos_theta = math.cos(self.__angle)
- sin_theta = math.sin(self.__angle)
- it = stroke.stroke_vertices_begin()
- while not it.is_end:
- v = it.object
- p = v.point
- p = p - pivot
- x = p.x * self.__scale_x
- y = p.y * self.__scale_y
- p.x = x * cos_theta - y * sin_theta
- p.y = x * sin_theta + y * cos_theta
- v.point = p + pivot
- it.increment()
+ for svert in stroke:
+ p = (svert.point - pivot)
+ x = p.x * self.scale.x
+ y = p.y * self.scale.y
+ p.x = x * self.cos_theta - y * self.sin_theta
+ p.y = x * self.sin_theta + y * self.cos_theta
+ svert.point = p + pivot
stroke.update_length()
@@ -746,165 +620,46 @@ class Transform2DShader(StrokeShader):
class QuantitativeInvisibilityRangeUP1D(UnaryPredicate1D):
def __init__(self, qi_start, qi_end):
UnaryPredicate1D.__init__(self)
- self.__getQI = QuantitativeInvisibilityF1D()
- self.__qi_start = qi_start
- self.__qi_end = qi_end
+ self.getQI = QuantitativeInvisibilityF1D()
+ self.qi_start = qi_start
+ self.qi_end = qi_end
def __call__(self, inter):
- qi = self.__getQI(inter)
- return self.__qi_start <= qi <= self.__qi_end
-
-
-def join_unary_predicates(upred_list, bpred):
- if not upred_list:
- return None
- upred = upred_list[0]
- for p in upred_list[1:]:
- upred = bpred(upred, p)
- return upred
+ qi = self.getQI(inter)
+ return self.qi_start <= qi <= self.qi_end
class ObjectNamesUP1D(UnaryPredicate1D):
def __init__(self, names, negative):
UnaryPredicate1D.__init__(self)
- self._names = names
- self._negative = negative
+ self.names = names
+ self.negative = negative
def __call__(self, viewEdge):
- found = viewEdge.viewshape.name in self._names
- if self._negative:
+ found = viewEdge.viewshape.name in self.names
+ if self.negative:
return not found
return found
-# Stroke caps
-
-def iter_stroke_vertices(stroke):
- it = stroke.stroke_vertices_begin()
- prev_p = None
- while not it.is_end:
- sv = it.object
- p = sv.point
- if prev_p is None or (prev_p - p).length > 1e-6:
- yield sv
- prev_p = p.copy()
- it.increment()
-
-
-class RoundCapShader(StrokeShader):
- def round_cap_thickness(self, x):
- x = max(0.0, min(x, 1.0))
- return math.sqrt(1.0 - (x ** 2.0))
-
- def shade(self, stroke):
- # save the location and attribute of stroke vertices
- buffer = []
- for sv in iter_stroke_vertices(stroke):
- buffer.append((mathutils.Vector(sv.point), StrokeAttribute(sv.attribute)))
- nverts = len(buffer)
- if nverts < 2:
- return
- # calculate the number of additional vertices to form caps
- R, L = stroke[0].attribute.thickness
- caplen_beg = (R + L) / 2.0
- nverts_beg = max(5, int(R + L))
- R, L = stroke[-1].attribute.thickness
- caplen_end = (R + L) / 2.0
- nverts_end = max(5, int(R + L))
- # adjust the total number of stroke vertices
- stroke.resample(nverts + nverts_beg + nverts_end)
- # restore the location and attribute of the original vertices
- for i in range(nverts):
- p, attr = buffer[i]
- stroke[nverts_beg + i].point = p
- stroke[nverts_beg + i].attribute = attr
- # reshape the cap at the beginning of the stroke
- q, attr = buffer[1]
- p, attr = buffer[0]
- d = p - q
- d = d / d.length * caplen_beg
- n = 1.0 / nverts_beg
- R, L = attr.thickness
- for i in range(nverts_beg):
- t = (nverts_beg - i) * n
- stroke[i].point = p + d * t
- r = self.round_cap_thickness((nverts_beg - i + 1) * n)
- stroke[i].attribute = attr
- stroke[i].attribute.thickness = (R * r, L * r)
- # reshape the cap at the end of the stroke
- q, attr = buffer[-2]
- p, attr = buffer[-1]
- d = p - q
- d = d / d.length * caplen_end
- n = 1.0 / nverts_end
- R, L = attr.thickness
- for i in range(nverts_end):
- t = (nverts_end - i) * n
- stroke[-i - 1].point = p + d * t
- r = self.round_cap_thickness((nverts_end - i + 1) * n)
- stroke[-i - 1].attribute = attr
- stroke[-i - 1].attribute.thickness = (R * r, L * r)
- # update the curvilinear 2D length of each vertex
- stroke.update_length()
-
-
-class SquareCapShader(StrokeShader):
- def shade(self, stroke):
- # save the location and attribute of stroke vertices
- buffer = []
- for sv in iter_stroke_vertices(stroke):
- buffer.append((mathutils.Vector(sv.point), StrokeAttribute(sv.attribute)))
- nverts = len(buffer)
- if nverts < 2:
- return
- # calculate the number of additional vertices to form caps
- R, L = stroke[0].attribute.thickness
- caplen_beg = (R + L) / 2.0
- nverts_beg = 1
- R, L = stroke[-1].attribute.thickness
- caplen_end = (R + L) / 2.0
- nverts_end = 1
- # adjust the total number of stroke vertices
- stroke.resample(nverts + nverts_beg + nverts_end)
- # restore the location and attribute of the original vertices
- for i in range(nverts):
- p, attr = buffer[i]
- stroke[nverts_beg + i].point = p
- stroke[nverts_beg + i].attribute = attr
- # reshape the cap at the beginning of the stroke
- q, attr = buffer[1]
- p, attr = buffer[0]
- d = p - q
- stroke[0].point = p + d / d.length * caplen_beg
- stroke[0].attribute = attr
- # reshape the cap at the end of the stroke
- q, attr = buffer[-2]
- p, attr = buffer[-1]
- d = p - q
- stroke[-1].point = p + d / d.length * caplen_beg
- stroke[-1].attribute = attr
- # update the curvilinear 2D length of each vertex
- stroke.update_length()
-
-
-# Split by dashed line pattern
+# -- Split by dashed line pattern -- #
class SplitPatternStartingUP0D(UnaryPredicate0D):
def __init__(self, controller):
UnaryPredicate0D.__init__(self)
- self._controller = controller
+ self.controller = controller
def __call__(self, inter):
- return self._controller.start()
+ return self.controller.start()
class SplitPatternStoppingUP0D(UnaryPredicate0D):
def __init__(self, controller):
UnaryPredicate0D.__init__(self)
- self._controller = controller
+ self.controller = controller
def __call__(self, inter):
- return self._controller.stop()
+ return self.controller.stop()
class SplitPatternController:
@@ -946,29 +701,29 @@ class SplitPatternController:
class DashedLineShader(StrokeShader):
def __init__(self, pattern):
StrokeShader.__init__(self)
- self._pattern = pattern
+ self.pattern = pattern
def shade(self, stroke):
- index = 0 # pattern index
start = 0.0 # 2D curvilinear length
visible = True
+ """ The extra 'sampling' term is added below, because the
+ visibility attribute of the i-th vertex refers to the
+ visibility of the stroke segment between the i-th and
+ (i+1)-th vertices. """
sampling = 1.0
it = stroke.stroke_vertices_begin(sampling)
- while not it.is_end:
+ pattern_cycle = cycle(self.pattern)
+ pattern = next(pattern_cycle)
+ for svert in it:
pos = it.t # curvilinear abscissa
- # The extra 'sampling' term is added below, because the
- # visibility attribute of the i-th vertex refers to the
- # visibility of the stroke segment between the i-th and
- # (i+1)-th vertices.
- if pos - start + sampling > self._pattern[index]:
+
+ if pos - start + sampling > pattern:
start = pos
- index += 1
- if index == len(self._pattern):
- index = 0
+ pattern = next(pattern_cycle)
visible = not visible
+
if not visible:
- it.object.attribute.visible = visible
- it.increment()
+ it.object.attribute.visible = False
# predicates for chaining
@@ -976,7 +731,7 @@ class DashedLineShader(StrokeShader):
class AngleLargerThanBP1D(BinaryPredicate1D):
def __init__(self, angle):
BinaryPredicate1D.__init__(self)
- self._angle = angle
+ self.angle = angle
def __call__(self, i1, i2):
sv1a = i1.first_fedge.first_svertex.point_2d
@@ -1001,38 +756,28 @@ class AngleLargerThanBP1D(BinaryPredicate1D):
if denom < 1e-6:
return False
x = (dir1 * dir2) / denom
- return math.acos(min(max(x, -1.0), 1.0)) > self._angle
-
-
-class AndBP1D(BinaryPredicate1D):
- def __init__(self, pred1, pred2):
- BinaryPredicate1D.__init__(self)
- self.__pred1 = pred1
- self.__pred2 = pred2
-
- def __call__(self, i1, i2):
- return self.__pred1(i1, i2) and self.__pred2(i1, i2)
-
+ return acos(bound(-1.0, x, 1.0)) > self.angle
# predicates for selection
+
class LengthThresholdUP1D(UnaryPredicate1D):
def __init__(self, length_min=None, length_max=None):
UnaryPredicate1D.__init__(self)
- self._length_min = length_min
- self._length_max = length_max
+ self.length_min = length_min
+ self.length_max = length_max
def __call__(self, inter):
length = inter.length_2d
- if self._length_min is not None and length < self._length_min:
+ if self.length_min is not None and length < self.length_min:
return False
- if self._length_max is not None and length > self._length_max:
+ if self.length_max is not None and length > self.length_max:
return False
return True
class FaceMarkBothUP1D(UnaryPredicate1D):
- def __call__(self, inter): # ViewEdge
+ def __call__(self, inter: ViewEdge):
fe = inter.first_fedge
while fe is not None:
if fe.is_smooth:
@@ -1049,7 +794,7 @@ class FaceMarkBothUP1D(UnaryPredicate1D):
class FaceMarkOneUP1D(UnaryPredicate1D):
- def __call__(self, inter): # ViewEdge
+ def __call__(self, inter: ViewEdge):
fe = inter.first_fedge
while fe is not None:
if fe.is_smooth:
@@ -1069,17 +814,17 @@ class FaceMarkOneUP1D(UnaryPredicate1D):
class MaterialBoundaryUP0D(UnaryPredicate0D):
def __call__(self, it):
- if it.is_begin:
- return False
- it_prev = Interface0DIterator(it)
- it_prev.decrement()
- v = it.object
- it.increment()
- if it.is_end:
+ if (it.is_begin or it.is_end):
return False
- fe = v.get_fedge(it_prev.object)
+ else:
+ it.decrement()
+ prev = it.object
+ svert = next(it)
+ succ = next(it)
+
+ fe = svert.get_fedge(prev)
idx1 = fe.material_index if fe.is_smooth else fe.material_index_left
- fe = v.get_fedge(it.object)
+ fe = svert.get_fedge(succ)
idx2 = fe.material_index if fe.is_smooth else fe.material_index_left
return idx1 != idx2
@@ -1087,15 +832,15 @@ class MaterialBoundaryUP0D(UnaryPredicate0D):
class Curvature2DAngleThresholdUP0D(UnaryPredicate0D):
def __init__(self, angle_min=None, angle_max=None):
UnaryPredicate0D.__init__(self)
- self._angle_min = angle_min
- self._angle_max = angle_max
- self._func = Curvature2DAngleF0D()
+ self.angle_min = angle_min
+ self.angle_max = angle_max
+ self.func = Curvature2DAngleF0D()
def __call__(self, inter):
- angle = math.pi - self._func(inter)
- if self._angle_min is not None and angle < self._angle_min:
+ angle = pi - self.func(inter)
+ if self.angle_min is not None and angle < self.angle_min:
return True
- if self._angle_max is not None and angle > self._angle_max:
+ if self.angle_max is not None and angle > self.angle_max:
return True
return False
@@ -1103,17 +848,17 @@ class Curvature2DAngleThresholdUP0D(UnaryPredicate0D):
class Length2DThresholdUP0D(UnaryPredicate0D):
def __init__(self, length_limit):
UnaryPredicate0D.__init__(self)
- self._length_limit = length_limit
- self._t = 0.0
+ self.length_limit = length_limit
+ self.t = 0.0
def __call__(self, inter):
t = inter.t # curvilinear abscissa
- if t < self._t:
- self._t = 0.0
+ if t < self.t:
+ self.t = 0.0
return False
- if t - self._t < self._length_limit:
+ if t - self.t < self.length_limit:
return False
- self._t = t
+ self.t = t
return True
@@ -1192,9 +937,9 @@ def process(layer_name, lineset_name):
upred = ExternalContourUP1D()
edge_type_criteria.append(NotUP1D(upred) if lineset.exclude_external_contour else upred)
if lineset.edge_type_combination == 'OR':
- upred = join_unary_predicates(edge_type_criteria, OrUP1D)
+ upred = OrUP1D(*edge_type_criteria)
else:
- upred = join_unary_predicates(edge_type_criteria, AndUP1D)
+ upred = AndUP1D(*edge_type_criteria)
if upred is not None:
if lineset.edge_type_negation == 'EXCLUSIVE':
upred = NotUP1D(upred)
@@ -1205,22 +950,22 @@ def process(layer_name, lineset_name):
upred = FaceMarkBothUP1D()
else:
upred = FaceMarkOneUP1D()
+
if lineset.face_mark_negation == 'EXCLUSIVE':
upred = NotUP1D(upred)
selection_criteria.append(upred)
# prepare selection criteria by group of objects
if lineset.select_by_group:
if lineset.group is not None:
- names = dict((ob.name, True) for ob in lineset.group.objects)
+ names = {ob.name: True for ob in lineset.group.objects}
upred = ObjectNamesUP1D(names, lineset.group_negation == 'EXCLUSIVE')
selection_criteria.append(upred)
# prepare selection criteria by image border
if lineset.select_by_image_border:
- xmin, ymin, xmax, ymax = ContextFunctions.get_border()
- upred = WithinImageBoundaryUP1D(xmin, ymin, xmax, ymax)
+ upred = WithinImageBoundaryUP1D(*ContextFunctions.get_border())
selection_criteria.append(upred)
# select feature edges
- upred = join_unary_predicates(selection_criteria, AndUP1D)
+ upred = AndUP1D(*selection_criteria)
if upred is None:
upred = TrueUP1D()
Operators.select(upred)
@@ -1330,15 +1075,7 @@ def process(layer_name, lineset_name):
elif m.type == '2D_TRANSFORM':
shaders_list.append(Transform2DShader(
m.pivot, m.scale_x, m.scale_y, m.angle, m.pivot_u, m.pivot_x, m.pivot_y))
- if linestyle.use_texture:
- has_tex = False
- for slot in linestyle.texture_slots:
- if slot is not None:
- shaders_list.append(BlenderTextureShader(slot))
- has_tex = True
- if has_tex:
- shaders_list.append(StrokeTextureStepShader(linestyle.texture_spacing))
- color = linestyle.color
+
if (not linestyle.use_chaining) or (linestyle.chaining == 'PLAIN' and linestyle.use_same_object):
thickness_position = linestyle.thickness_position
else:
@@ -1347,9 +1084,11 @@ def process(layer_name, lineset_name):
if bpy.app.debug_freestyle:
print("Warning: Thickness position options are applied when chaining is disabled\n"
" or the Plain chaining is used with the Same Object option enabled.")
- shaders_list.append(BaseColorShader(color.r, color.g, color.b, linestyle.alpha))
+
+ shaders_list.append(ConstantColorShader(*(linestyle.color), alpha=linestyle.alpha))
shaders_list.append(BaseThicknessShader(linestyle.thickness, thickness_position,
linestyle.thickness_ratio))
+ # -- Modifiers and textures -- #
for m in linestyle.color_modifiers:
if not m.use:
continue
@@ -1360,7 +1099,7 @@ def process(layer_name, lineset_name):
shaders_list.append(ColorDistanceFromCameraShader(
m.blend, m.influence, m.color_ramp,
m.range_min, m.range_max))
- elif m.type == 'DISTANCE_FROM_OBJECT':
+ elif m.type == 'DISTANCE_FROM_OBJECT' and m.target is not None:
shaders_list.append(ColorDistanceFromObjectShader(
m.blend, m.influence, m.color_ramp, m.target,
m.range_min, m.range_max))
@@ -1378,7 +1117,7 @@ def process(layer_name, lineset_name):
shaders_list.append(AlphaDistanceFromCameraShader(
m.blend, m.influence, m.mapping, m.invert, m.curve,
m.range_min, m.range_max))
- elif m.type == 'DISTANCE_FROM_OBJECT':
+ elif m.type == 'DISTANCE_FROM_OBJECT' and m.target is not None:
shaders_list.append(AlphaDistanceFromObjectShader(
m.blend, m.influence, m.mapping, m.invert, m.curve, m.target,
m.range_min, m.range_max))
@@ -1399,7 +1138,7 @@ def process(layer_name, lineset_name):
thickness_position, linestyle.thickness_ratio,
m.blend, m.influence, m.mapping, m.invert, m.curve,
m.range_min, m.range_max, m.value_min, m.value_max))
- elif m.type == 'DISTANCE_FROM_OBJECT':
+ elif m.type == 'DISTANCE_FROM_OBJECT' and m.target is not None:
shaders_list.append(ThicknessDistanceFromObjectShader(
thickness_position, linestyle.thickness_ratio,
m.blend, m.influence, m.mapping, m.invert, m.curve, m.target,
@@ -1414,10 +1153,17 @@ def process(layer_name, lineset_name):
thickness_position, linestyle.thickness_ratio,
m.blend, m.influence,
m.orientation, m.thickness_min, m.thickness_max))
+ if linestyle.use_texture:
+ textures = tuple(BlenderTextureShader(slot) for slot in linestyle.texture_slots if slot is not None)
+ if textures:
+ shaders_list.extend(textures)
+ shaders_list.append(StrokeTextureStepShader(linestyle.texture_spacing))
+ # -- Stroke caps -- #
if linestyle.caps == 'ROUND':
shaders_list.append(RoundCapShader())
elif linestyle.caps == 'SQUARE':
shaders_list.append(SquareCapShader())
+ # -- Dashed line -- #
if linestyle.use_dashed_line:
pattern = []
if linestyle.dash1 > 0 and linestyle.gap1 > 0: