diff options
author | Tamito Kajiyama <rd6t-kjym@asahi-net.or.jp> | 2013-01-28 00:17:49 +0400 |
---|---|---|
committer | Tamito Kajiyama <rd6t-kjym@asahi-net.or.jp> | 2013-01-28 00:17:49 +0400 |
commit | 156acd3370a4f9090dc1507f275bf2cb695ac371 (patch) | |
tree | f19c6234d4ddb2bbf193f49cda2b224fc62d64fb /release/scripts/freestyle | |
parent | 39f8b95443f5dc83a24fb2c85847ea45e018925e (diff) |
Freestyle Python API improvements - part 1.
* The API syntax of StrokeVertex and StrokeAttribute was updated to use getter/setter
properties instead of class methods. Python style modules (including the Parameter Editor
implementation) were updated accordingly.
* Code clean-up was done for a few Python style modules, mostly by removing duplicated
definitions of stroke shaders and fixing indentation.
Diffstat (limited to 'release/scripts/freestyle')
6 files changed, 214 insertions, 354 deletions
diff --git a/release/scripts/freestyle/style_modules/anisotropic_diffusion.py b/release/scripts/freestyle/style_modules/anisotropic_diffusion.py index 7e7ebf647bd..83cc9991d8b 100644 --- a/release/scripts/freestyle/style_modules/anisotropic_diffusion.py +++ b/release/scripts/freestyle/style_modules/anisotropic_diffusion.py @@ -27,48 +27,21 @@ from freestyle_init import * from logical_operators import * -from PredicatesB1D import * from shaders import * -from PredicatesU0D import * -from math import * -## thickness modifiers - -normalInfo=Normal2DF0D() -curvatureInfo=Curvature2DAngleF0D() - -def edgestopping(x, sigma): - return exp(- x*x/(2*sigma*sigma)) - -class pyDiffusion2Shader(StrokeShader): - def __init__(self, lambda1, nbIter): - StrokeShader.__init__(self) - self._lambda = lambda1 - self._nbIter = nbIter - def getName(self): - return "pyDiffusionShader" - def shade(self, stroke): - for i in range (1, self._nbIter): - it = stroke.strokeVerticesBegin() - while it.isEnd() == 0: - v=it.getObject() - p1 = v.getPoint() - p2 = normalInfo(it.castToInterface0DIterator())*self._lambda*curvatureInfo(it.castToInterface0DIterator()) - v.setPoint(p1+p2) - it.increment() +# pyDiffusion2Shader parameters +offset = 0.25 +nbIter = 30 upred = AndUP1D(QuantitativeInvisibilityUP1D(0), ExternalContourUP1D()) Operators.select( upred ) -bpred = TrueBP1D(); +bpred = TrueBP1D() Operators.bidirectionalChain(ChainPredicateIterator(upred, bpred), NotUP1D(upred) ) -shaders_list = [ - ConstantThicknessShader(4), - StrokeTextureShader("smoothAlpha.bmp", Stroke.OPAQUE_MEDIUM, 0), - SamplingShader(2), - pyDiffusion2Shader(-0.03, 30), - IncreasingColorShader(1.0,0.0,0.0,1, 0, 1, 0, 1) - ] +shaders_list = [ + ConstantThicknessShader(4), + StrokeTextureShader("smoothAlpha.bmp", Stroke.OPAQUE_MEDIUM, 0), + SamplingShader(2), + pyDiffusion2Shader(offset, nbIter), + IncreasingColorShader(1, 0, 0, 1, 0, 1, 0, 1) + ] Operators.create(TrueUP1D(), shaders_list) - - - diff --git 
a/release/scripts/freestyle/style_modules/curvature2d.py b/release/scripts/freestyle/style_modules/curvature2d.py index fc2bcab4946..0c013997a16 100644 --- a/release/scripts/freestyle/style_modules/curvature2d.py +++ b/release/scripts/freestyle/style_modules/curvature2d.py @@ -28,33 +28,13 @@ from freestyle_init import * from logical_operators import * -from PredicatesB1D import * from shaders import * -class py2DCurvatureColorShader(StrokeShader): - def getName(self): - return "py2DCurvatureColorShader" - - def shade(self, stroke): - it = stroke.strokeVerticesBegin() - it_end = stroke.strokeVerticesEnd() - func = Curvature2DAngleF0D() - while it.isEnd() == 0: - it0D = it.castToInterface0DIterator() - sv = it.getObject() - att = sv.attribute() - c = func(it0D) - if (c<0): - print("negative 2D curvature") - color = 10.0 * c/3.1415 - att.setColor(color,color,color); - it.increment() - Operators.select(QuantitativeInvisibilityUP1D(0)) Operators.bidirectionalChain(ChainSilhouetteIterator(), NotUP1D(QuantitativeInvisibilityUP1D(0))) -shaders_list = [ - StrokeTextureShader("smoothAlpha.bmp", Stroke.OPAQUE_MEDIUM, 0), - ConstantThicknessShader(5), - py2DCurvatureColorShader() - ] +shaders_list = [ + StrokeTextureShader("smoothAlpha.bmp", Stroke.OPAQUE_MEDIUM, 0), + ConstantThicknessShader(5), + py2DCurvatureColorShader() + ] Operators.create(TrueUP1D(), shaders_list) diff --git a/release/scripts/freestyle/style_modules/parameter_editor.py b/release/scripts/freestyle/style_modules/parameter_editor.py index 76ca49adaa6..1cec55fc4d4 100644 --- a/release/scripts/freestyle/style_modules/parameter_editor.py +++ b/release/scripts/freestyle/style_modules/parameter_editor.py @@ -109,7 +109,7 @@ class ThicknessModifierMixIn: outer, inner = inner, outer else: outer = inner = (outer + inner) / 2 - sv.attribute().setThickness(outer, inner) + sv.attribute.thickness = (outer, inner) class ThicknessBlenderMixIn(ThicknessModifierMixIn): def __init__(self, position, ratio): @@ -171,7 
+171,7 @@ def iter_t2d_along_stroke(stroke): distance = 0.0 it = stroke.strokeVerticesBegin() while not it.isEnd(): - p = it.getObject().getPoint() + p = it.getObject().point if not it.isBegin(): distance += (prev - p).length prev = p @@ -184,22 +184,20 @@ class ColorAlongStrokeShader(ColorRampModifier): return "ColorAlongStrokeShader" def shade(self, stroke): for it, t in iter_t2d_along_stroke(stroke): - attr = it.getObject().attribute() - a = attr.getColorRGB() + sv = it.getObject() + a = sv.attribute.color b = self.evaluate(t) - c = self.blend_ramp(a, b) - attr.setColor(c) + sv.attribute.color = self.blend_ramp(a, b) class AlphaAlongStrokeShader(CurveMappingModifier): def getName(self): return "AlphaAlongStrokeShader" def shade(self, stroke): for it, t in iter_t2d_along_stroke(stroke): - attr = it.getObject().attribute() - a = attr.getAlpha() + sv = it.getObject() + a = sv.attribute.alpha b = self.evaluate(t) - c = self.blend(a, b) - attr.setAlpha(c) + sv.attribute.alpha = self.blend(a, b) class ThicknessAlongStrokeShader(ThicknessBlenderMixIn, CurveMappingModifier): def __init__(self, thickness_position, thickness_ratio, @@ -213,7 +211,7 @@ class ThicknessAlongStrokeShader(ThicknessBlenderMixIn, CurveMappingModifier): def shade(self, stroke): for it, t in iter_t2d_along_stroke(stroke): sv = it.getObject() - a = sv.attribute().getThicknessRL() + a = sv.attribute.thickness b = self.__value_min + self.evaluate(t) * (self.__value_max - self.__value_min) c = self.blend_thickness(a[0], a[1], b) self.set_thickness(sv, c[0], c[1]) @@ -244,11 +242,10 @@ class ColorDistanceFromCameraShader(ColorRampModifier): return "ColorDistanceFromCameraShader" def shade(self, stroke): for it, t in iter_distance_from_camera(stroke, self.__range_min, self.__range_max): - attr = it.getObject().attribute() - a = attr.getColorRGB() + sv = it.getObject() + a = sv.attribute.color b = self.evaluate(t) - c = self.blend_ramp(a, b) - attr.setColor(c) + sv.attribute.color = self.blend_ramp(a, b) 
class AlphaDistanceFromCameraShader(CurveMappingModifier): def __init__(self, blend, influence, mapping, invert, curve, range_min, range_max): @@ -259,11 +256,10 @@ class AlphaDistanceFromCameraShader(CurveMappingModifier): return "AlphaDistanceFromCameraShader" def shade(self, stroke): for it, t in iter_distance_from_camera(stroke, self.__range_min, self.__range_max): - attr = it.getObject().attribute() - a = attr.getAlpha() + sv = it.getObject() + a = sv.attribute.alpha b = self.evaluate(t) - c = self.blend(a, b) - attr.setAlpha(c) + sv.attribute.alpha = self.blend(a, b) class ThicknessDistanceFromCameraShader(ThicknessBlenderMixIn, CurveMappingModifier): def __init__(self, thickness_position, thickness_ratio, @@ -279,7 +275,7 @@ class ThicknessDistanceFromCameraShader(ThicknessBlenderMixIn, CurveMappingModif def shade(self, stroke): for it, t in iter_distance_from_camera(stroke, self.__range_min, self.__range_max): sv = it.getObject() - a = sv.attribute().getThicknessRL() + a = sv.attribute.thickness b = self.__value_min + self.evaluate(t) * (self.__value_max - self.__value_min) c = self.blend_thickness(a[0], a[1], b) self.set_thickness(sv, c[0], c[1]) @@ -317,11 +313,10 @@ class ColorDistanceFromObjectShader(ColorRampModifier): if self.__target is None: return for it, t in iter_distance_from_object(stroke, self.__target, self.__range_min, self.__range_max): - attr = it.getObject().attribute() - a = attr.getColorRGB() + sv = it.getObject() + a = sv.attribute.color b = self.evaluate(t) - c = self.blend_ramp(a, b) - attr.setColor(c) + sv.attribute.color = self.blend_ramp(a, b) class AlphaDistanceFromObjectShader(CurveMappingModifier): def __init__(self, blend, influence, mapping, invert, curve, target, range_min, range_max): @@ -335,11 +330,10 @@ class AlphaDistanceFromObjectShader(CurveMappingModifier): if self.__target is None: return for it, t in iter_distance_from_object(stroke, self.__target, self.__range_min, self.__range_max): - attr = 
it.getObject().attribute() - a = attr.getAlpha() + sv = it.getObject() + a = sv.attribute.alpha b = self.evaluate(t) - c = self.blend(a, b) - attr.setAlpha(c) + sv.attribute.alpha = self.blend(a, b) class ThicknessDistanceFromObjectShader(ThicknessBlenderMixIn, CurveMappingModifier): def __init__(self, thickness_position, thickness_ratio, @@ -358,7 +352,7 @@ class ThicknessDistanceFromObjectShader(ThicknessBlenderMixIn, CurveMappingModif return for it, t in iter_distance_from_object(stroke, self.__target, self.__range_min, self.__range_max): sv = it.getObject() - a = sv.attribute().getThicknessRL() + a = sv.attribute.thickness b = self.__value_min + self.evaluate(t) * (self.__value_max - self.__value_min) c = self.blend_thickness(a[0], a[1], b) self.set_thickness(sv, c[0], c[1]) @@ -429,17 +423,15 @@ class ColorMaterialShader(ColorRampModifier): def shade(self, stroke): if self.__material_attr in ["DIFF", "SPEC"] and not self.__use_ramp: for it, b in iter_material_color(stroke, self.__material_attr): - attr = it.getObject().attribute() - a = attr.getColorRGB() - c = self.blend_ramp(a, b) - attr.setColor(c) + sv = it.getObject() + a = sv.attribute.color + sv.attribute.color = self.blend_ramp(a, b) else: for it, t in iter_material_value(stroke, self.__material_attr): - attr = it.getObject().attribute() - a = attr.getColorRGB() + sv = it.getObject() + a = sv.attribute.color b = self.evaluate(t) - c = self.blend_ramp(a, b) - attr.setColor(c) + sv.attribute.color = self.blend_ramp(a, b) class AlphaMaterialShader(CurveMappingModifier): def __init__(self, blend, influence, mapping, invert, curve, material_attr): @@ -449,11 +441,10 @@ class AlphaMaterialShader(CurveMappingModifier): return "AlphaMaterialShader" def shade(self, stroke): for it, t in iter_material_value(stroke, self.__material_attr): - attr = it.getObject().attribute() - a = attr.getAlpha() + sv = it.getObject() + a = sv.attribute.alpha b = self.evaluate(t) - c = self.blend(a, b) - attr.setAlpha(c) + 
sv.attribute.alpha = self.blend(a, b) class ThicknessMaterialShader(ThicknessBlenderMixIn, CurveMappingModifier): def __init__(self, thickness_position, thickness_ratio, @@ -468,7 +459,7 @@ class ThicknessMaterialShader(ThicknessBlenderMixIn, CurveMappingModifier): def shade(self, stroke): for it, t in iter_material_value(stroke, self.__material_attr): sv = it.getObject() - a = sv.attribute().getThicknessRL() + a = sv.attribute.thickness b = self.__value_min + self.evaluate(t) * (self.__value_max - self.__value_min) c = self.blend_thickness(a[0], a[1], b) self.set_thickness(sv, c[0], c[1]) @@ -492,7 +483,7 @@ class CalligraphicThicknessShader(ThicknessBlenderMixIn, ScalarBlendModifier): orthDir.normalize() fac = abs(orthDir * self.__orientation) sv = it.getObject() - a = sv.attribute().getThicknessRL() + a = sv.attribute.thickness b = self.__min_thickness + fac * (self.__max_thickness - self.__min_thickness) b = max(b, 0.0) c = self.blend_thickness(a[0], a[1], b) @@ -505,7 +496,7 @@ def iter_distance_along_stroke(stroke): distance = 0.0 it = stroke.strokeVerticesBegin() while not it.isEnd(): - p = it.getObject().getPoint() + p = it.getObject().point if not it.isBegin(): distance += (prev - p).length prev = p @@ -525,10 +516,8 @@ class SinusDisplacementShader(StrokeShader): for it, distance in iter_distance_along_stroke(stroke): v = it.getObject() n = self._getNormal(it.castToInterface0DIterator()) - p = v.getPoint() - u = v.u() n = n * self._amplitude * math.cos(distance / self._wavelength * 2 * math.pi + self._phase) - v.setPoint(p + n) + v.point = v.point + n stroke.UpdateLength() class PerlinNoise1DShader(StrokeShader): @@ -546,8 +535,8 @@ class PerlinNoise1DShader(StrokeShader): it = stroke.strokeVerticesBegin() while not it.isEnd(): v = it.getObject() - nres = self.__noise.turbulence1(length * v.u(), self.__freq, self.__amp, self.__oct) - v.setPoint(v.getPoint() + nres * self.__dir) + nres = self.__noise.turbulence1(length * v.u, self.__freq, self.__amp, 
self.__oct) + v.point = v.point + nres * self.__dir it.increment() stroke.UpdateLength() @@ -565,9 +554,9 @@ class PerlinNoise2DShader(StrokeShader): it = stroke.strokeVerticesBegin() while not it.isEnd(): v = it.getObject() - vec = Vector([v.getProjectedX(), v.getProjectedY()]) + vec = Vector([v.getProjectedX(), v.getProjectedY()]) # FIXME nres = self.__noise.turbulence2(vec, self.__freq, self.__amp, self.__oct) - v.setPoint(v.getPoint() + nres * self.__dir) + v.point = v.point + nres * self.__dir it.increment() stroke.UpdateLength() @@ -584,12 +573,11 @@ class Offset2DShader(StrokeShader): it = stroke.strokeVerticesBegin() while not it.isEnd(): v = it.getObject() - u = v.u() + u = v.u a = self.__start + u * (self.__end - self.__start) n = self.__getNormal(it.castToInterface0DIterator()) n = n * a - p = v.getPoint() - v.setPoint(p + n + self.__xy) + v.point = v.point + n + self.__xy it.increment() stroke.UpdateLength() @@ -609,19 +597,19 @@ class Transform2DShader(StrokeShader): # determine the pivot of scaling and rotation operations if self.__pivot == "START": it = stroke.strokeVerticesBegin() - pivot = it.getObject().getPoint() + pivot = it.getObject().point elif self.__pivot == "END": it = stroke.strokeVerticesEnd() it.decrement() - pivot = it.getObject().getPoint() + pivot = it.getObject().point elif self.__pivot == "PARAM": p = None it = stroke.strokeVerticesBegin() while not it.isEnd(): prev = p v = it.getObject() - p = v.getPoint() - u = v.u() + p = v.point + u = v.u if self.__pivot_u < u: break it.increment() @@ -635,7 +623,7 @@ class Transform2DShader(StrokeShader): n = 0 it = stroke.strokeVerticesBegin() while not it.isEnd(): - p = it.getObject().getPoint() + p = it.getObject().point pivot = pivot + p n = n + 1 it.increment() @@ -649,13 +637,13 @@ class Transform2DShader(StrokeShader): it = stroke.strokeVerticesBegin() while not it.isEnd(): v = it.getObject() - p = v.getPoint() + p = v.point p = p - pivot x = p.x * self.__scale_x y = p.y * 
self.__scale_y p.x = x * cos_theta - y * sin_theta p.y = x * sin_theta + y * cos_theta - v.setPoint(p + pivot) + v.point = p + pivot it.increment() stroke.UpdateLength() @@ -701,7 +689,7 @@ def iter_stroke_vertices(stroke): prev_p = None while not it.isEnd(): sv = it.getObject() - p = sv.getPoint() + p = sv.point if prev_p is None or (prev_p - p).length > 1e-6: yield sv prev_p = p @@ -715,15 +703,15 @@ class RoundCapShader(StrokeShader): # save the location and attribute of stroke vertices buffer = [] for sv in iter_stroke_vertices(stroke): - buffer.append((sv.getPoint(), sv.attribute())) + buffer.append((Vector(sv.point), StrokeAttribute(sv.attribute))) nverts = len(buffer) if nverts < 2: return # calculate the number of additional vertices to form caps - R, L = stroke[0].attribute().getThicknessRL() + R, L = stroke[0].attribute.thickness caplen_beg = (R + L) / 2.0 nverts_beg = max(5, int(R + L)) - R, L = stroke[-1].attribute().getThicknessRL() + R, L = stroke[-1].attribute.thickness caplen_end = (R + L) / 2.0 nverts_end = max(5, int(R + L)) # adjust the total number of stroke vertices @@ -731,34 +719,34 @@ class RoundCapShader(StrokeShader): # restore the location and attribute of the original vertices for i in range(nverts): p, attr = buffer[i] - stroke[nverts_beg + i].setPoint(p) - stroke[nverts_beg + i].setAttribute(attr) + stroke[nverts_beg + i].point = p + stroke[nverts_beg + i].attribute = attr # reshape the cap at the beginning of the stroke q, attr = buffer[1] p, attr = buffer[0] d = p - q d = d / d.length * caplen_beg n = 1.0 / nverts_beg - R, L = attr.getThicknessRL() + R, L = attr.thickness for i in range(nverts_beg): t = (nverts_beg - i) * n - stroke[i].setPoint(p + d * t) + stroke[i].point = p + d * t r = self.round_cap_thickness((nverts_beg - i + 1) * n) - stroke[i].setAttribute(attr) - stroke[i].attribute().setThickness(R * r, L * r) + stroke[i].attribute = attr + stroke[i].attribute.thickness = (R * r, L * r) # reshape the cap at the end of the 
stroke q, attr = buffer[-2] p, attr = buffer[-1] d = p - q d = d / d.length * caplen_end n = 1.0 / nverts_end - R, L = attr.getThicknessRL() + R, L = attr.thickness for i in range(nverts_end): t = (nverts_end - i) * n - stroke[-i-1].setPoint(p + d * t) + stroke[-i-1].point = p + d * t r = self.round_cap_thickness((nverts_end - i + 1) * n) - stroke[-i-1].setAttribute(attr) - stroke[-i-1].attribute().setThickness(R * r, L * r) + stroke[-i-1].attribute = attr + stroke[-i-1].attribute.thickness = (R * r, L * r) # update the curvilinear 2D length of each vertex stroke.UpdateLength() @@ -767,15 +755,15 @@ class SquareCapShader(StrokeShader): # save the location and attribute of stroke vertices buffer = [] for sv in iter_stroke_vertices(stroke): - buffer.append((sv.getPoint(), sv.attribute())) + buffer.append((Vector(sv.point), StrokeAttribute(sv.attribute))) nverts = len(buffer) if nverts < 2: return # calculate the number of additional vertices to form caps - R, L = stroke[0].attribute().getThicknessRL() + R, L = stroke[0].attribute.thickness caplen_beg = (R + L) / 2.0 nverts_beg = 1 - R, L = stroke[-1].attribute().getThicknessRL() + R, L = stroke[-1].attribute.thickness caplen_end = (R + L) / 2.0 nverts_end = 1 # adjust the total number of stroke vertices @@ -783,20 +771,20 @@ class SquareCapShader(StrokeShader): # restore the location and attribute of the original vertices for i in range(nverts): p, attr = buffer[i] - stroke[nverts_beg + i].setPoint(p) - stroke[nverts_beg + i].setAttribute(attr) + stroke[nverts_beg + i].point = p + stroke[nverts_beg + i].attribute = attr # reshape the cap at the beginning of the stroke q, attr = buffer[1] p, attr = buffer[0] d = p - q - stroke[0].setPoint(p + d / d.length * caplen_beg) - stroke[0].setAttribute(attr) + stroke[0].point = p + d / d.length * caplen_beg + stroke[0].attribute = attr # reshape the cap at the end of the stroke q, attr = buffer[-2] p, attr = buffer[-1] d = p - q - stroke[-1].setPoint(p + d / d.length * 
caplen_beg) - stroke[-1].setAttribute(attr) + stroke[-1].point = p + d / d.length * caplen_beg + stroke[-1].attribute = attr # update the curvilinear 2D length of each vertex stroke.UpdateLength() @@ -872,7 +860,7 @@ class DashedLineShader(StrokeShader): if index == len(self._pattern): index = 0 visible = not visible - it.getObject().attribute().setVisible(visible) + it.getObject().attribute.visible = visible it.increment() # predicates for chaining diff --git a/release/scripts/freestyle/style_modules/shaders.py b/release/scripts/freestyle/style_modules/shaders.py index f05042f8332..85ba2da0e25 100644 --- a/release/scripts/freestyle/style_modules/shaders.py +++ b/release/scripts/freestyle/style_modules/shaders.py @@ -28,7 +28,7 @@ class pyDepthDiscontinuityThicknessShader(StrokeShader): while it.isEnd() == 0: z = self.__func(it.castToInterface0DIterator()) thickness = a*z+b - it.getObject().attribute().setThickness(thickness, thickness) + it.getObject().attribute.thickness = (thickness, thickness) it.increment() class pyConstantThicknessShader(StrokeShader): @@ -42,9 +42,8 @@ class pyConstantThicknessShader(StrokeShader): it = stroke.strokeVerticesBegin() it_end = stroke.strokeVerticesEnd() while it.isEnd() == 0: - att = it.getObject().attribute() t = self._thickness/2.0 - att.setThickness(t, t) + it.getObject().attribute.thickness = (t, t) it.increment() class pyFXSThicknessShader(StrokeShader): @@ -58,9 +57,8 @@ class pyFXSThicknessShader(StrokeShader): it = stroke.strokeVerticesBegin() it_end = stroke.strokeVerticesEnd() while it.isEnd() == 0: - att = it.getObject().attribute() t = self._thickness/2.0 - att.setThickness(t, t) + it.getObject().attribute.thickness = (t, t) it.increment() class pyFXSVaryingThicknessWithDensityShader(StrokeShader): @@ -81,7 +79,6 @@ class pyFXSVaryingThicknessWithDensityShader(StrokeShader): it_end = stroke.strokeVerticesEnd() func = DensityF0D(self.wsize) while it.isEnd() == 0: - att = it.getObject().attribute() toto = 
it.castToInterface0DIterator() c= func(toto) if (c < self.threshold_min ): @@ -90,9 +87,10 @@ class pyFXSVaryingThicknessWithDensityShader(StrokeShader): c = self.threshold_max ## t = (c - self.threshold_min)/(self.threshold_max - self.threshold_min)*(self._thicknessMax-self._thicknessMin) + self._thicknessMin t = (self.threshold_max - c )/(self.threshold_max - self.threshold_min)*(self._thicknessMax-self._thicknessMin) + self._thicknessMin - att.setThickness(t/2.0, t/2.0) + it.getObject().attribute.thickness = (t/2.0, t/2.0) i = i+1 it.increment() + class pyIncreasingThicknessShader(StrokeShader): def __init__(self, thicknessMin, thicknessMax): StrokeShader.__init__(self) @@ -107,13 +105,12 @@ class pyIncreasingThicknessShader(StrokeShader): it = stroke.strokeVerticesBegin() it_end = stroke.strokeVerticesEnd() while it.isEnd() == 0: - att = it.getObject().attribute() c = float(i)/float(n) if(i < float(n)/2.0): t = (1.0 - c)*self._thicknessMin + c * self._thicknessMax else: t = (1.0 - c)*self._thicknessMax + c * self._thicknessMin - att.setThickness(t/2.0, t/2.0) + it.getObject().attribute.thickness = (t/2.0, t/2.0) i = i+1 it.increment() @@ -139,15 +136,15 @@ class pyConstrainedIncreasingThicknessShader(StrokeShader): it = stroke.strokeVerticesBegin() it_end = stroke.strokeVerticesEnd() while it.isEnd() == 0: - att = it.getObject().attribute() + att = it.getObject().attribute c = float(i)/float(n) if(i < float(n)/2.0): t = (1.0 - c)*self._thicknessMin + c * maxT else: t = (1.0 - c)*maxT + c * self._thicknessMin - att.setThickness(t/2.0, t/2.0) + att.thickness = (t/2.0, t/2.0) if(i == n-1): - att.setThickness(self._thicknessMin/2.0, self._thicknessMin/2.0) + att.thickness = (self._thicknessMin/2.0, self._thicknessMin/2.0) i = i+1 it.increment() @@ -172,10 +169,9 @@ class pyDecreasingThicknessShader(StrokeShader): it = stroke.strokeVerticesBegin() it_end = stroke.strokeVerticesEnd() while it.isEnd() == 0: - att = it.getObject().attribute() c = float(i)/float(n) t = 
(1.0 - c)*tMax +c*tMin - att.setThickness(t/2.0, t/2.0) + it.getObject().attribute.thickness = (t/2.0, t/2.0) i = i+1 it.increment() @@ -198,14 +194,13 @@ class pyNonLinearVaryingThicknessShader(StrokeShader): it = stroke.strokeVerticesBegin() it_end = stroke.strokeVerticesEnd() while it.isEnd() == 0: - att = it.getObject().attribute() if(i < float(n)/2.0): c = float(i)/float(n) else: c = float(n-i)/float(n) c = smoothC(c, self._exponent) t = (1.0 - c)*self._thicknessMax + c * self._thicknessMin - att.setThickness(t/2.0, t/2.0) + it.getObject().attribute.thickness = (t/2.0, t/2.0) i = i+1 it.increment() @@ -231,13 +226,12 @@ class pySLERPThicknessShader(StrokeShader): it = stroke.strokeVerticesBegin() it_end = stroke.strokeVerticesEnd() while it.isEnd() == 0: - att = it.getObject().attribute() c = float(i)/float(n) if(i < float(n)/2.0): t = sin((1-c)*self._omega)/sinh(self._omega)*self._thicknessMin + sin(c*self._omega)/sinh(self._omega) * maxT else: t = sin((1-c)*self._omega)/sinh(self._omega)*maxT + sin(c*self._omega)/sinh(self._omega) * self._thicknessMin - att.setThickness(t/2.0, t/2.0) + it.getObject().attribute.thickness = (t/2.0, t/2.0) i = i+1 it.increment() @@ -266,33 +260,30 @@ class pyTVertexThickenerShader(StrokeShader): ## FIXME it3 = StrokeVertexIterator(it) count = 0 while (it3.isEnd() == 0 and count < n): - att = it3.getObject().attribute() - tr = att.getThicknessR(); - tl = att.getThicknessL(); + att = it3.getObject().attribute + (tr, tl) = att.thickness r = (a-1.0)/float(n-1)*(float(n)/float(count+1) - 1) + 1 #r = (1.0-a)/float(n-1)*count + a - att.setThickness(r*tr, r*tl) + att.thickness = (r*tr, r*tl) it3.increment() count = count + 1 if(it2.isEnd()): it4 = StrokeVertexIterator(it) count = 0 while (it4.isBegin() == 0 and count < n): - att = it4.getObject().attribute() - tr = att.getThicknessR(); - tl = att.getThicknessL(); + att = it4.getObject().attribute + (tr, tl) = att.thickness r = (a-1.0)/float(n-1)*(float(n)/float(count+1) - 1) + 1 #r = 
(1.0-a)/float(n-1)*count + a - att.setThickness(r*tr, r*tl) + att.thickness = (r*tr, r*tl) it4.decrement() count = count + 1 if ((it4.isBegin() == 1)): - att = it4.getObject().attribute() - tr = att.getThicknessR(); - tl = att.getThicknessL(); + att = it4.getObject().attribute + (tr, tl) = att.thickness r = (a-1.0)/float(n-1)*(float(n)/float(count+1) - 1) + 1 #r = (1.0-a)/float(n-1)*count + a - att.setThickness(r*tr, r*tl) + att.thickness = (r*tr, r*tl) it.increment() class pyImportance2DThicknessShader(StrokeShader): @@ -317,10 +308,9 @@ class pyImportance2DThicknessShader(StrokeShader): k = self._kmin else: k = (self._kmax*(self._w-d) + self._kmin*d)/self._w - att = v.attribute() - tr = att.getThicknessR() - tl = att.getThicknessL() - att.setThickness(k*tr/2.0, k*tl/2.0) + att = v.attribute + (tr, tl) = att.thickness + att.thickness = (k*tr/2.0, k*tl/2.0) it.increment() class pyImportance3DThicknessShader(StrokeShader): @@ -346,10 +336,9 @@ class pyImportance3DThicknessShader(StrokeShader): k = self._kmin else: k = (self._kmax*(self._w-d) + self._kmin*d)/self._w - att = v.attribute() - tr = att.getThicknessR() - tl = att.getThicknessL() - att.setThickness(k*tr/2.0, k*tl/2.0) + att = v.attribute + (tr, tl) = att.thickness + att.thickness = (k*tr/2.0, k*tl/2.0) it.increment() class pyZDependingThicknessShader(StrokeShader): @@ -376,7 +365,7 @@ class pyZDependingThicknessShader(StrokeShader): while it.isEnd() == 0: z = (self.__func(it.castToInterface0DIterator()) - z_min) * z_diff thickness = (1 - z) * self.__max + z * self.__min - it.getObject().attribute().setThickness(thickness, thickness) + it.getObject().attribute.thickness = (thickness, thickness) it.increment() @@ -396,9 +385,9 @@ class pyConstantColorShader(StrokeShader): it = stroke.strokeVerticesBegin() it_end = stroke.strokeVerticesEnd() while it.isEnd() == 0: - att = it.getObject().attribute() - att.setColor(self._r, self._g, self._b) - att.setAlpha(self._a) + att = it.getObject().attribute + att.color = 
(self._r, self._g, self._b) + att.alpha = self._a it.increment() #c1->c2 @@ -415,13 +404,13 @@ class pyIncreasingColorShader(StrokeShader): it = stroke.strokeVerticesBegin() it_end = stroke.strokeVerticesEnd() while it.isEnd() == 0: - att = it.getObject().attribute() + att = it.getObject().attribute c = float(inc)/float(n) - att.setColor( (1-c)*self._c1[0] + c*self._c2[0], - (1-c)*self._c1[1] + c*self._c2[1], - (1-c)*self._c1[2] + c*self._c2[2],) - att.setAlpha((1-c)*self._c1[3] + c*self._c2[3],) + att.color = ((1-c)*self._c1[0] + c*self._c2[0], + (1-c)*self._c1[1] + c*self._c2[1], + (1-c)*self._c1[2] + c*self._c2[2]) + att.alpha = (1-c)*self._c1[3] + c*self._c2[3] inc = inc+1 it.increment() @@ -439,13 +428,13 @@ class pyInterpolateColorShader(StrokeShader): it = stroke.strokeVerticesBegin() it_end = stroke.strokeVerticesEnd() while it.isEnd() == 0: - att = it.getObject().attribute() + att = it.getObject().attribute u = float(inc)/float(n) c = 1-2*(fabs(u-0.5)) - att.setColor( (1-c)*self._c1[0] + c*self._c2[0], - (1-c)*self._c1[1] + c*self._c2[1], - (1-c)*self._c1[2] + c*self._c2[2],) - att.setAlpha((1-c)*self._c1[3] + c*self._c2[3],) + att.color = ((1-c)*self._c1[0] + c*self._c2[0], + (1-c)*self._c1[1] + c*self._c2[1], + (1-c)*self._c1[2] + c*self._c2[2]) + att.alpha = (1-c)*self._c1[3] + c*self._c2[3] inc = inc+1 it.increment() @@ -511,8 +500,7 @@ class pyMaterialColorShader(StrokeShader): g = max(0,g) b = max(0,b) - att = it.getObject().attribute() - att.setColor(r, g, b) + it.getObject().attribute.color = (r, g, b) it.increment() class pyRandomColorShader(StrokeShader): @@ -529,7 +517,7 @@ class pyRandomColorShader(StrokeShader): print(c0, c1, c2) it = stroke.strokeVerticesBegin() while(it.isEnd() == 0): - it.getObject().attribute().setColor(c0,c1,c2) + it.getObject().attribute.color = (c0,c1,c2) it.increment() class py2DCurvatureColorShader(StrokeShader): @@ -538,18 +526,13 @@ class py2DCurvatureColorShader(StrokeShader): def shade(self, stroke): it = 
stroke.strokeVerticesBegin() - it_end = stroke.strokeVerticesEnd() func = Curvature2DAngleF0D() while it.isEnd() == 0: - toto = it.castToInterface0DIterator() - sv = it.getObject() - att = sv.attribute() - c = func(toto) - if (c<0): + c = func(it.castToInterface0DIterator()) + if c < 0: print("negative 2D curvature") color = 10.0 * c/3.1415 - print(color) - att.setColor(color,color,color); + it.getObject().attribute.color = (color, color, color) it.increment() class pyTimeColorShader(StrokeShader): @@ -562,8 +545,7 @@ class pyTimeColorShader(StrokeShader): it = stroke.strokeVerticesBegin() it_end = stroke.strokeVerticesEnd() while it.isEnd() == 0: - att = it.getObject().attribute() - att.setColor(c,c,c) + it.getObject().attribute.color = (c,c,c) it.increment() self._t = self._t+self._step @@ -607,8 +589,8 @@ class pyBackboneStretcherShader(StrokeShader): dn.normalize() newFirst = p0+d1*float(self._l) newLast = pn+dn*float(self._l) - v0.setPoint(newFirst) - vn.setPoint(newLast) + v0.point = newFirst + vn.point = newLast stroke.UpdateLength() class pyLengthDependingBackboneStretcherShader(StrokeShader): @@ -641,8 +623,8 @@ class pyLengthDependingBackboneStretcherShader(StrokeShader): dn.normalize() newFirst = p0+d1*float(stretch) newLast = pn+dn*float(stretch) - v0.setPoint(newFirst) - vn.setPoint(newLast) + v0.point = newFirst + vn.point = newLast stroke.UpdateLength() @@ -655,23 +637,23 @@ class pyGuidingLineShader(StrokeShader): it = stroke.strokeVerticesBegin() ## get the first vertex itlast = stroke.strokeVerticesEnd() ## itlast.decrement() ## get the last one - t = itlast.getObject().getPoint() - it.getObject().getPoint() ## tangent direction + t = itlast.getObject().point - it.getObject().point ## tangent direction itmiddle = StrokeVertexIterator(it) ## - while(itmiddle.getObject().u()<0.5): ## look for the stroke middle vertex + while(itmiddle.getObject().u<0.5): ## look for the stroke middle vertex itmiddle.increment() ## it = StrokeVertexIterator(itmiddle) 
it.increment() while(it.isEnd() == 0): ## position all the vertices along the tangent for the right part - it.getObject().setPoint(itmiddle.getObject().getPoint() \ - +t*(it.getObject().u()-itmiddle.getObject().u())) + it.getObject().point = itmiddle.getObject().point \ + +t*(it.getObject().u-itmiddle.getObject().u) it.increment() it = StrokeVertexIterator(itmiddle) it.decrement() while(it.isBegin() == 0): ## position all the vertices along the tangent for the left part - it.getObject().setPoint(itmiddle.getObject().getPoint() \ - -t*(itmiddle.getObject().u()-it.getObject().u())) + it.getObject().point = itmiddle.getObject().point \ + -t*(itmiddle.getObject().u-it.getObject().u) it.decrement() - it.getObject().setPoint(itmiddle.getObject().getPoint()-t*(itmiddle.getObject().u())) ## first vertex + it.getObject().point = itmiddle.getObject().point-t*itmiddle.getObject().u ## first vertex stroke.UpdateLength() @@ -692,30 +674,28 @@ class pyBackboneStretcherNoCuspShader(StrokeShader): v0 = it0.getObject() v1 = it1.getObject() if((v0.getNature() & Nature.CUSP == 0) and (v1.getNature() & Nature.CUSP == 0)): - p0 = v0.getPoint() - p1 = v1.getPoint() + p0 = v0.point + p1 = v1.point d1 = p0-p1 d1.normalize() newFirst = p0+d1*float(self._l) - v0.setPoint(newFirst) + v0.point = newFirst vn_1 = itn_1.getObject() vn = itn.getObject() if((vn.getNature() & Nature.CUSP == 0) and (vn_1.getNature() & Nature.CUSP == 0)): - pn = vn.getPoint() - pn_1 = vn_1.getPoint() + pn = vn.point + pn_1 = vn_1.point dn = pn-pn_1 dn.normalize() newLast = pn+dn*float(self._l) - vn.setPoint(newLast) + vn.point = newLast stroke.UpdateLength() -normalInfo=Normal2DF0D() -curvatureInfo=Curvature2DAngleF0D() - -def edgestopping(x, sigma): - return exp(- x*x/(2*sigma*sigma)) - class pyDiffusion2Shader(StrokeShader): + """This shader iteratively adds an offset to the position of each + stroke vertex in the direction perpendicular to the stroke direction + at the point. 
The offset is scaled by the 2D curvature (i.e., how + quickly the stroke curve is) at the point.""" def __init__(self, lambda1, nbIter): StrokeShader.__init__(self) self._lambda = lambda1 @@ -729,9 +709,9 @@ class pyDiffusion2Shader(StrokeShader): it = stroke.strokeVerticesBegin() while it.isEnd() == 0: v=it.getObject() - p1 = v.getPoint() + p1 = v.point p2 = self._normalInfo(it.castToInterface0DIterator())*self._lambda*self._curvatureInfo(it.castToInterface0DIterator()) - v.setPoint(p1+p2) + v.point = p1+p2 it.increment() stroke.UpdateLength() @@ -750,14 +730,15 @@ class pyTipRemoverShader(StrokeShader): it = stroke.strokeVerticesBegin() while(it.isEnd() == 0): v = it.getObject() - if((v.curvilinearAbscissa() < self._l) or (v.strokeLength()-v.curvilinearAbscissa() < self._l)): + if((v.curvilinear_abscissa < self._l) or (v.stroke_length-v.curvilinear_abscissa < self._l)): verticesToRemove.append(v) - oldAttributes.append(StrokeAttribute(v.attribute())) + oldAttributes.append(StrokeAttribute(v.attribute)) it.increment() if(originalSize-len(verticesToRemove) < 2): return for sv in verticesToRemove: stroke.RemoveVertex(sv) + stroke.UpdateLength() stroke.Resample(originalSize) if(stroke.strokeVerticesSize() != originalSize): print("pyTipRemover: Warning: resampling problem") @@ -765,8 +746,7 @@ class pyTipRemoverShader(StrokeShader): for a in oldAttributes: if(it.isEnd() == 1): break - v = it.getObject() - v.setAttribute(a) + it.getObject().attribute = a it.increment() stroke.UpdateLength() @@ -827,7 +807,7 @@ class pyHLRShader(StrokeShader): else: invisible = 0 if(invisible == 1): - v.attribute().setVisible(0) + v.attribute.visible = False it.increment() it2.increment() @@ -871,7 +851,7 @@ class pyTVertexOrientationShader(StrokeShader): if(tv != None): dir = self.findOrientation(tv, ve) #print(dir.x, dir.y) - v.attribute().setAttributeVec2f("orientation", dir) + v.attribute.setAttributeVec2f("orientation", dir) while(it2.isEnd() == 0): vprevious = it.getObject() v = 
it2.getObject() @@ -881,7 +861,7 @@ class pyTVertexOrientationShader(StrokeShader): if(tv != None): dir = self.findOrientation(tv, ve) #print(dir.x, dir.y) - v.attribute().setAttributeVec2f("orientation", dir) + v.attribute.setAttributeVec2f("orientation", dir) it.increment() it2.increment() ## case where the last vertex is a TVertex @@ -894,7 +874,7 @@ class pyTVertexOrientationShader(StrokeShader): if(tv != None): dir = self.findOrientation(tv, ve) #print(dir.x, dir.y) - v.attribute().setAttributeVec2f("orientation", dir) + v.attribute.setAttributeVec2f("orientation", dir) class pySinusDisplacementShader(StrokeShader): def __init__(self, f, a): @@ -911,13 +891,13 @@ class pySinusDisplacementShader(StrokeShader): v = it.getObject() #print(self._getNormal.getName()) n = self._getNormal(it.castToInterface0DIterator()) - p = v.getPoint() - u = v.u() + p = v.point + u = v.u a = self._a*(1-2*(fabs(u-0.5))) n = n*a*cos(self._f*u*6.28) #print(n.x, n.y) - v.setPoint(p+n) - #v.setPoint(v.getPoint()+n*a*cos(f*v.u())) + v.point = p+n + #v.point = v.point+n*a*cos(f*v.u) it.increment() stroke.UpdateLength() @@ -936,7 +916,7 @@ class pyPerlinNoise1DShader(StrokeShader): v = it.getObject() i = v.getProjectedX() + v.getProjectedY() nres = self.__noise.turbulence1(i, self.__freq, self.__amp, self.__oct) - v.setPoint(v.getProjectedX() + nres, v.getProjectedY() + nres) + v.point = (v.getProjectedX() + nres, v.getProjectedY() + nres) it.increment() stroke.UpdateLength() @@ -955,7 +935,7 @@ class pyPerlinNoise2DShader(StrokeShader): v = it.getObject() vec = Vector([v.getProjectedX(), v.getProjectedY()]) nres = self.__noise.turbulence2(vec, self.__freq, self.__amp, self.__oct) - v.setPoint(v.getProjectedX() + nres, v.getProjectedY() + nres) + v.point = (v.getProjectedX() + nres, v.getProjectedY() + nres) it.increment() stroke.UpdateLength() @@ -971,10 +951,10 @@ class pyBluePrintCirclesShader(StrokeShader): it = stroke.strokeVerticesBegin() if it.isEnd(): return - p_min = 
it.getObject().getPoint() - p_max = it.getObject().getPoint() + p_min = Vector(it.getObject().point) + p_max = Vector(it.getObject().point) while it.isEnd() == 0: - p = it.getObject().getPoint() + p = it.getObject().point if (p.x < p_min.x): p_min.x = p.x if (p.x > p_max.x): @@ -1011,7 +991,7 @@ class pyBluePrintCirclesShader(StrokeShader): c = prev_center + (center - prev_center) * t p_new.x = c.x + r * cos(2 * pi * t) p_new.y = c.y + r * sin(2 * pi * t) - it.getObject().setPoint(p_new) + it.getObject().point = p_new i = i + 1 it.increment() i = 1 @@ -1035,10 +1015,10 @@ class pyBluePrintEllipsesShader(StrokeShader): it = stroke.strokeVerticesBegin() if it.isEnd(): return - p_min = it.getObject().getPoint() - p_max = it.getObject().getPoint() + p_min = Vector(it.getObject().point) + p_max = Vector(it.getObject().point) while it.isEnd() == 0: - p = it.getObject().getPoint() + p = it.getObject().point if (p.x < p_min.x): p_min.x = p.x if (p.x > p_max.x): @@ -1070,7 +1050,7 @@ class pyBluePrintEllipsesShader(StrokeShader): c = prev_center + (center - prev_center) * t p_new.x = c.x + r.x * cos(2 * pi * t) p_new.y = c.y + r.y * sin(2 * pi * t) - it.getObject().setPoint(p_new) + it.getObject().point = p_new i = i + 1 it.increment() i = 1 @@ -1097,10 +1077,10 @@ class pyBluePrintSquaresShader(StrokeShader): it = stroke.strokeVerticesBegin() if it.isEnd(): return - p_min = it.getObject().getPoint() - p_max = it.getObject().getPoint() + p_min = Vector(it.getObject().point) + p_max = Vector(it.getObject().point) while it.isEnd() == 0: - p = it.getObject().getPoint() + p = it.getObject().point if (p.x < p_min.x): p_min.x = p.x if (p.x > p_max.x): @@ -1168,8 +1148,8 @@ class pyBluePrintSquaresShader(StrokeShader): if visible == 0: visible = 1 continue - it.getObject().setPoint(p_new) - it.getObject().attribute().setVisible(visible) + it.getObject().point = p_new + it.getObject().attribute.visible = visible if visible == 0: visible = 1 i = i + 1 @@ -1196,7 +1176,7 @@ class 
pyBluePrintDirectedSquaresShader(StrokeShader): p_mean = Vector([0, 0]) it = stroke.strokeVerticesBegin() while it.isEnd() == 0: - p = it.getObject().getPoint() + p = it.getObject().point p_mean = p_mean + p it.increment() sv_nb = stroke.strokeVerticesSize() @@ -1206,7 +1186,7 @@ class pyBluePrintDirectedSquaresShader(StrokeShader): p_var_xy = 0 it = stroke.strokeVerticesBegin() while it.isEnd() == 0: - p = it.getObject().getPoint() + p = it.getObject().point p_var_xx = p_var_xx + pow(p.x - p_mean.x, 2) p_var_yy = p_var_yy + pow(p.y - p_mean.y, 2) p_var_xy = p_var_xy + (p.x - p_mean.x) * (p.y - p_mean.y) @@ -1267,8 +1247,8 @@ class pyBluePrintDirectedSquaresShader(StrokeShader): p_new = p_fourth + vec_fourth * float(i - third)/float(fourth - third - 1) if i == fourth - 1: visible = 0 - it.getObject().setPoint(p_new) - it.getObject().attribute().setVisible(visible) + it.getObject().point = p_new + it.getObject().attribute.visible = visible if visible == 0: visible = 1 i = i + 1 @@ -1291,14 +1271,14 @@ class pyModulateAlphaShader(StrokeShader): def shade(self, stroke): it = stroke.strokeVerticesBegin() while it.isEnd() == 0: - alpha = it.getObject().attribute().getAlpha() - p = it.getObject().getPoint() + alpha = it.getObject().attribute.alpha + p = it.getObject().point alpha = alpha * p.y / 400 if alpha < self.__min: alpha = self.__min elif alpha > self.__max: alpha = self.__max - it.getObject().attribute().setAlpha(alpha) + it.getObject().attribute.alpha = alpha it.increment() @@ -1311,9 +1291,9 @@ class pyDummyShader(StrokeShader): it_end = stroke.strokeVerticesEnd() while it.isEnd() == 0: toto = it.castToInterface0DIterator() - att = it.getObject().attribute() - att.setColor(0.3, 0.4, 0.4) - att.setThickness(0, 5) + att = it.getObject().attribute + att.color = (0.3, 0.4, 0.4) + att.thickness = (0, 5) it.increment() class pyDebugShader(StrokeShader): diff --git a/release/scripts/freestyle/style_modules/sketchy_topology_broken.py 
b/release/scripts/freestyle/style_modules/sketchy_topology_broken.py index 9ec0cffcfec..e51acb93fbc 100644 --- a/release/scripts/freestyle/style_modules/sketchy_topology_broken.py +++ b/release/scripts/freestyle/style_modules/sketchy_topology_broken.py @@ -32,58 +32,19 @@ from logical_operators import * from ChainingIterators import * from shaders import * -## Backbone stretcher that leaves cusps intact to avoid cracks -class pyBackboneStretcherNoCuspShader(StrokeShader): - def __init__(self, l): - StrokeShader.__init__(self) - self._l = l - def getName(self): - return "pyBackboneStretcherNoCuspShader" - def shade(self, stroke): - it0 = stroke.strokeVerticesBegin() - it1 = StrokeVertexIterator(it0) - it1.increment() - itn = stroke.strokeVerticesEnd() - itn.decrement() - itn_1 = StrokeVertexIterator(itn) - itn_1.decrement() - v0 = it0.getObject() - v1 = it1.getObject() - if((v0.getNature() & Nature.CUSP == 0) and (v1.getNature() & Nature.CUSP == 0)): - p0 = v0.getPoint() - p1 = v1.getPoint() - d1 = p0-p1 - d1.normalize() - newFirst = p0+d1*float(self._l) - v0.setPoint(newFirst) - else: - print("got a v0 cusp") - vn_1 = itn_1.getObject() - vn = itn.getObject() - if((vn.getNature() & Nature.CUSP == 0) and (vn_1.getNature() & Nature.CUSP == 0)): - pn = vn.getPoint() - pn_1 = vn_1.getPoint() - dn = pn-pn_1 - dn.normalize() - newLast = pn+dn*float(self._l) - vn.setPoint(newLast) - else: - print("got a vn cusp") - - Operators.select(QuantitativeInvisibilityUP1D(0)) ## Chain 3 times each ViewEdge indpendantly from the ## initial objects topology Operators.bidirectionalChain(pySketchyChainingIterator(3)) -shaders_list = [ - SamplingShader(4), - SpatialNoiseShader(6, 120, 2, 1, 1), - IncreasingThicknessShader(4, 10), - SmoothingShader(100, 0.1, 0, 0.2, 0, 0, 0, 1), - pyBackboneStretcherNoCuspShader(20), - #ConstantColorShader(0.0,0.0,0.0) - IncreasingColorShader(0.2,0.2,0.2,1,0.5,0.5,0.5,1), - #IncreasingColorShader(1,0,0,1,0,1,0,1), - TextureAssignerShader(4) - ] 
+shaders_list = [ + SamplingShader(4), + SpatialNoiseShader(6, 120, 2, 1, 1), + IncreasingThicknessShader(4, 10), + SmoothingShader(100, 0.1, 0, 0.2, 0, 0, 0, 1), + pyBackboneStretcherNoCuspShader(20), + #ConstantColorShader(0.0,0.0,0.0) + IncreasingColorShader(0.2,0.2,0.2,1,0.5,0.5,0.5,1), + #IncreasingColorShader(1,0,0,1,0,1,0,1), + TextureAssignerShader(4) + ] Operators.create(TrueUP1D(), shaders_list) diff --git a/release/scripts/freestyle/style_modules/thickness_fof_depth_discontinuity.py b/release/scripts/freestyle/style_modules/thickness_fof_depth_discontinuity.py index 21f6c7bdf35..8394aa1bde4 100644 --- a/release/scripts/freestyle/style_modules/thickness_fof_depth_discontinuity.py +++ b/release/scripts/freestyle/style_modules/thickness_fof_depth_discontinuity.py @@ -27,36 +27,14 @@ from freestyle_init import * from logical_operators import * -from ChainingIterators import * from shaders import * -class pyDepthDiscontinuityThicknessShader(StrokeShader): - def __init__(self, min, max): - StrokeShader.__init__(self) - self.__min = float(min) - self.__max = float(max) - self.__func = ZDiscontinuityF0D() - def getName(self): - return "pyDepthDiscontinuityThicknessShader" - def shade(self, stroke): - it = stroke.strokeVerticesBegin() - z_min=0.0 - z_max=1.0 - a = (self.__max - self.__min)/(z_max-z_min) - b = (self.__min*z_max-self.__max*z_min)/(z_max-z_min) - it = stroke.strokeVerticesBegin() - while it.isEnd() == 0: - z = self.__func(it.castToInterface0DIterator()) - thickness = a*z+b - it.getObject().attribute().setThickness(thickness, thickness) - it.increment() - Operators.select(QuantitativeInvisibilityUP1D(0)) Operators.bidirectionalChain(ChainSilhouetteIterator(), NotUP1D(QuantitativeInvisibilityUP1D(0))) -shaders_list = [ - SamplingShader(1), - ConstantThicknessShader(3), - ConstantColorShader(0.0,0.0,0.0), - pyDepthDiscontinuityThicknessShader(0.8, 6) - ] -Operators.create(TrueUP1D(), shaders_list)
\ No newline at end of file +shaders_list = [ + SamplingShader(1), + ConstantThicknessShader(3), + ConstantColorShader(0.0, 0.0, 0.0), + pyDepthDiscontinuityThicknessShader(0.8, 6) + ] +Operators.create(TrueUP1D(), shaders_list) |