Welcome to mirror list, hosted at ThFree Co, Russian Federation.

git.blender.org/blender-addons.git - Unnamed repository; edit this file 'description' to name the repository.
summaryrefslogtreecommitdiff
diff options
context:
space:
mode:
authorBastien Montagne <montagne29@wanadoo.fr>2016-02-10 22:00:44 +0300
committerBastien Montagne <montagne29@wanadoo.fr>2016-02-10 22:00:44 +0300
commit3c5d5fb1cad25d75df3a3a032e86a58957311fd2 (patch)
tree7065899ee9e1a75d79b9558607827009874f212d
parent039f3646b5a2a3a76fd43df1a2cf68bc0242f1c7 (diff)
parent6266b4139503bb614576f15ea4e90870ac5e597d (diff)
Merge branch 'master' into fbx_io_export_ignore_parentsfbx_io_export_ignore_parents
Conflicts: io_scene_fbx/import_fbx.py
-rw-r--r--add_curve_extra_objects/__init__.py7
-rw-r--r--add_curve_extra_objects/add_curve_aceous_galore.py103
-rw-r--r--add_curve_extra_objects/add_curve_spirals.py9
-rw-r--r--add_curve_extra_objects/add_curve_torus_knots.py724
-rw-r--r--add_curve_ivygen.py15
-rw-r--r--add_curve_sapling/__init__.py4
-rw-r--r--add_mesh_extra_objects/add_mesh_3d_function_surface.py21
-rw-r--r--add_mesh_extra_objects/add_mesh_honeycomb.py7
-rw-r--r--add_mesh_extra_objects/add_mesh_menger_sponge.py9
-rw-r--r--add_mesh_extra_objects/add_mesh_round_cube.py2
-rw-r--r--bone_selection_groups.py244
-rw-r--r--curve_simplify.py47
-rw-r--r--game_engine_publishing.py95
-rw-r--r--io_anim_bvh/__init__.py26
-rw-r--r--io_anim_bvh/export_bvh.py2
-rw-r--r--io_anim_bvh/import_bvh.py101
-rw-r--r--io_anim_nuke_chan/import_nuke_chan.py2
-rw-r--r--io_blend_utils/__init__.py110
-rw-r--r--io_blend_utils/bl_utils/pipe_non_blocking.py100
-rw-r--r--io_blend_utils/bl_utils/subprocess_helper.py172
-rw-r--r--io_blend_utils/blend/blendfile.py917
-rw-r--r--io_blend_utils/blend/blendfile_path_walker.py939
-rwxr-xr-xio_blend_utils/blendfile_pack.py601
-rw-r--r--io_blend_utils/utils/system.py105
-rw-r--r--io_curve_svg/import_svg.py13
-rw-r--r--io_import_images_as_planes.py58
-rw-r--r--io_import_scene_unreal_psa_psk.py4
-rw-r--r--io_mesh_stl/__init__.py39
-rw-r--r--io_mesh_uv_layout/__init__.py7
-rw-r--r--io_mesh_uv_layout/export_uv_png.py2
-rw-r--r--io_scene_fbx/__init__.py15
-rw-r--r--io_scene_fbx/data_types.py4
-rw-r--r--io_scene_fbx/export_fbx.py8
-rw-r--r--io_scene_fbx/export_fbx_bin.py80
-rw-r--r--io_scene_fbx/fbx_utils.py56
-rw-r--r--io_scene_fbx/import_fbx.py201
-rw-r--r--io_scene_obj/__init__.py16
-rw-r--r--io_scene_obj/export_obj.py64
-rw-r--r--io_scene_obj/import_obj.py207
-rw-r--r--io_scene_x3d/__init__.py16
-rw-r--r--io_scene_x3d/export_x3d.py10
-rw-r--r--io_scene_x3d/import_x3d.py2095
-rw-r--r--light_field_tools/light_field_tools.py4
-rw-r--r--mesh_bsurfaces.py10
-rw-r--r--mesh_custom_normals_tools.py90
-rw-r--r--mesh_inset/__init__.py12
-rw-r--r--mocap/mocap_constraints.py7
-rw-r--r--modules/rna_manual_reference.py756
-rw-r--r--netrender/master.py2
-rw-r--r--object_animrenderbake.py64
-rw-r--r--object_cloud_gen.py66
-rw-r--r--object_grease_scatter.py21
-rw-r--r--object_print3d_utils/export.py1
-rw-r--r--object_print3d_utils/mesh_helpers.py63
-rw-r--r--object_print3d_utils/ui.py2
-rw-r--r--presets/operator/curve.torus_knot_plus/13x8_wicker_globe.py38
-rw-r--r--presets/operator/curve.torus_knot_plus/7x6.py38
-rw-r--r--presets/operator/curve.torus_knot_plus/9x9_color.py38
-rw-r--r--presets/operator/curve.torus_knot_plus/braided_coil.py38
-rw-r--r--presets/operator/curve.torus_knot_plus/flower_mesh_(2d).py38
-rw-r--r--presets/operator/curve.torus_knot_plus/slinky_knot.py38
-rw-r--r--presets/operator/curve.torus_knot_plus/snowflake_(2d).py38
-rw-r--r--presets/operator/curve.torus_knot_plus/sun_cross_(2d).py38
-rw-r--r--presets/operator/curve.torus_knot_plus/tripple_dna.py38
-rw-r--r--presets/operator/curve.torus_knot_plus/wicker_basket.py38
-rw-r--r--render_povray/__init__.py624
-rw-r--r--render_povray/primitives.py1494
-rw-r--r--render_povray/render.py3378
-rw-r--r--render_povray/shading.py231
-rw-r--r--render_povray/ui.py617
-rw-r--r--render_renderfarmfi/__init__.py224
-rw-r--r--render_renderfarmfi/exceptions.py41
-rw-r--r--render_renderfarmfi/operators.py347
-rw-r--r--render_renderfarmfi/ore_session.py40
-rw-r--r--render_renderfarmfi/panels.py274
-rw-r--r--render_renderfarmfi/prepare.py189
-rw-r--r--render_renderfarmfi/rpc.py198
-rw-r--r--render_renderfarmfi/upload.py193
-rw-r--r--render_renderfarmfi/utils.py137
-rw-r--r--space_view3d_copy_attributes.py4
-rw-r--r--ui_layer_manager.py4
81 files changed, 12142 insertions, 4588 deletions
diff --git a/add_curve_extra_objects/__init__.py b/add_curve_extra_objects/__init__.py
index 434b7191..8f706f6f 100644
--- a/add_curve_extra_objects/__init__.py
+++ b/add_curve_extra_objects/__init__.py
@@ -22,12 +22,13 @@ bl_info = {
"name": "Extra Objects",
"author": "Multiple Authors",
"version": (0, 1),
- "blender": (2, 63, 0),
+ "blender": (2, 76, 0),
"location": "View3D > Add > Curve > Extra Objects",
"description": "Add extra curve object types",
"warning": "",
"wiki_url": "http://wiki.blender.org/index.php/Extensions:2.6/Py/"
"Scripts/Curve/Curve_Objects",
+ "tracker_url": "https://developer.blender.org/maniphest/task/create/?project=3&type=Bug",
"category": "Add Curve"}
if "bpy" in locals():
@@ -52,7 +53,7 @@ class INFO_MT_curve_extras_add(bpy.types.Menu):
layout = self.layout
layout.operator_context = 'INVOKE_REGION_WIN'
layout.operator("mesh.curveaceous_galore",
- text="Curves Galore!")
+ text="Curve Profiles")
layout.operator("curve.spirals",
text="Spirals")
layout.operator("curve.torus_knot_plus",
@@ -60,7 +61,7 @@ class INFO_MT_curve_extras_add(bpy.types.Menu):
# Define "Extras" menu
def menu_func(self, context):
self.layout.operator("mesh.curveaceous_galore",
- text="Curves Galore!")
+ text="Curve Profiles")
self.layout.operator("curve.torus_knot_plus",
text="Torus Knot Plus")
self.layout.operator("curve.spirals",
diff --git a/add_curve_extra_objects/add_curve_aceous_galore.py b/add_curve_extra_objects/add_curve_aceous_galore.py
index 0eae5e80..13fda4be 100644
--- a/add_curve_extra_objects/add_curve_aceous_galore.py
+++ b/add_curve_extra_objects/add_curve_aceous_galore.py
@@ -34,9 +34,17 @@ bl_info = {
##------------------------------------------------------------
#### import modules
import bpy
-from bpy.props import *
-from mathutils import *
-from math import *
+from bpy.props import (
+ BoolProperty,
+ EnumProperty,
+ FloatProperty,
+ IntProperty,
+ )
+from mathutils import (
+ Matrix,
+ Vector,
+ )
+from math import sin, cos, pi
import mathutils.noise as Noise
###------------------------------------------------------------
#### Some functions to use with others:
@@ -641,13 +649,13 @@ def vertsToPoints(Verts, splineType):
return vertArray
# create new CurveObject from vertarray and splineType
-def createCurve(vertArray, self, align_matrix):
+def createCurve(context, vertArray, self, align_matrix):
# options to vars
splineType = self.outputType # output splineType 'POLY' 'NURBS' 'BEZIER'
- name = self.GalloreType # GalloreType as name
+ name = self.ProfileType # GalloreType as name
# create curve
- scene = bpy.context.scene
+ scene = context.scene
newCurve = bpy.data.curves.new(name, type = 'CURVE') # curvedatablock
newSpline = newCurve.splines.new(type = splineType) # spline
@@ -686,57 +694,55 @@ def main(context, self, align_matrix):
bpy.ops.object.select_all(action='DESELECT')
# options
- galType = self.GalloreType
+ proType = self.ProfileType
splineType = self.outputType
innerRadius = self.innerRadius
middleRadius = self.middleRadius
outerRadius = self.outerRadius
# get verts
- if galType == 'Profile':
+ if proType == 'Profile':
verts = ProfileCurve(self.ProfileCurveType,
self.ProfileCurvevar1,
self.ProfileCurvevar2)
- if galType == 'Miscellaneous':
+ if proType == 'Miscellaneous':
verts = MiscCurve(self.MiscCurveType,
self.MiscCurvevar1,
self.MiscCurvevar2,
self.MiscCurvevar3)
- if galType == 'Flower':
+ if proType == 'Flower':
verts = FlowerCurve(self.petals,
innerRadius,
outerRadius,
self.petalWidth)
- if galType == 'Star':
+ if proType == 'Star':
verts = StarCurve(self.starPoints,
innerRadius,
outerRadius,
self.starTwist)
- if galType == 'Arc':
+ if proType == 'Arc':
verts = ArcCurve(self.arcSides,
self.startAngle,
self.endAngle,
innerRadius,
outerRadius,
self.arcType)
- if galType == 'Cogwheel':
+ if proType == 'Cogwheel':
verts = CogCurve(self.teeth,
innerRadius,
middleRadius,
outerRadius,
self.bevel)
- if galType == 'Nsided':
+ if proType == 'Nsided':
verts = nSideCurve(self.Nsides,
outerRadius)
-
- if galType == 'Splat':
+ if proType == 'Splat':
verts = SplatCurve(self.splatSides,
self.splatScale,
self.seed,
self.basis,
outerRadius)
-
- if galType == 'Helix':
+ if proType == 'Helix':
verts = HelixCurve(self.helixPoints,
self.helixHeight,
self.helixStart,
@@ -744,7 +750,7 @@ def main(context, self, align_matrix):
self.helixWidth,
self.helix_a,
self.helix_b)
- if galType == 'Cycloid':
+ if proType == 'Cycloid':
verts = CycloidCurve(self.cycloPoints,
self.cyclo_d,
self.cycloType,
@@ -757,21 +763,21 @@ def main(context, self, align_matrix):
vertArray = vertsToPoints(verts, splineType)
# create object
- createCurve(vertArray, self, align_matrix)
+ createCurve(context, vertArray, self, align_matrix)
return
class Curveaceous_galore(bpy.types.Operator):
"""Add many types of curves"""
bl_idname = "mesh.curveaceous_galore"
- bl_label = "Curveaceous galore"
+ bl_label = "Curve Profiles"
bl_options = {'REGISTER', 'UNDO', 'PRESET'}
# align_matrix for the invoke
- align_matrix = Matrix()
+ align_matrix = None
#### general properties
- GalloreTypes = [
+ ProfileTypes = [
('Profile', 'Profile', 'Profile'),
('Miscellaneous', 'Miscellaneous', 'Miscellaneous'),
('Flower', 'Flower', 'Flower'),
@@ -782,9 +788,9 @@ class Curveaceous_galore(bpy.types.Operator):
('Splat', 'Splat', 'Splat'),
('Cycloid', 'Cycloid', 'Cycloid'),
('Helix', 'Helix (3D)', 'Helix')]
- GalloreType = EnumProperty(name="Type",
+ ProfileType = EnumProperty(name="Type",
description="Form of Curve to create",
- items=GalloreTypes)
+ items=ProfileTypes)
SplineTypes = [
('POLY', 'Poly', 'POLY'),
('NURBS', 'Nurbs', 'NURBS'),
@@ -993,56 +999,62 @@ class Curveaceous_galore(bpy.types.Operator):
# general options
col = layout.column()
- col.prop(self, 'GalloreType')
- col.label(text=self.GalloreType + " Options:")
+ col.prop(self, 'ProfileType')
+ col.label(text=self.ProfileType + " Options:")
- # options per GalloreType
+ # options per ProfileType
box = layout.box()
- if self.GalloreType == 'Profile':
+ if self.ProfileType == 'Profile':
box.prop(self, 'ProfileCurveType')
box.prop(self, 'ProfileCurvevar1')
box.prop(self, 'ProfileCurvevar2')
- elif self.GalloreType == 'Miscellaneous':
+
+ elif self.ProfileType == 'Miscellaneous':
box.prop(self, 'MiscCurveType')
box.prop(self, 'MiscCurvevar1', text='Width')
box.prop(self, 'MiscCurvevar2', text='Height')
if self.MiscCurveType == 5:
box.prop(self, 'MiscCurvevar3', text='Rounded')
- elif self.GalloreType == 'Flower':
+
+ elif self.ProfileType == 'Flower':
box.prop(self, 'petals')
box.prop(self, 'petalWidth')
box.prop(self, 'innerRadius')
box.prop(self, 'outerRadius')
- elif self.GalloreType == 'Star':
+
+ elif self.ProfileType == 'Star':
box.prop(self, 'starPoints')
box.prop(self, 'starTwist')
box.prop(self, 'innerRadius')
box.prop(self, 'outerRadius')
- elif self.GalloreType == 'Arc':
+
+ elif self.ProfileType == 'Arc':
box.prop(self, 'arcSides')
box.prop(self, 'arcType') # has only one Type?
box.prop(self, 'startAngle')
box.prop(self, 'endAngle')
box.prop(self, 'innerRadius') # doesn't seem to do anything
box.prop(self, 'outerRadius')
- elif self.GalloreType == 'Cogwheel':
+
+ elif self.ProfileType == 'Cogwheel':
box.prop(self, 'teeth')
box.prop(self, 'bevel')
box.prop(self, 'innerRadius')
box.prop(self, 'middleRadius')
box.prop(self, 'outerRadius')
- elif self.GalloreType == 'Nsided':
+
+ elif self.ProfileType == 'Nsided':
box.prop(self, 'Nsides')
box.prop(self, 'outerRadius', text='Radius')
- elif self.GalloreType == 'Splat':
+ elif self.ProfileType == 'Splat':
box.prop(self, 'splatSides')
box.prop(self, 'outerRadius')
box.prop(self, 'splatScale')
box.prop(self, 'seed')
box.prop(self, 'basis')
- elif self.GalloreType == 'Helix':
+ elif self.ProfileType == 'Helix':
box.prop(self, 'helixPoints')
box.prop(self, 'helixHeight')
box.prop(self, 'helixWidth')
@@ -1050,7 +1062,8 @@ class Curveaceous_galore(bpy.types.Operator):
box.prop(self, 'helixEnd')
box.prop(self, 'helix_a')
box.prop(self, 'helix_b')
- elif self.GalloreType == 'Cycloid':
+
+ elif self.ProfileType == 'Cycloid':
box.prop(self, 'cycloPoints')
#box.prop(self, 'cycloType') # needs the other types first
box.prop(self, 'cycloStart')
@@ -1081,7 +1094,6 @@ class Curveaceous_galore(bpy.types.Operator):
box.row().prop(self, 'handleType', expand=True)
#box.prop(self, 'use_cyclic_u')
-
##### POLL #####
@classmethod
def poll(cls, context):
@@ -1090,26 +1102,25 @@ class Curveaceous_galore(bpy.types.Operator):
##### EXECUTE #####
def execute(self, context):
# turn off undo
- undo = bpy.context.user_preferences.edit.use_global_undo
- bpy.context.user_preferences.edit.use_global_undo = False
+ undo = context.user_preferences.edit.use_global_undo
+ context.user_preferences.edit.use_global_undo = False
# deal with 2D - 3D curve differences
- if self.GalloreType in ['Helix', 'Cycloid']:
+ if self.ProfileType in ['Helix', 'Cycloid']:
self.shape = '3D'
#else:
#self.shape = '2D' # someone decide if we want this
- if self.GalloreType in ['Helix']:
+ if self.ProfileType in ['Helix']:
self.use_cyclic_u = False
else:
self.use_cyclic_u = True
-
# main function
- main(context, self, self.align_matrix)
+ main(context, self, self.align_matrix or Matrix())
# restore pre operator undo state
- bpy.context.user_preferences.edit.use_global_undo = undo
+ context.user_preferences.edit.use_global_undo = undo
return {'FINISHED'}
diff --git a/add_curve_extra_objects/add_curve_spirals.py b/add_curve_extra_objects/add_curve_spirals.py
index 11d86eb7..a07ef411 100644
--- a/add_curve_extra_objects/add_curve_spirals.py
+++ b/add_curve_extra_objects/add_curve_spirals.py
@@ -14,7 +14,11 @@
}
'''
import bpy, time
-from bpy.props import *
+from bpy.props import (
+ BoolProperty,
+ FloatProperty,
+ IntProperty,
+ )
from math import sin, cos, pi, exp
from bpy_extras.object_utils import AddObjectHelper, object_data_add
@@ -170,8 +174,7 @@ def draw_curve(props, context):
spline.points.add( len(verts)*0.25-1 ) #Add only one quarter of points as elements in verts, because verts looks like: "x,y,z,?,x,y,z,?,x,..."
spline.points.foreach_set('co', verts)
-# new_obj = object_data_add(bpy.context, curve_data)
- new_obj = object_data_add(context, curve_data)
+ new_obj = object_data_add(context, curve_data)
class spirals(bpy.types.Operator):
bl_idname = "curve.spirals"
diff --git a/add_curve_extra_objects/add_curve_torus_knots.py b/add_curve_extra_objects/add_curve_torus_knots.py
index 22fe48fe..7423be04 100644
--- a/add_curve_extra_objects/add_curve_torus_knots.py
+++ b/add_curve_extra_objects/add_curve_torus_knots.py
@@ -19,9 +19,9 @@
'''
bl_info = {
"name": "Torus Knots",
- "author": "testscreenings",
- "version": (0, 1),
- "blender": (2, 59, 0),
+ "author": "Marius Giurgi (DolphinDream), testscreenings",
+ "version": (0, 2),
+ "blender": (2, 76, 0),
"location": "View3D > Add > Curve",
"description": "Adds many types of (torus) knots",
"warning": "",
@@ -30,194 +30,664 @@ bl_info = {
"category": "Add Curve"}
'''
-##------------------------------------------------------------
+# ------------------------------------------------------------------------------
+
#### import modules
import bpy
-from bpy.props import *
-from math import sin, cos, pi
+from bpy.props import BoolProperty, EnumProperty, FloatProperty, IntProperty
+from math import sin, cos, pi, sqrt
+from mathutils import *
from bpy_extras.object_utils import AddObjectHelper, object_data_add
+from random import random
+
+DEBUG = False
+
+# greatest common denominator
+def gcd(a, b):
+ if b == 0:
+ return a
+ else:
+ return gcd(b, a % b)
########################################################################
####################### Knot Definitions ###############################
########################################################################
-def Torus_Knot(self):
- p = self.torus_p
- q = self.torus_q
- w = self.torus_w
- res = self.torus_res
- h = self.torus_h
- u = self.torus_u
- v = self.torus_v
- rounds = self.torus_rounds
+def Torus_Knot(self, linkIndex=0):
+ p = self.torus_p # revolution count (around the torus center)
+ q = self.torus_q # spin count (around the torus tube)
- newPoints = []
- angle = 2*rounds
- step = angle/(res-1)
- scale = h
- height = w
+ N = self.torus_res # curve resolution (number of control points)
+
+ # use plus options only when they are enabled
+ if self.options_plus:
+ u = self.torus_u # p multiplier
+ v = self.torus_v # q multiplier
+ h = self.torus_h # height (scale along Z)
+ s = self.torus_s # torus scale (radii scale factor)
+ else: # don't use plus settings
+ u = 1
+ v = 1
+ h = 1
+ s = 1
+
+ R = self.torus_R * s # major radius (scaled)
+ r = self.torus_r * s # minor radius (scaled)
+
+ # number of decoupled links when (p,q) are NOT co-primes
+ links = gcd(p,q) # = 1 when (p,q) are co-primes
+
+ # parametrized angle increment (cached outside of the loop for performance)
+ # NOTE: the total angle is divided by number of decoupled links to ensure
+ # the curve does not overlap with itself when (p,q) are not co-primes
+ da = 2*pi/links/(N-1)
+
+ # link phase : each decoupled link is phased equally around the torus center
+ # NOTE: linkIndex value is in [0, links-1]
+ linkPhase = 2*pi/q * linkIndex # = 0 when there is just ONE link
+
+ # user defined phasing
+ if self.options_plus:
+ rPhase = self.torus_rP # user defined revolution phase
+ sPhase = self.torus_sP # user defined spin phase
+ else: # don't use plus settings
+ rPhase = 0
+ sPhase = 0
- for i in range(res-1):
- t = ( i*step*pi)
+ rPhase += linkPhase # total revolution phase of the current link
- x = (2 * scale + cos((q*t)/p*v)) * cos(t * u)
- y = (2 * scale + cos((q*t)/p*v)) * sin(t * u)
- z = sin(q*t/p) * height
+ if DEBUG:
+ print("")
+ print("Link: %i of %i" % (linkIndex, links))
+ print("gcd = %i" % links)
+ print("p = %i" % p)
+ print("q = %i" % q)
+ print("link phase = %.2f deg" % (linkPhase * 180/pi))
+ print("link phase = %.2f rad" % linkPhase)
- newPoints.extend([x,y,z,1])
+ # flip directions ? NOTE: flipping both is equivalent to no flip
+ if self.flip_p: p*=-1
+ if self.flip_q: q*=-1
+
+ # create the 3D point array for the current link
+ newPoints = []
+ for n in range(N-1):
+ # t = 2*pi / links * n/(N-1) with: da = 2*pi/links/(N-1) => t = n * da
+ t = n * da
+ theta = p*t*u + rPhase # revolution angle
+ phi = q*t*v + sPhase # spin angle
+
+ x = (R + r*cos(phi)) * cos(theta)
+ y = (R + r*cos(phi)) * sin(theta)
+ z = r*sin(phi) * h
+
+ # append 3D point
+ # NOTE : the array is adjusted later as needed to 4D for POLY and NURBS
+ newPoints.append([x,y,z])
return newPoints
+# ------------------------------------------------------------------------------
+# Calculate the align matrix for the new object (based on user preferences)
+def align_matrix(self, context):
+ if self.absolute_location:
+ loc = Matrix.Translation(Vector((0,0,0)))
+ else:
+ loc = Matrix.Translation(context.scene.cursor_location)
+
+ # user defined location & translation
+ userLoc = Matrix.Translation(self.location)
+ userRot = self.rotation.to_matrix().to_4x4()
+
+ obj_align = context.user_preferences.edit.object_align
+ if (context.space_data.type == 'VIEW_3D' and obj_align == 'VIEW'):
+ rot = context.space_data.region_3d.view_matrix.to_3x3().inverted().to_4x4()
+ else:
+ rot = Matrix()
+
+ align_matrix = userLoc * loc * rot * userRot
+ return align_matrix
+
+# ------------------------------------------------------------------------------
+# Set curve BEZIER handles to auto
+def setBezierHandles(obj, mode = 'AUTOMATIC'):
+ scene = bpy.context.scene
+ if obj.type != 'CURVE':
+ return
+ scene.objects.active = obj
+ bpy.ops.object.mode_set(mode='EDIT', toggle=True)
+ bpy.ops.curve.select_all(action='SELECT')
+ bpy.ops.curve.handle_type_set(type=mode)
+ bpy.ops.object.mode_set(mode='OBJECT', toggle=True)
+
+# ------------------------------------------------------------------------------
+# Convert array of vert coordinates to points according to spline type
+def vertsToPoints(Verts, splineType):
+ # main vars
+ vertArray = []
-##------------------------------------------------------------
-# Main Function
+ # array for BEZIER spline output (V3)
+ if splineType == 'BEZIER':
+ for v in Verts:
+ vertArray += v
+
+ # array for non-BEZIER output (V4)
+ else:
+ for v in Verts:
+ vertArray += v
+ if splineType == 'NURBS':
+ vertArray.append(1) # for NURBS w=1
+ else: # for POLY w=0
+ vertArray.append(0)
+
+ return vertArray
+
+# ------------------------------------------------------------------------------
+# Create the Torus Knot curve and object and add it to the scene
def create_torus_knot(self, context):
- verts = Torus_Knot(self)
-
- curve_data = bpy.data.curves.new(name='Torus Knot', type='CURVE')
- spline = curve_data.splines.new(type='NURBS')
- spline.points.add(int(len(verts)*0.25 - 1))
- spline.points.foreach_set('co', verts)
- spline.use_endpoint_u = True
- spline.use_cyclic_u = True
- spline.order_u = 4
+ # pick a name based on (p,q) parameters
+ aName = "Torus Knot %i x %i" % (self.torus_p, self.torus_q)
+
+ # create curve
+ curve_data = bpy.data.curves.new(name=aName, type='CURVE')
+
+ # setup materials to be used for the TK links
+ if self.use_colors:
+ addLinkColors(self, curve_data)
+
+ # create torus knot link(s)
+ if self.multiple_links:
+ links = gcd(self.torus_p, self.torus_q);
+ else:
+ links = 1;
+
+ for l in range(links):
+ # get vertices for the current link
+ verts = Torus_Knot(self, l)
+
+ # output splineType 'POLY' 'NURBS' or 'BEZIER'
+ splineType = self.outputType
+
+ # turn verts into proper array (based on spline type)
+ vertArray = vertsToPoints(verts, splineType)
+
+ # create spline from vertArray (based on spline type)
+ spline = curve_data.splines.new(type=splineType)
+ if splineType == 'BEZIER':
+ spline.bezier_points.add(int(len(vertArray)*1.0/3-1))
+ spline.bezier_points.foreach_set('co', vertArray)
+ else:
+ spline.points.add(int(len(vertArray)*1.0/4 - 1))
+ spline.points.foreach_set('co', vertArray)
+ spline.use_endpoint_u = True
+
+ # set curve options
+ spline.use_cyclic_u = True
+ spline.order_u = 4
+
+ # set a color per link
+ if self.use_colors:
+ spline.material_index = l
+
curve_data.dimensions = '3D'
+ curve_data.resolution_u = self.segment_res
- if self.geo_surf:
+ # create surface ?
+ if self.geo_surface:
+ curve_data.fill_mode = 'FULL'
curve_data.bevel_depth = self.geo_bDepth
curve_data.bevel_resolution = self.geo_bRes
- curve_data.fill_mode = 'FULL'
curve_data.extrude = self.geo_extrude
- #curve_data.offset = self.geo_width # removed, somehow screws things up all of a sudden
- curve_data.resolution_u = self.geo_res
+ curve_data.offset = self.geo_offset
+
+ new_obj = bpy.data.objects.new(aName, curve_data)
+
+ # set object in the scene
+ scene = bpy.context.scene
+ scene.objects.link(new_obj) # place in active scene
+ new_obj.select = True # set as selected
+ scene.objects.active = new_obj # set as active
+ new_obj.matrix_world = self.align_matrix # apply matrix
+
+ # set BEZIER handles
+ if splineType == 'BEZIER':
+ setBezierHandles(new_obj, self.handleType)
+
+ return
- new_obj = object_data_add(context, curve_data, operator=self)
+# ------------------------------------------------------------------------------
+# Create materials to be assigned to each TK link
+def addLinkColors(self, curveData):
+ # some predefined colors for the torus knot links
+ colors = []
+ if self.colorSet == "1": # RGBish
+ colors += [ [0.0, 0.0, 1.0] ]
+ colors += [ [0.0, 1.0, 0.0] ]
+ colors += [ [1.0, 0.0, 0.0] ]
+ colors += [ [1.0, 1.0, 0.0] ]
+ colors += [ [0.0, 1.0, 1.0] ]
+ colors += [ [1.0, 0.0, 1.0] ]
+ colors += [ [1.0, 0.5, 0.0] ]
+ colors += [ [0.0, 1.0, 0.5] ]
+ colors += [ [0.5, 0.0, 1.0] ]
+ else: # RainBow
+ colors += [ [0.0, 0.0, 1.0] ]
+ colors += [ [0.0, 0.5, 1.0] ]
+ colors += [ [0.0, 1.0, 1.0] ]
+ colors += [ [0.0, 1.0, 0.5] ]
+ colors += [ [0.0, 1.0, 0.0] ]
+ colors += [ [0.5, 1.0, 0.0] ]
+ colors += [ [1.0, 1.0, 0.0] ]
+ colors += [ [1.0, 0.5, 0.0] ]
+ colors += [ [1.0, 0.0, 0.0] ]
+ me = curveData
+ mat_offset = len(me.materials)
+ links = gcd(self.torus_p, self.torus_q)
+ mats = []
+ for i in range(links):
+ matName = "TorusKnot-Link-%i" % i
+ matListNames = bpy.data.materials.keys()
+ # create the material
+ if matName not in matListNames:
+ if DEBUG: print("Creating new material : %s" % matName)
+ mat = bpy.data.materials.new(matName)
+ else:
+ if DEBUG: print("Material %s already exists" % matName)
+ mat = bpy.data.materials[matName]
+ # set material color
+ if self.options_plus and self.random_colors:
+ mat.diffuse_color = random(), random(), random()
+ else:
+ cID = i % (len(colors)) # cycle through predefined colors
+ mat.diffuse_color = colors[cID]
+
+ if self.options_plus:
+ mat.diffuse_color.s = self.saturation
+ else:
+ mat.diffuse_color.s = 0.75
+
+ me.materials.append(mat)
+
+# ------------------------------------------------------------------------------
+# Main Torus Knot class
class torus_knot_plus(bpy.types.Operator, AddObjectHelper):
""""""
bl_idname = "curve.torus_knot_plus"
bl_label = "Torus Knot +"
bl_options = {'REGISTER', 'UNDO', 'PRESET'}
- bl_description = "adds many types of knots"
-
- #### general options
- options_plus = BoolProperty(name="plus options",
- default=False,
- description="Show more options (the plus part)")
-
- #### GEO Options
- geo_surf = BoolProperty(name="Surface",
- default=True)
- geo_bDepth = FloatProperty(name="bevel",
- default=0.08,
- min=0, soft_min=0)
- geo_bRes = IntProperty(name="bevel res",
- default=2,
- min=0, soft_min=0,
- max=4, soft_max=4)
- geo_extrude = FloatProperty(name="extrude",
- default=0.0,
- min=0, soft_min=0)
- geo_res = IntProperty(name="resolution",
- default=12,
- min=1, soft_min=1)
-
-
- #### Parameters
- torus_res = IntProperty(name="Resoulution",
- default=100,
- min=3, soft_min=3,
- description='Resolution, Number of controlverticies')
- torus_p = IntProperty(name="p",
- default=2,
- min=1, soft_min=1,
- #max=1, soft_max=1,
- description="p")
- torus_q = IntProperty(name="q",
- default=3,
- min=1, soft_min=1,
- #max=1, soft_max=1,
- description="q")
- torus_w = FloatProperty(name="Height",
- default=1,
- #min=0, soft_min=0,
- #max=1, soft_max=1,
- description="Height in Z")
- torus_h = FloatProperty(name="Scale",
- default=1,
- #min=0, soft_min=0,
- #max=1, soft_max=1,
- description="Scale, in XY")
- torus_u = IntProperty(name="u",
- default=1,
- min=1, soft_min=1,
- #max=1, soft_max=1,
- description="u")
- torus_v = IntProperty(name="v",
- default=1,
- min=1, soft_min=1,
- #max=1, soft_max=1,
- description="v")
- torus_rounds = IntProperty(name="Rounds",
- default=2,
- min=1, soft_min=1,
- #max=1, soft_max=1,
- description="Rounds")
+ bl_description = "Adds many types of tours knots"
+ bl_context = "object"
+
+ def mode_update_callback(self, context):
+ # keep the equivalent radii sets (R,r)/(eR,iR) in sync
+ if self.mode == 'EXT_INT':
+ self.torus_eR = self.torus_R + self.torus_r
+ self.torus_iR = self.torus_R - self.torus_r
+
+ # align_matrix for the invoke
+ align_matrix = None
+
+ #### GENERAL options
+ options_plus = BoolProperty(
+ name="Extra Options",
+ default=False,
+ description="Show more options (the plus part)",
+ )
+ absolute_location = BoolProperty(
+ name= "Absolute Location",
+ default=False,
+ description="Set absolute location instead of relative to 3D cursor",
+ )
+
+ #### COLOR options
+ use_colors = BoolProperty(
+ name="Use Colors",
+ default=False,
+ description="Show torus links in colors",
+ )
+ colorSet = EnumProperty(
+ name="Color Set",
+ items= (('1', 'RGBish', 'RGBsish ordered colors'),
+ ('2', 'Rainbow', 'Rainbow ordered colors')),
+ )
+ random_colors = BoolProperty(
+ name="Randomize Colors",
+ default=False,
+ description="Randomize link colors",
+ )
+ saturation = FloatProperty(
+ name="Saturation",
+ default=0.75,
+ min=0.0, max=1.0,
+ description="Color saturation",
+ )
+
+ #### SURFACE Options
+ geo_surface = BoolProperty(
+ name="Surface",
+ default=True,
+ description="Create surface",
+ )
+ geo_bDepth = FloatProperty(
+ name="Bevel Depth",
+ default=0.04,
+ min=0, soft_min=0,
+ description="Bevel Depth",
+ )
+ geo_bRes = IntProperty(
+ name="Bevel Resolution",
+ default=2,
+ min=0, soft_min=0,
+ max=5, soft_max=5,
+ description="Bevel Resolution"
+ )
+ geo_extrude = FloatProperty(
+ name="Extrude",
+ default=0.0,
+ min=0, soft_min=0,
+ description="Amount of curve extrusion"
+ )
+ geo_offset = FloatProperty(
+ name="Offset",
+ default=0.0,
+ min=0, soft_min=0,
+ description="Offset the surface relative to the curve"
+ )
+
+ #### TORUS KNOT Options
+ torus_p = IntProperty(
+ name="p",
+ default=2,
+ min=1, soft_min=1,
+ description="Number of REVOLUTIONs around the torus hole before closing the knot"
+ )
+ torus_q = IntProperty(
+ name="q",
+ default=3,
+ min=1, soft_min=1,
+ description="Number of SPINs through the torus hole before closing the knot"
+ )
+ flip_p = BoolProperty(
+ name="Flip p",
+ default=False,
+ description="Flip REVOLUTION direction"
+ )
+ flip_q = BoolProperty(
+ name="Flip q",
+ default=False,
+ description="Flip SPIN direction"
+ )
+ multiple_links = BoolProperty(
+ name="Multiple Links",
+ default=True,
+ description="Generate ALL links or just ONE link when q and q are not co-primes"
+ )
+ torus_u = IntProperty(
+ name="p multiplier",
+ default=1,
+ min=1, soft_min=1,
+ description="p multiplier"
+ )
+ torus_v = IntProperty(
+ name="q multiplier",
+ default=1,
+ min=1, soft_min=1,
+ description="q multiplier"
+ )
+ torus_rP = FloatProperty(
+ name="Revolution Phase",
+ default=0.0,
+ min=0.0, soft_min=0.0,
+ description="Phase revolutions by this radian amount"
+ )
+ torus_sP = FloatProperty(
+ name="Spin Phase",
+ default=0.0,
+ min=0.0, soft_min=0.0,
+ description="Phase spins by this radian amount"
+ )
+
+ #### TORUS DIMENSIONS options
+ mode = EnumProperty(
+ name="Torus Dimensions",
+ items=(("MAJOR_MINOR", "Major/Minor",
+ "Use the Major/Minor radii for torus dimensions."),
+ ("EXT_INT", "Exterior/Interior",
+ "Use the Exterior/Interior radii for torus dimensions.")),
+ update=mode_update_callback,
+ )
+ torus_R = FloatProperty(
+ name="Major Radius",
+ min=0.00, max=100.0,
+ default=1.0,
+ subtype='DISTANCE',
+ unit='LENGTH',
+ description="Radius from the torus origin to the center of the cross section"
+ )
+ torus_r = FloatProperty(
+ name="Minor Radius",
+ min=0.00, max=100.0,
+ default=.25,
+ subtype='DISTANCE',
+ unit='LENGTH',
+ description="Radius of the torus' cross section"
+ )
+ torus_iR = FloatProperty(
+ name="Interior Radius",
+ min=0.00, max=100.0,
+ default=.75,
+ subtype='DISTANCE',
+ unit='LENGTH',
+ description="Interior radius of the torus (closest to the torus center)"
+ )
+ torus_eR = FloatProperty(
+ name="Exterior Radius",
+ min=0.00, max=100.0,
+ default=1.25,
+ subtype='DISTANCE',
+ unit='LENGTH',
+ description="Exterior radius of the torus (farthest from the torus center)"
+ )
+ torus_s = FloatProperty(
+ name="Scale",
+ min=0.01, max=100.0,
+ default=1.00,
+ description="Scale factor to multiply the radii"
+ )
+ torus_h = FloatProperty(
+ name="Height",
+ default=1.0,
+ min=0.0, max=100.0,
+ description="Scale along the local Z axis"
+ )
+
+ #### CURVE options
+ torus_res = IntProperty(
+ name="Curve Resolution",
+ default=100,
+ min=3, soft_min=3,
+ description="Number of control vertices in the curve"
+ )
+ segment_res = IntProperty(
+ name="Segment Resolution",
+ default=12,
+ min=1, soft_min=1,
+ description="Curve subdivisions per segment"
+ )
+ SplineTypes = [
+ ('POLY', 'Poly', 'POLY'),
+ ('NURBS', 'Nurbs', 'NURBS'),
+ ('BEZIER', 'Bezier', 'BEZIER')]
+ outputType = EnumProperty(
+ name="Output splines",
+ default='BEZIER',
+ description="Type of splines to output",
+ items=SplineTypes,
+ )
+ bezierHandles = [
+ ('VECTOR', 'Vector', 'VECTOR'),
+ ('AUTOMATIC', 'Auto', 'AUTOMATIC'),
+ ]
+ handleType = EnumProperty(
+ name="Handle type",
+ default='AUTOMATIC',
+ items=bezierHandles,
+ description="Bezier handle type",
+ )
+ adaptive_resolution = BoolProperty(
+ name="Adaptive Resolution",
+ default=False,
+ description="Auto adjust curve resolution based on TK length",
+ )
##### DRAW #####
def draw(self, context):
layout = self.layout
- # general options
- layout.label(text="Torus Knot Parameters:")
+ # extra parameters toggle
+ layout.prop(self, 'options_plus')
- # Parameters
+ # TORUS KNOT Parameters
+ col = layout.column()
+ col.label(text="Torus Knot Parameters:")
box = layout.box()
- box.prop(self, 'torus_res')
- box.prop(self, 'torus_w')
- box.prop(self, 'torus_h')
- box.prop(self, 'torus_p')
- box.prop(self, 'torus_q')
- box.prop(self, 'options_plus')
+ row = box.row()
+ row.column().prop(self, 'torus_p')
+ row.column().prop(self, 'flip_p')
+ row = box.row()
+ row.column().prop(self, 'torus_q')
+ row.column().prop(self, 'flip_q')
+
+ links = gcd(self.torus_p, self.torus_q)
+ info = "Multiple Links"
+ if links > 1: info += " ( " + str(links) + " )"
+ box.prop(self, 'multiple_links', text=info)
+
if self.options_plus:
+ box = box.box()
box.prop(self, 'torus_u')
box.prop(self, 'torus_v')
- box.prop(self, 'torus_rounds')
+ box.prop(self, 'torus_rP')
+ box.prop(self, 'torus_sP')
+
+ # TORUS DIMENSIONS options
+ col = layout.column(align=True)
+ col.label(text="Torus Dimensions:")
+ box = layout.box()
+ col = box.column(align=True)
+ col.row().prop(self, "mode", expand=True)
+
+ if self.mode == 'MAJOR_MINOR':
+ col = box.column(align=True)
+ col.prop(self, "torus_R")
+
+ col = box.column(align=True)
+ col.prop(self, "torus_r")
+ else: # EXTERIOR-INTERIOR
+ col = box.column(align=True)
+ col.prop(self, "torus_eR")
- # surface Options
+ col = box.column(align=True)
+ col.prop(self, "torus_iR")
+
+ if self.options_plus:
+ box = box.box()
+ box.prop(self, 'torus_s')
+ box.prop(self, 'torus_h')
+
+ # CURVE options
+ col = layout.column(align=True)
+ col.label(text="Curve Options:")
+ box = layout.box()
+
+ col = box.column()
+ col.label(text="Output Curve Type:")
+ col.row().prop(self, 'outputType', expand=True)
+
+ depends=box.column()
+ depends.prop(self, 'torus_res')
+ # deactivate the "curve resolution" if "adaptive resolution" is enabled
+ depends.enabled = not self.adaptive_resolution
+
+ box.prop(self, 'adaptive_resolution')
+ box.prop(self, 'segment_res')
+
+ # SURFACE options
col = layout.column()
col.label(text="Geometry Options:")
box = layout.box()
- box.prop(self, 'geo_surf')
- if self.geo_surf:
+ box.prop(self, 'geo_surface')
+ if self.geo_surface:
box.prop(self, 'geo_bDepth')
box.prop(self, 'geo_bRes')
box.prop(self, 'geo_extrude')
- box.prop(self, 'geo_res')
+ box.prop(self, 'geo_offset')
+
+ # COLOR options
+ col = layout.column()
+ col.label(text="Color Options:")
+ box = layout.box()
+ box.prop(self, 'use_colors')
+ if self.use_colors and self.options_plus:
+ box = box.box()
+ box.prop(self, 'colorSet')
+ box.prop(self, 'random_colors')
+ box.prop(self, 'saturation')
+ # TRANSFORM options
col = layout.column()
- col.prop(self, 'location')
- col.prop(self, 'rotation')
+ col.label(text="Transform Options:")
+ box = col.box()
+ box.prop(self, 'location')
+ box.prop(self, 'absolute_location')
+ box.prop(self, 'rotation')
##### POLL #####
@classmethod
def poll(cls, context):
+ if context.mode != "OBJECT": return False
return context.scene != None
##### EXECUTE #####
def execute(self, context):
+ if self.mode == 'EXT_INT':
+ # adjust the equivalent radii pair : (R,r) <=> (eR,iR)
+ self.torus_R = (self.torus_eR + self.torus_iR)*0.5
+ self.torus_r = (self.torus_eR - self.torus_iR)*0.5
+
+ if self.adaptive_resolution:
+ # adjust curve resolution automatically based on (p,q,R,r) values
+ p = self.torus_p
+ q = self.torus_q
+ R = self.torus_R
+ r = self.torus_r
+ links = gcd(p,q)
+ # get an approximate length of the whole TK curve
+ maxTKLen = 2*pi*sqrt(p*p*(R+r)*(R+r) + q*q*r*r) # upper bound approximation
+ minTKLen = 2*pi*sqrt(p*p*(R-r)*(R-r) + q*q*r*r) # lower bound approximation
+ avgTKLen = (minTKLen + maxTKLen)/2 # average approximation
+ if DEBUG: print("Approximate average TK length = %.2f" % avgTKLen)
+ self.torus_res = max(3, avgTKLen/links * 8) # x N factor = control points per unit length
+
+ # update align matrix
+ self.align_matrix = align_matrix(self, context)
+
# turn off undo
undo = bpy.context.user_preferences.edit.use_global_undo
bpy.context.user_preferences.edit.use_global_undo = False
- if not self.options_plus:
- self.torus_rounds = self.torus_p
-
- #recoded for add_utils
+ # create the curve
create_torus_knot(self, context)
# restore pre operator undo state
bpy.context.user_preferences.edit.use_global_undo = undo
return {'FINISHED'}
+
+ ##### INVOKE #####
+ def invoke(self, context, event):
+ self.execute(context)
+
+ return {'FINISHED'}
diff --git a/add_curve_ivygen.py b/add_curve_ivygen.py
index 6b359e14..ac457172 100644
--- a/add_curve_ivygen.py
+++ b/add_curve_ivygen.py
@@ -402,16 +402,16 @@ def adhesion(loc, ob, max_l):
# Compute the adhesion vector by finding the nearest point
nearest_result = ob.closest_point_on_mesh(tran_loc, max_l)
adhesion_vector = Vector((0.0, 0.0, 0.0))
- if nearest_result[2] != -1:
+ if nearest_result[0]:
# Compute the distance to the nearest point
- adhesion_vector = ob.matrix_world * nearest_result[0] - loc
+ adhesion_vector = ob.matrix_world * nearest_result[1] - loc
distance = adhesion_vector.length
# If it's less than the maximum allowed and not 0, continue
if distance:
# Compute the direction vector between the closest point and loc
adhesion_vector.normalize()
adhesion_vector *= 1.0 - distance / max_l
- #adhesion_vector *= getFaceWeight(ob.data, nearest_result[2])
+ #adhesion_vector *= getFaceWeight(ob.data, nearest_result[3])
return adhesion_vector
@@ -423,15 +423,16 @@ def collision(ob, pos, new_pos):
tran_mat = ob.matrix_world.inverted()
tran_pos = tran_mat * pos
tran_new_pos = tran_mat * new_pos
+ tran_dir = tran_new_pos - tran_pos
- ray_result = ob.ray_cast(tran_pos, tran_new_pos)
+ ray_result = ob.ray_cast(tran_pos, tran_dir, tran_dir.length)
# If there's a collision we need to check it
- if ray_result[2] != -1:
+ if ray_result[0]:
# Check whether the collision is going into the object
- if (tran_new_pos - tran_pos).dot(ray_result[1]) < 0.0:
+ if tran_dir.dot(ray_result[2]) < 0.0:
# Find projection of the piont onto the plane
p0 = tran_new_pos - (tran_new_pos -
- ray_result[0]).project(ray_result[1])
+ ray_result[1]).project(ray_result[2])
# Reflect in the plane
tran_new_pos += 2 * (p0 - tran_new_pos)
new_pos *= 0
diff --git a/add_curve_sapling/__init__.py b/add_curve_sapling/__init__.py
index 00225217..fa1b3267 100644
--- a/add_curve_sapling/__init__.py
+++ b/add_curve_sapling/__init__.py
@@ -396,6 +396,10 @@ class AddTree(bpy.types.Operator):
def poll(cls, context):
return context.mode == 'OBJECT'
+ def check(self, context):
+ # TODO, should check exact vars which require redraw
+ return True
+
def draw(self, context):
layout = self.layout
diff --git a/add_mesh_extra_objects/add_mesh_3d_function_surface.py b/add_mesh_extra_objects/add_mesh_3d_function_surface.py
index f91aff6c..f41673ab 100644
--- a/add_mesh_extra_objects/add_mesh_3d_function_surface.py
+++ b/add_mesh_extra_objects/add_mesh_3d_function_surface.py
@@ -278,21 +278,12 @@ def xyz_function_surface_faces(self, x_eq, y_eq, z_eq,
# Try to evaluate the equations.
try:
- a = float(eval(*expr_args_a))
- b = float(eval(*expr_args_b))
- c = float(eval(*expr_args_c))
-
- safe_dict['a'] = a
- safe_dict['b'] = b
- safe_dict['c'] = c
-
- f = float(eval(*expr_args_f))
- g = float(eval(*expr_args_g))
- h = float(eval(*expr_args_h))
-
- safe_dict['f'] = f
- safe_dict['g'] = g
- safe_dict['h'] = h
+ safe_dict['a'] = float(eval(*expr_args_a))
+ safe_dict['b'] = float(eval(*expr_args_b))
+ safe_dict['c'] = float(eval(*expr_args_c))
+ safe_dict['f'] = float(eval(*expr_args_f))
+ safe_dict['g'] = float(eval(*expr_args_g))
+ safe_dict['h'] = float(eval(*expr_args_h))
verts.append((
float(eval(*expr_args_x)),
diff --git a/add_mesh_extra_objects/add_mesh_honeycomb.py b/add_mesh_extra_objects/add_mesh_honeycomb.py
index df60671b..3e30afe7 100644
--- a/add_mesh_extra_objects/add_mesh_honeycomb.py
+++ b/add_mesh_extra_objects/add_mesh_honeycomb.py
@@ -192,7 +192,12 @@ class add_mesh_honeycomb(bpy.types.Operator):
name = 'Num of cols', default = 2,
min = 1, max = 100,
description='Number of the columns')
-
+ layers = BoolVectorProperty(
+ name="Layers",
+ size=20,
+ subtype='LAYER',
+ options={'HIDDEN', 'SKIP_SAVE'},
+ )
def fix_edge(self, context):
m = edge_max(self.diam)
if self.edge > m: self.edge = m
diff --git a/add_mesh_extra_objects/add_mesh_menger_sponge.py b/add_mesh_extra_objects/add_mesh_menger_sponge.py
index 43c8c57d..1e58ebce 100644
--- a/add_mesh_extra_objects/add_mesh_menger_sponge.py
+++ b/add_mesh_extra_objects/add_mesh_menger_sponge.py
@@ -4,7 +4,7 @@
import bpy
-from bpy.props import IntProperty, BoolProperty, FloatVectorProperty, FloatProperty
+from bpy.props import IntProperty, BoolProperty, BoolVectorProperty, FloatVectorProperty, FloatProperty
import bpy
import mathutils
@@ -166,7 +166,12 @@ class AddMengerSponge(bpy.types.Operator):
name="Rotation",
subtype='EULER',
)
-
+ layers = BoolVectorProperty(
+ name="Layers",
+ size=20,
+ subtype='LAYER',
+ options={'HIDDEN', 'SKIP_SAVE'},
+ )
def execute(self, context):
sponger = MengerSponge(self.level)
vertices, faces = sponger.create(self.radius * 2, self.radius * 2)
diff --git a/add_mesh_extra_objects/add_mesh_round_cube.py b/add_mesh_extra_objects/add_mesh_round_cube.py
index 169a7da7..189f19b4 100644
--- a/add_mesh_extra_objects/add_mesh_round_cube.py
+++ b/add_mesh_extra_objects/add_mesh_round_cube.py
@@ -320,7 +320,7 @@ class AddRoundCube(bpy.types.Operator, object_utils.AddObjectHelper):
"""Add Round Cube Primitive"""
bl_idname = 'mesh.primitive_round_cube_add'
bl_label = 'Add Round Cube'
- bl_description = 'Add mesh primitives: Quadspheres, Capsules, Rounded Cuboids, 3D Grids, etc.'
+ bl_description = 'Add mesh primitives: Quadspheres, Capsules, Rounded Cuboids, 3D Grids, etc'
bl_options = {'REGISTER', 'UNDO', 'PRESET'}
sanity_check_verts = 200000
diff --git a/bone_selection_groups.py b/bone_selection_groups.py
new file mode 100644
index 00000000..1c00bb13
--- /dev/null
+++ b/bone_selection_groups.py
@@ -0,0 +1,244 @@
+# ***** BEGIN GPL LICENSE BLOCK *****
+#
+# This program is free software; you can redistribute it and/or
+# modify it under the terms of the GNU General Public License
+# as published by the Free Software Foundation; either version 2
+# of the License, or (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this program; if not, write to the Free Software Foundation,
+# Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
+#
+# ***** END GPL LICENCE BLOCK *****
+
+bl_info = {
+ "name": "Bone Selection Groups",
+ "author": "Antony Riakiotakis",
+ "version": (1, 0, 2),
+ "blender": (2, 75, 0),
+ "location": "Properties > Object Buttons",
+ "description": "Operator and storage for restoration of bone selection state.",
+ "category": "Animation",
+}
+
+import bpy
+from bpy.types import (
+ Operator,
+ Panel,
+ UIList,
+)
+
+from bpy.props import (
+ StringProperty,
+ BoolProperty,
+ IntProperty,
+ FloatProperty,
+ EnumProperty,
+ CollectionProperty,
+ BoolVectorProperty,
+ FloatVectorProperty,
+)
+
+
+class POSE_PT_selection_sets(Panel):
+ bl_label = "Selection Sets"
+ bl_space_type = 'PROPERTIES'
+ bl_region_type = 'WINDOW'
+ bl_context = "data"
+
+ @classmethod
+ def poll(cls, context):
+ return context.object.type == 'ARMATURE'
+
+ def draw(self, context):
+ layout = self.layout
+
+ armature = context.object
+
+ # Rig type list
+ row = layout.row()
+ row.template_list(
+ "POSE_UL_selection_set", "",
+ armature, "selection_sets",
+ armature, "active_selection_set")
+
+ col = row.column()
+ colsub = col.column(align=True)
+ colsub.operator("pose.selection_set_add", icon='ZOOMIN', text="")
+ colsub.operator("pose.selection_set_remove", icon='ZOOMOUT', text="")
+
+ layout.operator("pose.selection_set_toggle")
+
+
+class POSE_UL_selection_set(UIList):
+ def draw_item(self, context, layout, data, set, icon, active_data, active_propname, index):
+ layout.prop(set, "name", text="", emboss=False)
+
+
+class SelectionEntry(bpy.types.PropertyGroup):
+ name = StringProperty(name="Bone Name")
+
+
+class SelectionSet(bpy.types.PropertyGroup):
+ name = StringProperty(name="Set Name")
+ bone_ids = CollectionProperty(type=SelectionEntry)
+
+
+class PluginOperator(Operator):
+ @classmethod
+ def poll(self, context):
+ return (context.object and
+ context.object.type == 'ARMATURE' and
+ context.mode == 'POSE')
+
+
+class POSE_OT_selection_set_add(PluginOperator):
+ bl_idname = "pose.selection_set_add"
+ bl_label = "Add Selection Set"
+ bl_options = {'UNDO', 'REGISTER'}
+
+ def execute(self, context):
+ keep = False
+ armature = context.object
+ pose = armature.pose
+
+ selection_set = armature.selection_sets.add()
+ selection_set.name = "SelectionSet.%d" % len(armature.selection_sets)
+ armature.active_selection_set = len(armature.selection_sets) - 1
+ for bone in pose.bones:
+ if (bone.bone.select):
+ bone_id = selection_set.bone_ids.add()
+ bone_id.name = bone.name
+ keep = True
+
+ if (not keep):
+ armature.selection_sets.remove(armature.active_selection_set)
+ numsets = len(armature.selection_sets)
+ if (armature.active_selection_set > (numsets - 1) and numsets > 0):
+ armature.active_selection_set = len(armature.selection_sets) - 1
+ return {'CANCELLED'}
+
+ return {'FINISHED'}
+
+
+class POSE_OT_selection_set_remove(PluginOperator):
+ bl_idname = "pose.selection_set_remove"
+ bl_label = "Delete Selection Set"
+ bl_options = {'UNDO', 'REGISTER'}
+
+ def execute(self, context):
+ armature = context.object
+
+ armature.selection_sets.remove(armature.active_selection_set)
+ numsets = len(armature.selection_sets)
+ if (armature.active_selection_set > (numsets - 1) and numsets > 0):
+ armature.active_selection_set = len(armature.selection_sets) - 1
+ return {'FINISHED'}
+
+
+class POSE_OT_selection_set_toggle(PluginOperator):
+ bl_idname = "pose.selection_set_toggle"
+ bl_label = "Toggle Selection Set"
+ bl_options = {'UNDO', 'REGISTER'}
+
+ def execute(self, context):
+ armature = context.object
+ pose = armature.pose
+
+ selection_set = armature.selection_sets[armature.active_selection_set]
+ for bone in pose.bones:
+ bone.bone.select = False
+
+ for bone in selection_set.bone_ids:
+ pose.bones[bone.name].bone.select = True
+
+ return {'FINISHED'}
+
+
+class MotionPathsCopyStartFrame(Operator):
+ bl_idname = "anim.motionpaths_copy_scene_startframe"
+ bl_label = "Copy Scene Start Frame"
+ bl_options = {'REGISTER', 'UNDO'}
+
+ armature_paths = BoolProperty()
+
+ @classmethod
+ def poll(cls, context):
+ return (context.object)
+
+ def execute(self, context):
+ avs = None
+ motionpath = None
+ ob = context.object
+ scene = context.scene
+
+ if (self.armature_paths):
+ avs = ob.pose.animation_visualization
+ else:
+ avs = ob.animation_visualization
+
+ preview = scene.use_preview_range
+
+ if (avs):
+ motionpath = avs.motion_path
+
+ if (motionpath):
+ if (preview):
+ motionpath.frame_start = scene.frame_preview_start
+ motionpath.frame_end = scene.frame_preview_end
+ else:
+ motionpath.frame_start = scene.frame_start
+ motionpath.frame_end = scene.frame_end
+ else:
+ return {'CANCELLED'}
+
+ return {'FINISHED'}
+
+
+classes = (
+ POSE_PT_selection_sets,
+ POSE_UL_selection_set,
+ SelectionEntry,
+ SelectionSet,
+ POSE_OT_selection_set_add,
+ POSE_OT_selection_set_remove,
+ POSE_OT_selection_set_toggle,
+ MotionPathsCopyStartFrame
+)
+
+
+def copy_frames_armature(self, context):
+ layout = self.layout
+ layout.operator("anim.motionpaths_copy_scene_startframe").armature_paths = True
+
+
+def copy_frames_object(self, context):
+ layout = self.layout
+ layout.operator("anim.motionpaths_copy_scene_startframe").armature_paths = False
+
+
+def register():
+ for cls in classes:
+ bpy.utils.register_class(cls)
+
+ bpy.types.Object.selection_sets = CollectionProperty(type=SelectionSet)
+ bpy.types.Object.active_selection_set = IntProperty()
+ bpy.types.DATA_PT_motion_paths.append(copy_frames_armature)
+ bpy.types.OBJECT_PT_motion_paths.append(copy_frames_object)
+
+
+def unregister():
+ for cls in classes:
+ bpy.utils.unregister_class(cls)
+
+ del bpy.types.Object.selection_sets
+ del bpy.types.Object.active_selection_set
+
+
+if __name__ == "__main__":
+ register()
diff --git a/curve_simplify.py b/curve_simplify.py
index 1168cbcc..4ed3a5b3 100644
--- a/curve_simplify.py
+++ b/curve_simplify.py
@@ -19,8 +19,8 @@
bl_info = {
"name": "Simplify Curves",
"author": "testscreenings",
- "version": (1,),
- "blender": (2, 59, 0),
+ "version": (1, 0, 1),
+ "blender": (2, 75, 0),
"location": "Search > Simplify Curves",
"description": "Simplifies 3D Curve objects and animation F-Curves",
"warning": "",
@@ -39,6 +39,11 @@ from bpy.props import *
import mathutils
import math
+from bpy.types import Menu
+
+
+ ## Check for curve
+
##############################
#### simplipoly algorithm ####
##############################
@@ -357,6 +362,28 @@ def fcurves_simplify(context, obj, options, fcurves):
#fcurve.points.foreach_set('co', newPoints)
return
+### MENU ###
+
+class GRAPH_OT_simplifyf(bpy.types.Menu):
+ bl_space_type = "GRAPH_EDITOR"
+ bl_label = "Simplify F Curves"
+
+ def draw(self, context):
+ layout = self.layout
+
+def menu_func(self, context):
+ self.layout.operator(GRAPH_OT_simplify.bl_idname)
+
+class CurveMenu(Menu):
+ bl_space_type = "3D_VIEW"
+ bl_label = "Simplify Curves"
+
+ def draw(self, context):
+ layout = self.layout
+
+def menu(self, context):
+ self.layout.operator("curve.simplify", text="Curve Simplify", icon="CURVE_DATA")
+
#################################################
#### ANIMATION CURVES OPERATOR ##################
#################################################
@@ -364,7 +391,7 @@ class GRAPH_OT_simplify(bpy.types.Operator):
""""""
bl_idname = "graph.simplify"
bl_label = "Simplifiy F-Curves"
- bl_description = "Simplify selected F-Curves"
+ bl_description = "Simplify selected Curves"
bl_options = {'REGISTER', 'UNDO'}
## Properties
@@ -423,6 +450,7 @@ class GRAPH_OT_simplify(bpy.types.Operator):
def draw(self, context):
layout = self.layout
col = layout.column()
+ col.label(text = "Simplify F-Curves")
col.prop(self, 'error', expand=True)
## Check for animdata
@@ -549,7 +577,6 @@ class CURVE_OT_simplify(bpy.types.Operator):
col.prop(self, 'keepShort', expand=True)
- ## Check for curve
@classmethod
def poll(cls, context):
obj = context.active_object
@@ -588,12 +615,18 @@ class CURVE_OT_simplify(bpy.types.Operator):
def register():
bpy.utils.register_module(__name__)
- pass
+ bpy.types.GRAPH_MT_channel.append(menu_func)
+ bpy.types.DOPESHEET_MT_channel.append(menu_func)
+ bpy.types.INFO_MT_curve_add.append(menu)
def unregister():
- bpy.utils.unregister_module(__name__)
- pass
+ bpy.types.GRAPH_MT_channel.remove(menu_func)
+ bpy.types.DOPESHEET_MT_channel.remove(menu_func)
+ bpy.types.INFO_MT_curve_add.remove(menu)
+
+ bpy.utils.unregister_module(__name__)
if __name__ == "__main__":
register()
+
diff --git a/game_engine_publishing.py b/game_engine_publishing.py
index 98f6e3c6..495b0123 100644
--- a/game_engine_publishing.py
+++ b/game_engine_publishing.py
@@ -27,12 +27,12 @@ import stat
bl_info = {
"name": "Game Engine Publishing",
- "author": "Mitchell Stokes (Moguri)",
+ "author": "Mitchell Stokes (Moguri), Oren Titane (Genome36)",
"version": (0, 1, 0),
- "blender": (2, 72, 0),
+ "blender": (2, 75, 0),
"location": "Render Properties > Publishing Info",
"description": "Publish .blend file as game engine runtime, manage versions and platforms",
- "warning": "beta",
+ "warning": "",
"wiki_url": "http://wiki.blender.org/index.php/Extensions:2.6/Py/Scripts/Game_Engine/Publishing",
"category": "Game Engine",
}
@@ -80,10 +80,9 @@ def WriteRuntime(player_path, output_path, asset_paths, copy_python, overwrite_l
output_path = bpy.path.ensure_ext(output_path, '.exe')
# Get the player's binary and the offset for the blend
- file = open(player_path, 'rb')
- player_d = file.read()
- offset = file.tell()
- file.close()
+ with open(player_path, "rb") as file:
+ player_d = file.read()
+ offset = file.tell()
# Create a tmp blend file (Blenderplayer doesn't like compressed blends)
tempdir = tempfile.mkdtemp()
@@ -95,31 +94,28 @@ def WriteRuntime(player_path, output_path, asset_paths, copy_python, overwrite_l
)
# Get the blend data
- blend_file = open(blend_path, 'rb')
- blend_d = blend_file.read()
- blend_file.close()
+ with open(blend_path, "rb") as blend_file:
+ blend_d = blend_file.read()
# Get rid of the tmp blend, we're done with it
os.remove(blend_path)
os.rmdir(tempdir)
# Create a new file for the bundled runtime
- output = open(output_path, 'wb')
+ with open(output_path, "wb") as output:
+ # Write the player and blend data to the new runtime
+ print("Writing runtime...", end=" ", flush=True)
+ output.write(player_d)
+ output.write(blend_d)
- # Write the player and blend data to the new runtime
- print("Writing runtime...", end=" ", flush=True)
- output.write(player_d)
- output.write(blend_d)
+ # Store the offset (an int is 4 bytes, so we split it up into 4 bytes and save it)
+ output.write(struct.pack('BBBB', (offset >> 24) & 0xFF,
+ (offset >> 16) & 0xFF,
+ (offset >> 8) & 0xFF,
+ (offset >> 0) & 0xFF))
- # Store the offset (an int is 4 bytes, so we split it up into 4 bytes and save it)
- output.write(struct.pack('BBBB', (offset >> 24) & 0xFF,
- (offset >> 16) & 0xFF,
- (offset >> 8) & 0xFF,
- (offset >> 0) & 0xFF))
-
- # Stuff for the runtime
- output.write(b'BRUNTIME')
- output.close()
+ # Stuff for the runtime
+ output.write(b'BRUNTIME')
print("done", flush=True)
@@ -251,6 +247,22 @@ class PublishAllPlatforms(bpy.types.Operator):
return {'FINISHED'}
+class RENDER_UL_assets(bpy.types.UIList):
+ bl_label = "Asset Paths Listing"
+
+ def draw_item(self, context, layout, data, item, icon, active_data, active_propname):
+ layout.prop(item, "name", text="", emboss=False)
+
+
+class RENDER_UL_platforms(bpy.types.UIList):
+ bl_label = "Platforms Listing"
+
+ def draw_item(self, context, layout, data, item, icon, active_data, active_propname):
+ row = layout.row()
+ row.label(item.name)
+ row.prop(item, "publish", text="")
+
+
class RENDER_PT_publish(bpy.types.Panel):
bl_label = "Publishing Info"
bl_space_type = "PROPERTIES"
@@ -266,39 +278,54 @@ class RENDER_PT_publish(bpy.types.Panel):
ps = context.scene.ge_publish_settings
layout = self.layout
+ # config
layout.prop(ps, 'output_path')
layout.prop(ps, 'runtime_name')
layout.prop(ps, 'lib_path')
layout.prop(ps, 'make_archive')
+ layout.separator()
+
+ # assets list
layout.label("Asset Paths")
+
+ # UI_UL_list
row = layout.row()
- row.template_list("UI_UL_list", "assets_list", ps, 'asset_paths', ps, 'asset_paths_active')
+ row.template_list("RENDER_UL_assets", "assets_list", ps, 'asset_paths', ps, 'asset_paths_active')
+
+ # operators
col = row.column(align=True)
col.operator(PublishAddAssetPath.bl_idname, icon='ZOOMIN', text="")
col.operator(PublishRemoveAssetPath.bl_idname, icon='ZOOMOUT', text="")
+ # indexing
if len(ps.asset_paths) > ps.asset_paths_active >= 0:
ap = ps.asset_paths[ps.asset_paths_active]
row = layout.row()
- row.prop(ap, 'name')
row.prop(ap, 'overwrite')
- layout.label("Platforms")
- layout.prop(ps, 'publish_default_platform')
+ layout.separator()
+
+ # publishing list
+ row = layout.row(align=True)
+ row.label("Platforms")
+ row.prop(ps, 'publish_default_platform')
+
+ # UI_UL_list
row = layout.row()
- row.template_list("UI_UL_list", "platforms_list", ps, 'platforms', ps, 'platforms_active')
+ row.template_list("RENDER_UL_platforms", "platforms_list", ps, 'platforms', ps, 'platforms_active')
+ # operators
col = row.column(align=True)
col.operator(PublishAddPlatform.bl_idname, icon='ZOOMIN', text="")
col.operator(PublishRemovePlatform.bl_idname, icon='ZOOMOUT', text="")
col.menu("PUBLISH_MT_platform_specials", icon='DOWNARROW_HLT', text="")
+ # indexing
if len(ps.platforms) > ps.platforms_active >= 0:
platform = ps.platforms[ps.platforms_active]
layout.prop(platform, 'name')
layout.prop(platform, 'player_path')
- layout.prop(platform, 'publish')
layout.operator(PublishAllPlatforms.bl_idname, 'Publish Platforms')
@@ -310,7 +337,11 @@ class PublishAutoPlatforms(bpy.types.Operator):
def execute(self, context):
ps = context.scene.ge_publish_settings
+ # verify lib folder
lib_path = bpy.path.abspath(ps.lib_path)
+ if not os.path.exists(lib_path):
+ self.report({'ERROR'}, "Could not add platforms, lib folder (%s) does not exist" % lib_path)
+ return {'CANCELLED'}
for lib in [i for i in os.listdir(lib_path) if os.path.isdir(os.path.join(lib_path, i))]:
print("Found folder:", lib)
@@ -352,7 +383,11 @@ class PublishDownloadPlatforms(bpy.types.Operator):
remote_platforms = []
ps = context.scene.ge_publish_settings
+
+ # create lib folder if not already available
lib_path = bpy.path.abspath(ps.lib_path)
+ if not os.path.exists(lib_path):
+ os.makedirs(lib_path)
print("Retrieving list of platforms from blender.org...", end=" ", flush=True)
diff --git a/io_anim_bvh/__init__.py b/io_anim_bvh/__init__.py
index 57f879c0..27c8003b 100644
--- a/io_anim_bvh/__init__.py
+++ b/io_anim_bvh/__init__.py
@@ -26,7 +26,7 @@ bl_info = {
"description": "Import-Export BVH from armature objects",
"warning": "",
"wiki_url": "http://wiki.blender.org/index.php/Extensions:2.6/Py/"
- "Scripts/Import-Export/MotionCapture_BVH",
+ "Scripts/Import-Export/BVH_Importer_Exporter",
"support": 'OFFICIAL',
"category": "Import-Export"}
@@ -87,9 +87,19 @@ class ImportBVH(bpy.types.Operator, ImportHelper, ImportBVHOrientationHelper):
)
use_fps_scale = BoolProperty(
name="Scale FPS",
- description=("Scale the framerate from the BVH to "
- "the current scenes, otherwise each "
- "BVH frame maps directly to a Blender frame"),
+ description=("Scale the framerate from the BVH to the current scenes, "
+ "otherwise each BVH frame maps directly to a Blender frame"),
+ default=False,
+ )
+ update_scene_fps = BoolProperty(
+ name="Update Scene FPS",
+ description="Set the scene framerate to that of the BVH file (note that this "
+ "nullifies the 'Scale FPS' option, as the scale will be 1:1)",
+ default=False
+ )
+ update_scene_duration = BoolProperty(
+ name="Update Scene Duration",
+ description="Extend the scene's duration to the BVH duration (never shortens the scene)",
default=False,
)
use_cyclic = BoolProperty(
@@ -102,8 +112,8 @@ class ImportBVH(bpy.types.Operator, ImportHelper, ImportBVHOrientationHelper):
description="Rotation conversion",
items=(('QUATERNION', "Quaternion",
"Convert rotations to quaternions"),
- ('NATIVE', "Euler (Native)", ("Use the rotation order "
- "defined in the BVH file")),
+ ('NATIVE', "Euler (Native)",
+ "Use the rotation order defined in the BVH file"),
('XYZ', "Euler (XYZ)", "Convert rotations to euler XYZ"),
('XZY', "Euler (XZY)", "Convert rotations to euler XZY"),
('YXZ', "Euler (YXZ)", "Convert rotations to euler YXZ"),
@@ -127,7 +137,7 @@ class ImportBVH(bpy.types.Operator, ImportHelper, ImportBVHOrientationHelper):
keywords["global_matrix"] = global_matrix
from . import import_bvh
- return import_bvh.load(self, context, **keywords)
+ return import_bvh.load(context, report=self.report, **keywords)
class ExportBVH(bpy.types.Operator, ExportHelper):
@@ -162,7 +172,7 @@ class ExportBVH(bpy.types.Operator, ExportHelper):
name="Rotation",
description="Rotation conversion",
items=(('NATIVE', "Euler (Native)",
- "Use the rotation order defined in the BVH file"),
+ "Use the rotation order defined in the BVH file"),
('XYZ', "Euler (XYZ)", "Convert rotations to euler XYZ"),
('XZY', "Euler (XZY)", "Convert rotations to euler XZY"),
('YXZ', "Euler (YXZ)", "Convert rotations to euler YXZ"),
diff --git a/io_anim_bvh/export_bvh.py b/io_anim_bvh/export_bvh.py
index 4d7f0b09..ed80ed59 100644
--- a/io_anim_bvh/export_bvh.py
+++ b/io_anim_bvh/export_bvh.py
@@ -147,7 +147,7 @@ def write_armature(context,
# redefine bones as sorted by serialized_names
# so we can write motion
- class DecoratedBone(object):
+ class DecoratedBone:
__slots__ = (
"name", # bone name, used as key in many places
"parent", # decorated bone parent, set in a later loop
diff --git a/io_anim_bvh/import_bvh.py b/io_anim_bvh/import_bvh.py
index 50ae4bd7..df733a73 100644
--- a/io_anim_bvh/import_bvh.py
+++ b/io_anim_bvh/import_bvh.py
@@ -20,13 +20,13 @@
# Script copyright (C) Campbell Barton
-from math import radians
+from math import radians, ceil
import bpy
from mathutils import Vector, Euler, Matrix
-class BVH_Node(object):
+class BVH_Node:
__slots__ = (
'name', # bvh joint name
'parent', # BVH_Node type or None for no parent
@@ -79,7 +79,7 @@ class BVH_Node(object):
self.anim_data = [(0, 0, 0, 0, 0, 0)]
def __repr__(self):
- return ('BVH name:"%s", rest_loc:(%.3f,%.3f,%.3f), rest_tail:(%.3f,%.3f,%.3f)' %
+ return ("BVH name: '%s', rest_loc:(%.3f,%.3f,%.3f), rest_tail:(%.3f,%.3f,%.3f)" %
(self.name,
self.rest_head_world.x, self.rest_head_world.y, self.rest_head_world.z,
self.rest_head_world.x, self.rest_head_world.y, self.rest_head_world.z))
@@ -114,6 +114,7 @@ def read_bvh(context, file_path, rotate_mode='XYZ', global_scale=1.0):
bvh_nodes = {None: None}
bvh_nodes_serial = [None]
+ bvh_frame_count = None
bvh_frame_time = None
channelIndex = -1
@@ -205,8 +206,13 @@ def read_bvh(context, file_path, rotate_mode='XYZ', global_scale=1.0):
# Frames: n
# Frame Time: dt
if len(file_lines[lineIdx]) == 1 and file_lines[lineIdx][0].lower() == 'motion':
- lineIdx += 2 # Read frame rate.
+ lineIdx += 1 # Read frame count.
+ if (len(file_lines[lineIdx]) == 2 and
+ file_lines[lineIdx][0].lower() == 'frames:'):
+ bvh_frame_count = int(file_lines[lineIdx][1])
+
+ lineIdx += 1 # Read frame rate.
if (len(file_lines[lineIdx]) == 3 and
file_lines[lineIdx][0].lower() == 'frame' and
file_lines[lineIdx][1].lower() == 'time:'):
@@ -292,7 +298,7 @@ def read_bvh(context, file_path, rotate_mode='XYZ', global_scale=1.0):
bvh_node.rest_tail_local.y = bvh_node.rest_tail_local.y + global_scale / 10
bvh_node.rest_tail_world.y = bvh_node.rest_tail_world.y + global_scale / 10
- return bvh_nodes, bvh_frame_time
+ return bvh_nodes, bvh_frame_time, bvh_frame_count
def bvh_node_dict2objects(context, bvh_name, bvh_nodes, rotate_mode='NATIVE', frame_start=1, IMPORT_LOOP=False):
@@ -601,9 +607,9 @@ def bvh_node_dict2armature(context,
return arm_ob
-def load(operator,
- context,
- filepath="",
+def load(context,
+ filepath,
+ *,
target='ARMATURE',
rotate_mode='NATIVE',
global_scale=1.0,
@@ -611,26 +617,46 @@ def load(operator,
frame_start=1,
global_matrix=None,
use_fps_scale=False,
+ update_scene_fps=False,
+ update_scene_duration=False,
+ report=print
):
import time
t1 = time.time()
- print('\tparsing bvh %r...' % filepath, end="")
+ print("\tparsing bvh %r..." % filepath, end="")
- bvh_nodes, bvh_frame_time = read_bvh(context, filepath,
+ bvh_nodes, bvh_frame_time, bvh_frame_count = read_bvh(context, filepath,
rotate_mode=rotate_mode,
global_scale=global_scale)
- print('%.4f' % (time.time() - t1))
+ print("%.4f" % (time.time() - t1))
scene = context.scene
frame_orig = scene.frame_current
- fps = scene.render.fps
+
+ # Broken BVH handling: guess frame rate when it is not contained in the file.
if bvh_frame_time is None:
- bvh_frame_time = 1.0 / scene.render.fps
+ report({'WARNING'}, "The BVH file does not contain frame duration in its MOTION "
+ "section, assuming the BVH and Blender scene have the same "
+ "frame rate")
+ bvh_frame_time = scene.render.fps_base / scene.render.fps
+ # No need to scale the frame rate, as they're equal now anyway.
+ use_fps_scale = False
+
+ if update_scene_fps:
+ _update_scene_fps(context, report, bvh_frame_time)
+
+ # Now that we have a 1-to-1 mapping of Blender frames and BVH frames, there is no need
+ # to scale the FPS any more. It's even better not to, to prevent roundoff errors.
+ use_fps_scale = False
+
+ if update_scene_duration:
+ _update_scene_duration(context, report, bvh_frame_count, bvh_frame_time, frame_start,
+ use_fps_scale)
t1 = time.time()
- print('\timporting to blender...', end="")
+ print("\timporting to blender...", end="")
bvh_name = bpy.path.display_name_from_filepath(filepath)
@@ -652,10 +678,55 @@ def load(operator,
)
else:
- raise Exception("invalid type")
+ report({'ERROR'}, "Invalid target %r (must be 'ARMATURE' or 'OBJECT')" % target)
+ return {'CANCELLED'}
print('Done in %.4f\n' % (time.time() - t1))
context.scene.frame_set(frame_orig)
return {'FINISHED'}
+
+
+def _update_scene_fps(context, report, bvh_frame_time):
+ """Update the scene's FPS settings from the BVH, but only if the BVH contains enough info."""
+
+ # Broken BVH handling: prevent division by zero.
+ if bvh_frame_time == 0.0:
+ report({'WARNING'}, "Unable to update scene frame rate, as the BVH file "
+ "contains a zero frame duration in its MOTION section")
+ return
+
+ scene = context.scene
+ scene_fps = scene.render.fps / scene.render.fps_base
+ new_fps = 1.0 / bvh_frame_time
+
+ if scene.render.fps != new_fps or scene.render.fps_base != 1.0:
+ print("\tupdating scene FPS (was %f) to BVH FPS (%f)" % (scene_fps, new_fps))
+ scene.render.fps = new_fps
+ scene.render.fps_base = 1.0
+
+
+def _update_scene_duration(context, report, bvh_frame_count, bvh_frame_time, frame_start,
+ use_fps_scale):
+ """Extend the scene's duration so that the BVH file fits in its entirety."""
+
+ if bvh_frame_count is None:
+ report({'WARNING'}, "Unable to extend the scene duration, as the BVH file does not "
+ "contain the number of frames in its MOTION section")
+ return
+
+ # Not likely, but it can happen when a BVH is just used to store an armature.
+ if bvh_frame_count == 0:
+ return
+
+ if use_fps_scale:
+ scene_fps = context.scene.render.fps / context.scene.render.fps_base
+ scaled_frame_count = int(ceil(bvh_frame_count * bvh_frame_time * scene_fps))
+ bvh_last_frame = frame_start + scaled_frame_count
+ else:
+ bvh_last_frame = frame_start + bvh_frame_count
+
+ # Only extend the scene, never shorten it.
+ if context.scene.frame_end < bvh_last_frame:
+ context.scene.frame_end = bvh_last_frame
diff --git a/io_anim_nuke_chan/import_nuke_chan.py b/io_anim_nuke_chan/import_nuke_chan.py
index ed8528ab..48766e43 100644
--- a/io_anim_nuke_chan/import_nuke_chan.py
+++ b/io_anim_nuke_chan/import_nuke_chan.py
@@ -93,7 +93,7 @@ def read_chan(context, filepath, z_up, rot_ord, sensor_width, sensor_height):
obj.keyframe_insert("rotation_quaternion")
elif obj.rotation_mode == 'AXIS_ANGLE':
tmp_rot = trns[1].to_axis_angle()
- obj.rotation_axis_angle = (tmp_rot[1], ) + tmp_rot[0][:]
+ obj.rotation_axis_angle = (tmp_rot[1], *tmp_rot[0])
obj.keyframe_insert("rotation_axis_angle")
del tmp_rot
else:
diff --git a/io_blend_utils/__init__.py b/io_blend_utils/__init__.py
new file mode 100644
index 00000000..adbd2d04
--- /dev/null
+++ b/io_blend_utils/__init__.py
@@ -0,0 +1,110 @@
+# ***** BEGIN GPL LICENSE BLOCK *****
+#
+# This program is free software; you can redistribute it and/or
+# modify it under the terms of the GNU General Public License
+# as published by the Free Software Foundation; either version 2
+# of the License, or (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this program; if not, write to the Free Software Foundation,
+# Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
+#
+# ***** END GPL LICENCE BLOCK *****
+
+bl_info = {
+ "name": "Blend File Utils",
+ "author": "Campbell Barton",
+ "version": (0, 1),
+ "blender": (2, 76, 0),
+ "location": "File > External Data > Blend Utils",
+ "description": "Utility for packing blend files",
+ "warning": "",
+ "wiki_url": "http://wiki.blender.org/index.php/Extensions:2.6/Py/Scripts/Import-Export/BlendFile_Utils",
+ "support": 'OFFICIAL',
+ "category": "Import-Export",
+ }
+
+
+import bpy
+from bpy.types import Operator
+from bpy_extras.io_utils import ExportHelper
+
+from .bl_utils.subprocess_helper import SubprocessHelper
+
+
+class ExportBlendPack(Operator, ExportHelper, SubprocessHelper):
+ """Packs a blend file and all its dependencies into an archive for easy redistribution"""
+ bl_idname = "export_blend.pack"
+ bl_label = "Pack Blend to Archive"
+
+ # ExportHelper
+ filename_ext = ".zip"
+
+ # SubprocessHelper
+ report_interval = 0.25
+
+ temp_dir = None
+
+ @classmethod
+ def poll(cls, context):
+ return bpy.data.is_saved
+
+ def process_pre(self):
+ import os
+ import tempfile
+
+ self.temp_dir = tempfile.TemporaryDirectory()
+
+ filepath_blend = bpy.data.filepath
+
+ self.command = (
+ bpy.app.binary_path_python,
+ os.path.join(os.path.dirname(__file__), "blendfile_pack.py"),
+ # file to pack
+ "--input", filepath_blend,
+ # file to write
+ "--output", bpy.path.ensure_ext(self.filepath, ".zip"),
+ "--temp", self.temp_dir.name,
+ )
+
+ def process_post(self, returncode):
+ if self.temp_dir is not None:
+ try:
+ self.temp_dir.cleanup()
+ except:
+ import traceback
+ traceback.print_exc()
+
+
+def menu_func(self, context):
+ layout = self.layout
+ layout.separator()
+ layout.operator(ExportBlendPack.bl_idname)
+
+
+classes = (
+ ExportBlendPack,
+ )
+
+
+def register():
+ for cls in classes:
+ bpy.utils.register_class(cls)
+
+ bpy.types.INFO_MT_file_external_data.append(menu_func)
+
+
+def unregister():
+ for cls in classes:
+ bpy.utils.unregister_class(cls)
+
+ bpy.types.INFO_MT_file_external_data.remove(menu_func)
+
+
+if __name__ == "__main__":
+ register()
diff --git a/io_blend_utils/bl_utils/pipe_non_blocking.py b/io_blend_utils/bl_utils/pipe_non_blocking.py
new file mode 100644
index 00000000..ead0a738
--- /dev/null
+++ b/io_blend_utils/bl_utils/pipe_non_blocking.py
@@ -0,0 +1,100 @@
+# ##### BEGIN GPL LICENSE BLOCK #####
+#
+# This program is free software; you can redistribute it and/or
+# modify it under the terms of the GNU General Public License
+# as published by the Free Software Foundation; either version 2
+# of the License, or (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this program; if not, write to the Free Software Foundation,
+# Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
+#
+# ##### END GPL LICENSE BLOCK #####
+
+# <pep8 compliant>
+
+"""
+Example use:
+
+ p = subprocess.Popen(
+ command,
+ stdout=subprocess.PIPE,
+ )
+
+ pipe_non_blocking_set(p.stdout.fileno())
+
+ try:
+ data = os.read(p.stdout.fileno(), 1)
+ except PortableBlockingIOError as ex:
+ if not pipe_non_blocking_is_error_blocking(ex):
+ raise ex
+"""
+
+
+__all__ = (
+ "pipe_non_blocking_set",
+ "pipe_non_blocking_is_error_blocking",
+ "PortableBlockingIOError",
+ )
+
+import os
+
+
+if os.name == "nt":
+ # MS-Windows Version
+ def pipe_non_blocking_set(fd):
+ # Constant could define globally but avoid polluting the name-space
+ # thanks to: http://stackoverflow.com/questions/34504970
+ import msvcrt
+
+ from ctypes import windll, byref, wintypes, WinError, POINTER
+ from ctypes.wintypes import HANDLE, DWORD, BOOL
+
+ LPDWORD = POINTER(DWORD)
+
+ PIPE_NOWAIT = wintypes.DWORD(0x00000001)
+
+ def pipe_no_wait(pipefd):
+ SetNamedPipeHandleState = windll.kernel32.SetNamedPipeHandleState
+ SetNamedPipeHandleState.argtypes = [HANDLE, LPDWORD, LPDWORD, LPDWORD]
+ SetNamedPipeHandleState.restype = BOOL
+
+ h = msvcrt.get_osfhandle(pipefd)
+
+ res = windll.kernel32.SetNamedPipeHandleState(h, byref(PIPE_NOWAIT), None, None)
+ if res == 0:
+ print(WinError())
+ return False
+ return True
+
+ return pipe_no_wait(fd)
+
+ def pipe_non_blocking_is_error_blocking(ex):
+ if not isinstance(ex, PortableBlockingIOError):
+ return False
+ from ctypes import GetLastError
+ ERROR_NO_DATA = 232
+
+ return (GetLastError() == ERROR_NO_DATA)
+
+ PortableBlockingIOError = OSError
+else:
+ # Posix Version
+ def pipe_non_blocking_set(fd):
+ import fcntl
+ fl = fcntl.fcntl(fd, fcntl.F_GETFL)
+ fcntl.fcntl(fd, fcntl.F_SETFL, fl | os.O_NONBLOCK)
+ return True
+
+ # only for compatibility with 'nt' version.
+ def pipe_non_blocking_is_error_blocking(ex):
+ if not isinstance(ex, PortableBlockingIOError):
+ return False
+ return True
+
+ PortableBlockingIOError = BlockingIOError
diff --git a/io_blend_utils/bl_utils/subprocess_helper.py b/io_blend_utils/bl_utils/subprocess_helper.py
new file mode 100644
index 00000000..024f0da9
--- /dev/null
+++ b/io_blend_utils/bl_utils/subprocess_helper.py
@@ -0,0 +1,172 @@
+# ##### BEGIN GPL LICENSE BLOCK #####
+#
+# This program is free software; you can redistribute it and/or
+# modify it under the terms of the GNU General Public License
+# as published by the Free Software Foundation; either version 2
+# of the License, or (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this program; if not, write to the Free Software Foundation,
+# Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
+#
+# ##### END GPL LICENSE BLOCK #####
+
+# <pep8 compliant>
+
+"""
+Defines an operator mix-in to use for non-blocking command line access.
+"""
+
+class SubprocessHelper:
+ """
+ Mix-in class for operators to run commands in a non-blocking way.
+
+ This uses a modal operator to manage an external process.
+
+ Subclass must define:
+ ``command``:
+ List of arguments to pass to subprocess.Popen
+ report_interval: Time in seconds between updating reports.
+
+ ``process_pre()``:
+ Callback that runs before the process executes.
+
+ ``process_post(returncode)``:
+ Callback that runs when the process has ende.
+ returncode is -1 if the process was terminated.
+ """
+
+ @staticmethod
+ def _non_blocking_readlines(f, chunk=64):
+ """
+ Iterate over lines, yielding b'' when nothings left
+ or when new data is not yet available.
+ """
+ import os
+
+ from .pipe_non_blocking import (
+ pipe_non_blocking_set,
+ pipe_non_blocking_is_error_blocking,
+ PortableBlockingIOError,
+ )
+
+ fd = f.fileno()
+ pipe_non_blocking_set(fd)
+
+ blocks = []
+
+ while True:
+ try:
+ data = os.read(fd, chunk)
+ if not data:
+ # case were reading finishes with no trailing newline
+ yield b''.join(blocks)
+ blocks.clear()
+ except PortableBlockingIOError as ex:
+ if not pipe_non_blocking_is_error_blocking(ex):
+ raise ex
+
+ yield b''
+ continue
+
+ while True:
+ n = data.find(b'\n')
+ if n == -1:
+ break
+
+ yield b''.join(blocks) + data[:n + 1]
+ data = data[n + 1:]
+ blocks.clear()
+ blocks.append(data)
+
+ def _report_output(self):
+ stdout_line_iter, stderr_line_iter = self._buffer_iter
+ for line_iter, report_type in (
+ (stdout_line_iter, {'INFO'}),
+ (stderr_line_iter, {'WARNING'})
+ ):
+ while True:
+ line = next(line_iter).rstrip() # rstrip all, to include \r on windows
+ if not line:
+ break
+ self.report(report_type, line.decode(encoding='utf-8', errors='surrogateescape'))
+
+ def _wm_enter(self, context):
+ wm = context.window_manager
+ window = context.window
+
+ self._timer = wm.event_timer_add(self.report_interval, window)
+ window.cursor_set('WAIT')
+
+ def _wm_exit(self, context):
+ wm = context.window_manager
+ window = context.window
+
+ wm.event_timer_remove(self._timer)
+ window.cursor_set('DEFAULT')
+
+ def process_pre(self):
+ pass
+
+ def process_post(self, returncode):
+ pass
+
+ def modal(self, context, event):
+ wm = context.window_manager
+ p = self._process
+
+ if event.type == 'ESC':
+ self.cancel(context)
+ self.report({'INFO'}, "Operation aborted by user")
+ return {'CANCELLED'}
+
+ elif event.type == 'TIMER':
+ if p.poll() is not None:
+ self._report_output()
+ self._wm_exit(context)
+ self.process_post(p.returncode)
+ return {'FINISHED'}
+
+ self._report_output()
+
+ return {'PASS_THROUGH'}
+
+ def execute(self, context):
+ import subprocess
+
+ self.process_pre()
+
+ try:
+ p = subprocess.Popen(
+ self.command,
+ stdout=subprocess.PIPE,
+ stderr=subprocess.PIPE,
+ )
+ except FileNotFoundError as ex:
+ # Command not found
+ self.report({'ERROR'}, str(ex))
+ return {'CANCELLED'}
+
+ self._process = p
+ self._buffer_iter = (
+ iter(self._non_blocking_readlines(p.stdout)),
+ iter(self._non_blocking_readlines(p.stderr)),
+ )
+
+ wm = context.window_manager
+ wm.modal_handler_add(self)
+
+ self._wm_enter(context)
+
+ return {'RUNNING_MODAL'}
+
+ def cancel(self, context):
+ self._wm_exit(context)
+ self._process.kill()
+ self.process_post(-1)
+
diff --git a/io_blend_utils/blend/blendfile.py b/io_blend_utils/blend/blendfile.py
new file mode 100644
index 00000000..0739a1bc
--- /dev/null
+++ b/io_blend_utils/blend/blendfile.py
@@ -0,0 +1,917 @@
+# ***** BEGIN GPL LICENSE BLOCK *****
+#
+# This program is free software; you can redistribute it and/or
+# modify it under the terms of the GNU General Public License
+# as published by the Free Software Foundation; either version 2
+# of the License, or (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this program; if not, write to the Free Software Foundation,
+# Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
+#
+# ***** END GPL LICENCE BLOCK *****
+#
+# (c) 2009, At Mind B.V. - Jeroen Bakker
+# (c) 2014, Blender Foundation - Campbell Barton
+
+import os
+import struct
+import logging
+import gzip
+import tempfile
+
+log = logging.getLogger("blendfile")
+log.setLevel(logging.ERROR)
+
+FILE_BUFFER_SIZE = 1024 * 1024
+
+
+# -----------------------------------------------------------------------------
+# module global routines
+#
+# read routines
+# open a filename
+# determine if the file is compressed
+# and returns a handle
+def open_blend(filename, access="rb"):
+ """Opens a blend file for reading or writing pending on the access
+ supports 2 kind of blend files. Uncompressed and compressed.
+ Known issue: does not support packaged blend files
+ """
+ handle = open(filename, access)
+ magic_test = b"BLENDER"
+ magic = handle.read(len(magic_test))
+ if magic == magic_test:
+ log.debug("normal blendfile detected")
+ handle.seek(0, os.SEEK_SET)
+ bfile = BlendFile(handle)
+ bfile.is_compressed = False
+ bfile.filepath_orig = filename
+ return bfile
+ elif magic[:2] == b'\x1f\x8b':
+ log.debug("gzip blendfile detected")
+ handle.close()
+ log.debug("decompressing started")
+ fs = gzip.open(filename, "rb")
+ data = fs.read(FILE_BUFFER_SIZE)
+ magic = data[:len(magic_test)]
+ if magic == magic_test:
+ handle = tempfile.TemporaryFile()
+ while data:
+ handle.write(data)
+ data = fs.read(FILE_BUFFER_SIZE)
+ log.debug("decompressing finished")
+ fs.close()
+ log.debug("resetting decompressed file")
+ handle.seek(os.SEEK_SET, 0)
+ bfile = BlendFile(handle)
+ bfile.is_compressed = True
+ bfile.filepath_orig = filename
+ return bfile
+ else:
+ raise Exception("filetype inside gzip not a blend")
+ else:
+ raise Exception("filetype not a blend or a gzip blend")
+
+
+def align(offset, by):
+ n = by - 1
+ return (offset + n) & ~n
+
+
+# -----------------------------------------------------------------------------
+# module classes
+
+
+class BlendFile:
+ """
+ Blend file.
+ """
+ __slots__ = (
+ # file (result of open())
+ "handle",
+ # str (original name of the file path)
+ "filepath_orig",
+ # BlendFileHeader
+ "header",
+ # struct.Struct
+ "block_header_struct",
+ # BlendFileBlock
+ "blocks",
+ # [DNAStruct, ...]
+ "structs",
+ # dict {b'StructName': sdna_index}
+ # (where the index is an index into 'structs')
+ "sdna_index_from_id",
+ # dict {addr_old: block}
+ "block_from_offset",
+ # int
+ "code_index",
+ # bool (did we make a change)
+ "is_modified",
+ # bool (is file gzipped)
+ "is_compressed",
+ )
+
+ def __init__(self, handle):
+ log.debug("initializing reading blend-file")
+ self.handle = handle
+ self.header = BlendFileHeader(handle)
+ self.block_header_struct = self.header.create_block_header_struct()
+ self.blocks = []
+ self.code_index = {}
+
+ block = BlendFileBlock(handle, self)
+ while block.code != b'ENDB':
+ if block.code == b'DNA1':
+ (self.structs,
+ self.sdna_index_from_id,
+ ) = BlendFile.decode_structs(self.header, block, handle)
+ else:
+ handle.seek(block.size, os.SEEK_CUR)
+
+ self.blocks.append(block)
+ self.code_index.setdefault(block.code, []).append(block)
+
+ block = BlendFileBlock(handle, self)
+ self.is_modified = False
+ self.blocks.append(block)
+
+ # cache (could lazy init, incase we never use?)
+ self.block_from_offset = {block.addr_old: block for block in self.blocks if block.code != b'ENDB'}
+
+ def __enter__(self):
+ return self
+
+ def __exit__(self, type, value, traceback):
+ self.close()
+
+ def find_blocks_from_code(self, code):
+ assert(type(code) == bytes)
+ if code not in self.code_index:
+ return []
+ return self.code_index[code]
+
+ def find_block_from_offset(self, offset):
+ # same as looking looping over all blocks,
+ # then checking ``block.addr_old == offset``
+ assert(type(offset) is int)
+ return self.block_from_offset.get(offset)
+
+ def close(self):
+ """
+ Close the blend file
+ writes the blend file to disk if changes has happened
+ """
+ if not self.is_modified:
+ self.handle.close()
+ else:
+ handle = self.handle
+ if self.is_compressed:
+ log.debug("close compressed blend file")
+ handle.seek(os.SEEK_SET, 0)
+ log.debug("compressing started")
+ fs = gzip.open(self.filepath_orig, "wb")
+ data = handle.read(FILE_BUFFER_SIZE)
+ while data:
+ fs.write(data)
+ data = handle.read(FILE_BUFFER_SIZE)
+ fs.close()
+ log.debug("compressing finished")
+
+ handle.close()
+
+ def ensure_subtype_smaller(self, sdna_index_curr, sdna_index_next):
+ # never refine to a smaller type
+ if (self.structs[sdna_index_curr].size >
+ self.structs[sdna_index_next].size):
+
+ raise RuntimeError("cant refine to smaller type (%s -> %s)" %
+ (self.structs[sdna_index_curr].dna_type_id.decode('ascii'),
+ self.structs[sdna_index_next].dna_type_id.decode('ascii')))
+
+ @staticmethod
+ def decode_structs(header, block, handle):
+ """
+ DNACatalog is a catalog of all information in the DNA1 file-block
+ """
+ log.debug("building DNA catalog")
+ shortstruct = DNA_IO.USHORT[header.endian_index]
+ shortstruct2 = struct.Struct(header.endian_str + b'HH')
+ intstruct = DNA_IO.UINT[header.endian_index]
+
+ data = handle.read(block.size)
+ types = []
+ names = []
+
+ structs = []
+ sdna_index_from_id = {}
+
+ offset = 8
+ names_len = intstruct.unpack_from(data, offset)[0]
+ offset += 4
+
+ log.debug("building #%d names" % names_len)
+ for i in range(names_len):
+ tName = DNA_IO.read_data0_offset(data, offset)
+ offset = offset + len(tName) + 1
+ names.append(DNAName(tName))
+ del names_len
+
+ offset = align(offset, 4)
+ offset += 4
+ types_len = intstruct.unpack_from(data, offset)[0]
+ offset += 4
+ log.debug("building #%d types" % types_len)
+ for i in range(types_len):
+ dna_type_id = DNA_IO.read_data0_offset(data, offset)
+ # None will be replaced by the DNAStruct, below
+ types.append(DNAStruct(dna_type_id))
+ offset += len(dna_type_id) + 1
+
+ offset = align(offset, 4)
+ offset += 4
+ log.debug("building #%d type-lengths" % types_len)
+ for i in range(types_len):
+ tLen = shortstruct.unpack_from(data, offset)[0]
+ offset = offset + 2
+ types[i].size = tLen
+ del types_len
+
+ offset = align(offset, 4)
+ offset += 4
+
+ structs_len = intstruct.unpack_from(data, offset)[0]
+ offset += 4
+ log.debug("building #%d structures" % structs_len)
+ for sdna_index in range(structs_len):
+ d = shortstruct2.unpack_from(data, offset)
+ struct_type_index = d[0]
+ offset += 4
+ dna_struct = types[struct_type_index]
+ sdna_index_from_id[dna_struct.dna_type_id] = sdna_index
+ structs.append(dna_struct)
+
+ fields_len = d[1]
+ dna_offset = 0
+
+ for field_index in range(fields_len):
+ d2 = shortstruct2.unpack_from(data, offset)
+ field_type_index = d2[0]
+ field_name_index = d2[1]
+ offset += 4
+ dna_type = types[field_type_index]
+ dna_name = names[field_name_index]
+ if dna_name.is_pointer or dna_name.is_method_pointer:
+ dna_size = header.pointer_size * dna_name.array_size
+ else:
+ dna_size = dna_type.size * dna_name.array_size
+
+ field = DNAField(dna_type, dna_name, dna_size, dna_offset)
+ dna_struct.fields.append(field)
+ dna_struct.field_from_name[dna_name.name_only] = field
+ dna_offset += dna_size
+
+ return structs, sdna_index_from_id
+
+
+class BlendFileBlock:
+ """
+ Instance of a struct.
+ """
+ __slots__ = (
+ # BlendFile
+ "file",
+ "code",
+ "size",
+ "addr_old",
+ "sdna_index",
+ "count",
+ "file_offset",
+ "user_data",
+ )
+
+ def __str__(self):
+ return ("<%s.%s (%s), size=%d at %s>" %
+ # fields=[%s]
+ (self.__class__.__name__,
+ self.dna_type.dna_type_id.decode('ascii'),
+ self.code.decode(),
+ self.size,
+ # b", ".join(f.dna_name.name_only for f in self.dna_type.fields).decode('ascii'),
+ hex(self.addr_old),
+ ))
+
+ def __init__(self, handle, bfile):
+ OLDBLOCK = struct.Struct(b'4sI')
+
+ self.file = bfile
+ self.user_data = None
+
+ data = handle.read(bfile.block_header_struct.size)
+ # header size can be 8, 20, or 24 bytes long
+ # 8: old blend files ENDB block (exception)
+ # 20: normal headers 32 bit platform
+ # 24: normal headers 64 bit platform
+ if len(data) > 15:
+
+ blockheader = bfile.block_header_struct.unpack(data)
+ self.code = blockheader[0].partition(b'\0')[0]
+ if self.code != b'ENDB':
+ self.size = blockheader[1]
+ self.addr_old = blockheader[2]
+ self.sdna_index = blockheader[3]
+ self.count = blockheader[4]
+ self.file_offset = handle.tell()
+ else:
+ self.size = 0
+ self.addr_old = 0
+ self.sdna_index = 0
+ self.count = 0
+ self.file_offset = 0
+ else:
+ blockheader = OLDBLOCK.unpack(data)
+ self.code = blockheader[0].partition(b'\0')[0]
+ self.code = DNA_IO.read_data0(blockheader[0])
+ self.size = 0
+ self.addr_old = 0
+ self.sdna_index = 0
+ self.count = 0
+ self.file_offset = 0
+
+ @property
+ def dna_type(self):
+ return self.file.structs[self.sdna_index]
+
+ def refine_type_from_index(self, sdna_index_next):
+ assert(type(sdna_index_next) is int)
+ sdna_index_curr = self.sdna_index
+ self.file.ensure_subtype_smaller(sdna_index_curr, sdna_index_next)
+ self.sdna_index = sdna_index_next
+
+ def refine_type(self, dna_type_id):
+ assert(type(dna_type_id) is bytes)
+ self.refine_type_from_index(self.file.sdna_index_from_id[dna_type_id])
+
+ def get_file_offset(self, path,
+ default=...,
+ sdna_index_refine=None,
+ base_index=0,
+ ):
+ """
+ Return (offset, length)
+ """
+ assert(type(path) is bytes)
+
+ ofs = self.file_offset
+ if base_index != 0:
+ assert(base_index < self.count)
+ ofs += (self.size // self.count) * base_index
+ self.file.handle.seek(ofs, os.SEEK_SET)
+
+ if sdna_index_refine is None:
+ sdna_index_refine = self.sdna_index
+ else:
+ self.file.ensure_subtype_smaller(self.sdna_index, sdna_index_refine)
+
+ dna_struct = self.file.structs[sdna_index_refine]
+ field = dna_struct.field_from_path(
+ self.file.header, self.file.handle, path)
+
+ return (self.file.handle.tell(), field.dna_name.array_size)
+
+ def get(self, path,
+ default=...,
+ sdna_index_refine=None,
+ use_nil=True, use_str=True,
+ base_index=0,
+ ):
+
+ ofs = self.file_offset
+ if base_index != 0:
+ assert(base_index < self.count)
+ ofs += (self.size // self.count) * base_index
+ self.file.handle.seek(ofs, os.SEEK_SET)
+
+ if sdna_index_refine is None:
+ sdna_index_refine = self.sdna_index
+ else:
+ self.file.ensure_subtype_smaller(self.sdna_index, sdna_index_refine)
+
+ dna_struct = self.file.structs[sdna_index_refine]
+ return dna_struct.field_get(
+ self.file.header, self.file.handle, path,
+ default=default,
+ use_nil=use_nil, use_str=use_str,
+ )
+
+ def get_recursive_iter(self, path, path_root=b"",
+ default=...,
+ sdna_index_refine=None,
+ use_nil=True, use_str=True,
+ base_index=0,
+ ):
+ if path_root:
+ path_full = (
+ (path_root if type(path_root) is tuple else (path_root, )) +
+ (path if type(path) is tuple else (path, )))
+ else:
+ path_full = path
+
+ try:
+ yield (path_full, self.get(path_full, default, sdna_index_refine, use_nil, use_str, base_index))
+ except NotImplementedError as ex:
+ msg, dna_name, dna_type = ex.args
+ struct_index = self.file.sdna_index_from_id.get(dna_type.dna_type_id, None)
+ if struct_index is None:
+ yield (path_full, "<%s>" % dna_type.dna_type_id.decode('ascii'))
+ else:
+ struct = self.file.structs[struct_index]
+ for f in struct.fields:
+ yield from self.get_recursive_iter(
+ f.dna_name.name_only, path_full, default, None, use_nil, use_str, 0)
+
+ def items_recursive_iter(self):
+ for k in self.keys():
+ yield from self.get_recursive_iter(k, use_str=False)
+
+ def get_data_hash(self):
+ """
+ Generates a 'hash' that can be used instead of addr_old as block id, and that should be 'stable' across .blend
+ file load & save (i.e. it does not changes due to pointer addresses variations).
+ """
+ # TODO This implementation is most likely far from optimal... and CRC32 is not renown as the best hashing
+ # algo either. But for now does the job!
+ import zlib
+ def _is_pointer(self, k):
+ return self.file.structs[self.sdna_index].field_from_path(
+ self.file.header, self.file.handle, k).dna_name.is_pointer
+
+ hsh = 1
+ for k, v in self.items_recursive_iter():
+ if not _is_pointer(self, k):
+ hsh = zlib.adler32(str(v).encode(), hsh)
+ return hsh
+
+ def set(self, path, value,
+ sdna_index_refine=None,
+ ):
+
+ if sdna_index_refine is None:
+ sdna_index_refine = self.sdna_index
+ else:
+ self.file.ensure_subtype_smaller(self.sdna_index, sdna_index_refine)
+
+ dna_struct = self.file.structs[sdna_index_refine]
+ self.file.handle.seek(self.file_offset, os.SEEK_SET)
+ self.file.is_modified = True
+ return dna_struct.field_set(
+ self.file.header, self.file.handle, path, value)
+
+ # ---------------
+ # Utility get/set
+ #
+ # avoid inline pointer casting
+ def get_pointer(
+ self, path,
+ default=...,
+ sdna_index_refine=None,
+ base_index=0,
+ ):
+ if sdna_index_refine is None:
+ sdna_index_refine = self.sdna_index
+ result = self.get(path, default, sdna_index_refine=sdna_index_refine, base_index=base_index)
+
+ # default
+ if type(result) is not int:
+ return result
+
+ assert(self.file.structs[sdna_index_refine].field_from_path(
+ self.file.header, self.file.handle, path).dna_name.is_pointer)
+ if result != 0:
+ # possible (but unlikely)
+ # that this fails and returns None
+ # maybe we want to raise some exception in this case
+ return self.file.find_block_from_offset(result)
+ else:
+ return None
+
+ # ----------------------
+ # Python convenience API
+
+ # dict like access
+ def __getitem__(self, item):
+ return self.get(item, use_str=False)
+
+ def __setitem__(self, item, value):
+ self.set(item, value)
+
+ def keys(self):
+ return (f.dna_name.name_only for f in self.dna_type.fields)
+
+ def values(self):
+ for k in self.keys():
+ try:
+ yield self[k]
+ except NotImplementedError as ex:
+ msg, dna_name, dna_type = ex.args
+ yield "<%s>" % dna_type.dna_type_id.decode('ascii')
+
+ def items(self):
+ for k in self.keys():
+ try:
+ yield (k, self[k])
+ except NotImplementedError as ex:
+ msg, dna_name, dna_type = ex.args
+ yield (k, "<%s>" % dna_type.dna_type_id.decode('ascii'))
+
+
+# -----------------------------------------------------------------------------
+# Read Magic
+#
+# magic = str
+# pointer_size = int
+# is_little_endian = bool
+# version = int
+
+
+class BlendFileHeader:
+ """
+ BlendFileHeader allocates the first 12 bytes of a blend file
+ it contains information about the hardware architecture
+ """
+ __slots__ = (
+ # str
+ "magic",
+ # int 4/8
+ "pointer_size",
+ # bool
+ "is_little_endian",
+ # int
+ "version",
+ # str, used to pass to 'struct'
+ "endian_str",
+ # int, used to index common types
+ "endian_index",
+ )
+
+ def __init__(self, handle):
+ FILEHEADER = struct.Struct(b'7s1s1s3s')
+
+ log.debug("reading blend-file-header")
+ values = FILEHEADER.unpack(handle.read(FILEHEADER.size))
+ self.magic = values[0]
+ pointer_size_id = values[1]
+ if pointer_size_id == b'-':
+ self.pointer_size = 8
+ elif pointer_size_id == b'_':
+ self.pointer_size = 4
+ else:
+ assert(0)
+ endian_id = values[2]
+ if endian_id == b'v':
+ self.is_little_endian = True
+ self.endian_str = b'<'
+ self.endian_index = 0
+ elif endian_id == b'V':
+ self.is_little_endian = False
+ self.endian_index = 1
+ self.endian_str = b'>'
+ else:
+ assert(0)
+
+ version_id = values[3]
+ self.version = int(version_id)
+
+ def create_block_header_struct(self):
+ return struct.Struct(b''.join((
+ self.endian_str,
+ b'4sI',
+ b'I' if self.pointer_size == 4 else b'Q',
+ b'II',
+ )))
+
+
+class DNAName:
+ """
+ DNAName is a C-type name stored in the DNA
+ """
+ __slots__ = (
+ "name_full",
+ "name_only",
+ "is_pointer",
+ "is_method_pointer",
+ "array_size",
+ )
+
+ def __init__(self, name_full):
+ self.name_full = name_full
+ self.name_only = self.calc_name_only()
+ self.is_pointer = self.calc_is_pointer()
+ self.is_method_pointer = self.calc_is_method_pointer()
+ self.array_size = self.calc_array_size()
+
+ def as_reference(self, parent):
+ if parent is None:
+ result = b''
+ else:
+ result = parent + b'.'
+
+ result = result + self.name_only
+ return result
+
+ def calc_name_only(self):
+ result = self.name_full.strip(b'*()')
+ index = result.find(b'[')
+ if index != -1:
+ result = result[:index]
+ return result
+
+ def calc_is_pointer(self):
+ return (b'*' in self.name_full)
+
+ def calc_is_method_pointer(self):
+ return (b'(*' in self.name_full)
+
+ def calc_array_size(self):
+ result = 1
+ temp = self.name_full
+ index = temp.find(b'[')
+
+ while index != -1:
+ index_2 = temp.find(b']')
+ result *= int(temp[index + 1:index_2])
+ temp = temp[index_2 + 1:]
+ index = temp.find(b'[')
+
+ return result
+
+
+class DNAField:
+ """
+ DNAField is a coupled DNAStruct and DNAName
+ and cache offset for reuse
+ """
+ __slots__ = (
+ # DNAName
+ "dna_name",
+ # tuple of 3 items
+ # [bytes (struct name), int (struct size), DNAStruct]
+ "dna_type",
+ # size on-disk
+ "dna_size",
+ # cached info (avoid looping over fields each time)
+ "dna_offset",
+ )
+
+ def __init__(self, dna_type, dna_name, dna_size, dna_offset):
+ self.dna_type = dna_type
+ self.dna_name = dna_name
+ self.dna_size = dna_size
+ self.dna_offset = dna_offset
+
+
class DNAStruct:
    """
    DNAStruct is a C-type structure stored in the DNA

    Fields are resolved by name via :meth:`field_from_path`, which also seeks
    the file handle to the field's location so the ``field_get``/``field_set``
    readers and writers can operate directly on the handle.
    """
    __slots__ = (
        "dna_type_id",
        "size",
        "fields",
        "field_from_name",
        "user_data",
        )

    def __init__(self, dna_type_id):
        self.dna_type_id = dna_type_id
        self.fields = []
        self.field_from_name = {}
        self.user_data = None

    def field_from_path(self, header, handle, path):
        """
        Support lookups as bytes or a tuple of bytes and optional index.

        C style 'id.name' --> (b'id', b'name')
        C style 'array[4]' --> ('array', 4)

        Seeks *handle* (relative to its current position) to the field and
        returns the DNAField, or None when the path does not resolve.
        """
        if type(path) is tuple:
            name = path[0]
            if len(path) >= 2 and type(path[1]) is not bytes:
                # second element is an array index
                name_tail = path[2:]
                index = path[1]
                assert(type(index) is int)
            else:
                name_tail = path[1:]
                index = 0
        else:
            name = path
            name_tail = None
            index = 0

        assert(type(name) is bytes)

        field = self.field_from_name.get(name)

        if field is not None:
            handle.seek(field.dna_offset, os.SEEK_CUR)
            if index != 0:
                if field.dna_name.is_pointer:
                    index_offset = header.pointer_size * index
                else:
                    index_offset = field.dna_type.size * index
                assert(index_offset < field.dna_size)
                handle.seek(index_offset, os.SEEK_CUR)
            if not name_tail:  # None or ()
                return field
            else:  # recurse into the sub-struct
                return field.dna_type.field_from_path(header, handle, name_tail)

    def field_get(self, header, handle, path,
                  default=...,
                  use_nil=True, use_str=True,
                  ):
        """
        Read and return the value at *path*.

        :arg default: returned when the path does not resolve
            (otherwise a KeyError is raised).
        :arg use_nil: for char fields, stop at the first nil byte.
        :arg use_str: for char fields, decode to str instead of bytes.
        """
        field = self.field_from_path(header, handle, path)
        if field is None:
            if default is not ...:
                return default
            else:
                raise KeyError("%r not found in %r (%r)" %
                               (path, [f.dna_name.name_only for f in self.fields], self.dna_type_id))

        dna_type = field.dna_type
        dna_name = field.dna_name

        if dna_name.is_pointer:
            return DNA_IO.read_pointer(handle, header)
        elif dna_type.dna_type_id == b'int':
            if dna_name.array_size > 1:
                return [DNA_IO.read_int(handle, header) for i in range(dna_name.array_size)]
            return DNA_IO.read_int(handle, header)
        elif dna_type.dna_type_id == b'short':
            if dna_name.array_size > 1:
                return [DNA_IO.read_short(handle, header) for i in range(dna_name.array_size)]
            return DNA_IO.read_short(handle, header)
        elif dna_type.dna_type_id == b'uint64_t':
            if dna_name.array_size > 1:
                return [DNA_IO.read_ulong(handle, header) for i in range(dna_name.array_size)]
            return DNA_IO.read_ulong(handle, header)
        elif dna_type.dna_type_id == b'float':
            if dna_name.array_size > 1:
                return [DNA_IO.read_float(handle, header) for i in range(dna_name.array_size)]
            return DNA_IO.read_float(handle, header)
        elif dna_type.dna_type_id == b'char':
            if use_str:
                if use_nil:
                    return DNA_IO.read_string0(handle, dna_name.array_size)
                else:
                    return DNA_IO.read_string(handle, dna_name.array_size)
            else:
                if use_nil:
                    return DNA_IO.read_bytes0(handle, dna_name.array_size)
                else:
                    return DNA_IO.read_bytes(handle, dna_name.array_size)
        else:
            raise NotImplementedError("%r exists but isn't pointer, can't resolve field %r" %
                                      (path, dna_name.name_only), dna_name, dna_type)

    def field_set(self, header, handle, path, value):
        """
        Write *value* into the (currently char-only) field at *path*.

        Raises KeyError when the path does not resolve and
        NotImplementedError for unsupported field types.
        """
        assert(type(path) == bytes)

        field = self.field_from_path(header, handle, path)
        if field is None:
            raise KeyError("%r not found in %r" %
                           (path, [f.dna_name.name_only for f in self.fields]))

        dna_type = field.dna_type
        dna_name = field.dna_name

        if dna_type.dna_type_id == b'char':
            if type(value) is str:
                return DNA_IO.write_string(handle, value, dna_name.array_size)
            else:
                return DNA_IO.write_bytes(handle, value, dna_name.array_size)
        else:
            # FIX: was ``dna_type[0]`` -- DNAStruct isn't subscriptable, so the
            # intended NotImplementedError surfaced as a TypeError instead.
            raise NotImplementedError("Setting %r is not yet supported" %
                                      dna_type.dna_type_id, dna_name, dna_type)
+
+
class DNA_IO:
    """
    Module like class, for read-write utility functions.

    Only stores static methods & constants.
    """

    __slots__ = ()

    def __new__(cls, *args, **kwargs):
        raise RuntimeError("%s should not be instantiated" % cls)

    @staticmethod
    def write_string(handle, astring, fieldlen):
        # Truncates to *fieldlen*; when shorter, only a single trailing nil is
        # written (remaining field bytes on disk are left untouched).
        assert(isinstance(astring, str))
        if len(astring) >= fieldlen:
            stringw = astring[0:fieldlen]
        else:
            stringw = astring + '\0'
        handle.write(stringw.encode('utf-8'))

    @staticmethod
    def write_bytes(handle, astring, fieldlen):
        # bytes variant of write_string (no utf-8 encoding step)
        assert(isinstance(astring, (bytes, bytearray)))
        if len(astring) >= fieldlen:
            stringw = astring[0:fieldlen]
        else:
            stringw = astring + b'\0'

        handle.write(stringw)

    @staticmethod
    def read_bytes(handle, length):
        # raw read, no nil handling
        data = handle.read(length)
        return data

    @staticmethod
    def read_bytes0(handle, length):
        # read *length* bytes, return only the part before the first nil
        data = handle.read(length)
        return DNA_IO.read_data0(data)

    @staticmethod
    def read_string(handle, length):
        return DNA_IO.read_bytes(handle, length).decode('utf-8')

    @staticmethod
    def read_string0(handle, length):
        return DNA_IO.read_bytes0(handle, length).decode('utf-8')

    @staticmethod
    def read_data0_offset(data, offset):
        # slice of *data* from *offset* up to the next nil byte
        add = data.find(b'\0', offset) - offset
        return data[offset:offset + add]

    @staticmethod
    def read_data0(data):
        # slice of *data* up to the first nil byte
        add = data.find(b'\0')
        return data[:add]

    # Each pair is (little-endian, big-endian), indexed by
    # ``fileheader.endian_index``.
    USHORT = struct.Struct(b'<H'), struct.Struct(b'>H')

    @staticmethod
    def read_ushort(handle, fileheader):
        st = DNA_IO.USHORT[fileheader.endian_index]
        return st.unpack(handle.read(st.size))[0]

    SSHORT = struct.Struct(b'<h'), struct.Struct(b'>h')

    @staticmethod
    def read_short(handle, fileheader):
        st = DNA_IO.SSHORT[fileheader.endian_index]
        return st.unpack(handle.read(st.size))[0]

    UINT = struct.Struct(b'<I'), struct.Struct(b'>I')

    @staticmethod
    def read_uint(handle, fileheader):
        st = DNA_IO.UINT[fileheader.endian_index]
        return st.unpack(handle.read(st.size))[0]

    SINT = struct.Struct(b'<i'), struct.Struct(b'>i')

    @staticmethod
    def read_int(handle, fileheader):
        st = DNA_IO.SINT[fileheader.endian_index]
        return st.unpack(handle.read(st.size))[0]

    FLOAT = struct.Struct(b'<f'), struct.Struct(b'>f')

    @staticmethod
    def read_float(handle, fileheader):
        st = DNA_IO.FLOAT[fileheader.endian_index]
        return st.unpack(handle.read(st.size))[0]

    ULONG = struct.Struct(b'<Q'), struct.Struct(b'>Q')

    @staticmethod
    def read_ulong(handle, fileheader):
        st = DNA_IO.ULONG[fileheader.endian_index]
        return st.unpack(handle.read(st.size))[0]

    @staticmethod
    def read_pointer(handle, header):
        """
        reads a pointer from a file handle
        the pointer size is given by the header (BlendFileHeader)
        """
        # NOTE: falls through (returns None) for any pointer_size
        # other than 4 or 8.
        if header.pointer_size == 4:
            st = DNA_IO.UINT[header.endian_index]
            return st.unpack(handle.read(st.size))[0]
        if header.pointer_size == 8:
            st = DNA_IO.ULONG[header.endian_index]
            return st.unpack(handle.read(st.size))[0]
diff --git a/io_blend_utils/blend/blendfile_path_walker.py b/io_blend_utils/blend/blendfile_path_walker.py
new file mode 100644
index 00000000..9c6c800f
--- /dev/null
+++ b/io_blend_utils/blend/blendfile_path_walker.py
@@ -0,0 +1,939 @@
+#!/usr/bin/env python3
+
+# ***** BEGIN GPL LICENSE BLOCK *****
+#
+# This program is free software; you can redistribute it and/or
+# modify it under the terms of the GNU General Public License
+# as published by the Free Software Foundation; either version 2
+# of the License, or (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this program; if not, write to the Free Software Foundation,
+# Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
+#
+# ***** END GPL LICENCE BLOCK *****
+
+import os
+# gives problems with scripts that use stdout, for testing 'bam deps' for eg.
+VERBOSE = False # os.environ.get('BAM_VERBOSE', False)
+TIMEIT = False
+
+USE_ALEMBIC_BRANCH = False
+
+
class C_defs:
    """Constants mirrored from Blender's DNA headers (values must match C)."""
    __slots__ = ()

    def __new__(cls, *args, **kwargs):
        raise RuntimeError("%s should not be instantiated" % cls)

    # DNA_sequence_types.h (Sequence.type)
    SEQ_TYPE_IMAGE = 0
    SEQ_TYPE_META = 1
    SEQ_TYPE_SCENE = 2
    SEQ_TYPE_MOVIE = 3
    SEQ_TYPE_SOUND_RAM = 4
    SEQ_TYPE_SOUND_HD = 5
    SEQ_TYPE_MOVIECLIP = 6
    SEQ_TYPE_MASK = 7
    SEQ_TYPE_EFFECT = 8

    # Image.source (DNA_image_types.h)
    IMA_SRC_FILE = 1
    IMA_SRC_SEQUENCE = 2
    IMA_SRC_MOVIE = 3

    # DNA_modifier_types.h
    eModifierType_MeshCache = 46

    # DNA_particle_types.h
    PART_DRAW_OB = 7
    PART_DRAW_GR = 8

    # DNA_object_types.h
    # Object.transflag
    OB_DUPLIGROUP = 1 << 8

    if USE_ALEMBIC_BRANCH:
        CACHE_LIBRARY_SOURCE_CACHE = 1
+
+
if VERBOSE:
    # only pay for the logging setup when verbose output is wanted
    import logging
    log_deps = logging.getLogger("path_walker")
    del logging

    def set_as_str(s):
        # render a set of id-name bytes as a readable, sorted ascii string
        if s is None:
            return "None"
        else:
            return (", ".join(sorted(i.decode('ascii') for i in sorted(s))))
+
+
class FPElem:
    """
    Tiny filepath class to hide blendfile.

    Subclasses implement ``_get_cb``, ``_set_cb`` and ``_set_cb_edits`` to
    read/write the underlying blendfile block field(s).
    """

    __slots__ = (
        "basedir",

        # library link level
        "level",

        # True when this is a part of a sequence (image or movieclip)
        "is_sequence",

        "userdata",
        )

    def __init__(self, basedir, level,
                 # subclasses get/set functions should use
                 userdata):
        self.basedir = basedir
        self.level = level
        self.is_sequence = False

        # subclass must call
        self.userdata = userdata

    def files_siblings(self):
        # overridden by sequence subclasses to list every file of the sequence
        return ()

    # --------
    # filepath

    def filepath_absolute_resolve(self, basedir=None):
        """
        Resolve the filepath, with the option to override the basedir.
        """
        filepath = self.filepath
        if filepath.startswith(b'//'):
            # blend-style relative path
            if basedir is None:
                basedir = self.basedir
            return os.path.normpath(os.path.join(
                    basedir,
                    utils.compatpath(filepath[2:]),
                    ))
        else:
            return utils.compatpath(filepath)

    def filepath_assign_edits(self, filepath, binary_edits):
        # record the write into *binary_edits* instead of applying it now
        self._set_cb_edits(filepath, binary_edits)

    @staticmethod
    def _filepath_assign_edits(block, path, filepath, binary_edits):
        """
        Record the write to a separate entry (binary file-like object),
        this lets us replay the edits later.
        (so we can replay them onto the clients local cache without a file transfer).
        """
        import struct
        assert(type(filepath) is bytes)
        assert(type(path) is bytes)
        ofs, size = block.get_file_offset(path)
        # ensure we don't write past the field size & allow for \0
        filepath = filepath[:size - 1]
        binary_edits.append((ofs, filepath + b'\0'))

    @property
    def filepath(self):
        return self._get_cb()

    @filepath.setter
    def filepath(self, filepath):
        self._set_cb(filepath)

    @property
    def filepath_absolute(self):
        return self.filepath_absolute_resolve()
+
+
class FPElem_block_path(FPElem):
    """
    Simple block-path:
    userdata = (block, path)
    """
    __slots__ = ()

    def _get_cb(self):
        # single field read
        block, path = self.userdata
        return block[path]

    def _set_cb(self, filepath):
        # single field write
        block, path = self.userdata
        block[path] = filepath

    def _set_cb_edits(self, filepath, binary_edits):
        block, path = self.userdata
        self._filepath_assign_edits(block, path, filepath, binary_edits)
+
+
class FPElem_sequence_single(FPElem):
    """
    Movie sequence
    userdata = (block, path, sub_block, sub_path)

    The filepath is stored split across two fields:
    directory in (block, path), filename in (sub_block, sub_path).
    """
    __slots__ = ()

    def _get_cb(self):
        block, path, sub_block, sub_path = self.userdata
        # directory + filename
        return block[path] + sub_block[sub_path]

    def _set_cb(self, filepath):
        block, path, sub_block, sub_path = self.userdata
        head, sep, tail = utils.splitpath(filepath)

        # keep the separator on the directory part
        block[path] = head + sep
        sub_block[sub_path] = tail

    def _set_cb_edits(self, filepath, binary_edits):
        block, path, sub_block, sub_path = self.userdata
        head, sep, tail = utils.splitpath(filepath)

        self._filepath_assign_edits(block, path, head + sep, binary_edits)
        self._filepath_assign_edits(sub_block, sub_path, tail, binary_edits)
+
+
class FPElem_sequence_image_seq(FPElem_sequence_single):
    """
    Image sequence
    userdata = (block, path, sub_block, sub_path)
    """
    __slots__ = ()

    def files_siblings(self):
        block, path, sub_block, sub_path = self.userdata

        # every strip-element name in the 'stripdata' array
        # NOTE(review): assumes block.get() accepts 'base_index' and the
        # pointer block exposes 'count' -- confirm against blendfile.py.
        array = block.get_pointer(b'stripdata')
        files = [array.get(b'name', use_str=False, base_index=i) for i in range(array.count)]
        return files
+
+
class FilePath:
    """
    Fake module: finds all external file references in a blend file,
    yielding (FPElem, (rootdir, blend_name)) pairs.
    """
    __slots__ = ()

    def __new__(cls, *args, **kwargs):
        raise RuntimeError("%s should not be instantiated" % cls)

    # ------------------------------------------------------------------------
    # Main function to visit paths
    @staticmethod
    def visit_from_blend(
            filepath,

            # never modify the blend
            readonly=True,
            # callback that creates a temp file and returns its path.
            temp_remap_cb=None,

            # recursive options
            recursive=False,
            # recurse all indirectly linked data
            # (not just from the initially referenced blend file)
            recursive_all=False,
            # list of ID block names we want to load, or None to load all
            block_codes=None,
            # root when we're loading libs indirectly
            rootdir=None,
            level=0,
            # dict of id's used so we don't follow these links again
            # prevents cyclic references too!
            # {lib_path: set([block id's ...])}
            lib_visit=None,

            # optional blendfile callbacks
            # These callbacks run on enter-exit blend files
            # so you can keep track of what file and level you're at.
            blendfile_level_cb=(None, None),
            ):
        """
        Generator: yield (FPElem, (rootdir, blend_name)) for every file
        reference found in *filepath*, optionally recursing into libraries.
        """
        # print(level, block_codes)
        import os

        filepath = os.path.abspath(filepath)

        if VERBOSE:
            indent_str = " " * level
            # print(indent_str + "Opening:", filepath)
            # print(indent_str + "... blocks:", block_codes)

            log_deps.info("~")
            log_deps.info("%s%s" % (indent_str, filepath.decode('utf-8')))
            log_deps.info("%s%s" % (indent_str, set_as_str(block_codes)))

        blendfile_level_cb_enter, blendfile_level_cb_exit = blendfile_level_cb

        if blendfile_level_cb_enter is not None:
            blendfile_level_cb_enter(filepath)

        basedir = os.path.dirname(filepath)
        if rootdir is None:
            rootdir = basedir

        if lib_visit is None:
            lib_visit = {}

        if recursive and (level > 0) and (block_codes is not None) and (recursive_all is False):
            # prevent from expanding the
            # same datablock more then once
            # note: we could *almost* id_name, however this isn't unique for libraries.
            expand_addr_visit = set()
            # {lib_id: {block_ids... }}
            expand_codes_idlib = {}

            # libraries used by this blend
            block_codes_idlib = set()

            # XXX, checking 'block_codes' isn't 100% reliable,
            # but at least don't touch the same blocks twice.
            # whereas block_codes is intended to only operate on blocks we requested.
            lib_block_codes_existing = lib_visit.setdefault(filepath, set())

            # only for this block
            def _expand_codes_add_test(block, code):
                # return True, if the ID should be searched further
                #
                # we could investigate a better way...
                # Not to be accessing ID blocks at this point. but its harmless
                if code == b'ID':
                    assert(code == block.code)
                    if recursive:
                        expand_codes_idlib.setdefault(block[b'lib'], set()).add(block[b'name'])
                    return False
                else:
                    id_name = block[b'id', b'name']

                    # if we touched this already, don't touch again
                    # (else we may modify the same path multiple times)
                    #
                    # FIXME, works in some cases but not others
                    # keep, without this we get errors
                    # Gooseberry r668
                    # bam pack scenes/01_island/01_meet_franck/01_01_01_A/01_01_01_A.comp.blend
                    # gives strange errors
                    '''
                    if id_name not in block_codes:
                        return False
                    '''

                    # instead just don't operate on blocks multiple times
                    # ... rather than attempt to check on what we need or not.
                    len_prev = len(lib_block_codes_existing)
                    lib_block_codes_existing.add(id_name)
                    if len_prev == len(lib_block_codes_existing):
                        return False

                    len_prev = len(expand_addr_visit)
                    expand_addr_visit.add(block.addr_old)
                    return (len_prev != len(expand_addr_visit))

            def block_expand(block, code):
                # yield *block* and (when not seen before) everything it references
                assert(block.code == code)
                if _expand_codes_add_test(block, code):
                    yield block

                    assert(block.code == code)
                    fn = ExpandID.expand_funcs.get(code)
                    if fn is not None:
                        for sub_block in fn(block):
                            if sub_block is not None:
                                yield from block_expand(sub_block, sub_block.code)
                else:
                    if code == b'ID':
                        yield block
        else:
            expand_addr_visit = None

            # set below
            expand_codes_idlib = None

            # never set
            block_codes_idlib = None

            def block_expand(block, code):
                assert(block.code == code)
                yield block

        # ------
        # Define
        #
        # - iter_blocks_id(code)
        # - iter_blocks_idlib()
        if block_codes is None:
            def iter_blocks_id(code):
                return blend.find_blocks_from_code(code)

            def iter_blocks_idlib():
                return blend.find_blocks_from_code(b'LI')
        else:
            def iter_blocks_id(code):
                for block in blend.find_blocks_from_code(code):
                    if block[b'id', b'name'] in block_codes:
                        yield from block_expand(block, code)

            if block_codes_idlib is not None:
                def iter_blocks_idlib():
                    for block in blend.find_blocks_from_code(b'LI'):
                        # TODO, this should work but in fact makes some libs not link correctly.
                        if block[b'name'] in block_codes_idlib:
                            yield from block_expand(block, b'LI')
            else:
                def iter_blocks_idlib():
                    return blend.find_blocks_from_code(b'LI')

        if temp_remap_cb is not None:
            filepath_tmp = temp_remap_cb(filepath, rootdir)
        else:
            filepath_tmp = filepath

        # store info to pass along with each iteration
        extra_info = rootdir, os.path.basename(filepath)

        from blend import blendfile
        with blendfile.open_blend(filepath_tmp, "rb" if readonly else "r+b") as blend:

            for code in blend.code_index.keys():
                # handle library blocks as special case
                if ((len(code) != 2) or
                    (code in {
                        # libraries handled below
                        b'LI',
                        b'ID',
                        # unneeded
                        b'WM',
                        b'SN',  # bScreen
                        })):

                    continue

                # if VERBOSE:
                #     print("  Scanning", code)

                for block in iter_blocks_id(code):
                    yield from FilePath.from_block(block, basedir, extra_info, level)

            # print("A:", expand_addr_visit)
            # print("B:", block_codes)
            if VERBOSE:
                log_deps.info("%s%s" % (indent_str, set_as_str(expand_addr_visit)))

            if recursive:

                if expand_codes_idlib is None:
                    expand_codes_idlib = {}
                    for block in blend.find_blocks_from_code(b'ID'):
                        expand_codes_idlib.setdefault(block[b'lib'], set()).add(block[b'name'])

                # look into libraries
                lib_all = []

                for lib_id, lib_block_codes in sorted(expand_codes_idlib.items()):
                    lib = blend.find_block_from_offset(lib_id)
                    lib_path = lib[b'name']

                    # get all data needed to read the blend files here (it will be freed!)
                    # lib is an address at the moment, we only use as a way to group

                    lib_all.append((lib_path, lib_block_codes))
                    # import IPython; IPython.embed()

                    # ensure we expand indirect linked libs
                    if block_codes_idlib is not None:
                        block_codes_idlib.add(lib_path)

            # do this after, incase we mangle names above
            for block in iter_blocks_idlib():
                yield from FilePath.from_block(block, basedir, extra_info, level)
        del blend

        # ----------------
        # Handle Recursive
        if recursive:
            # now we've closed the file, loop on other files

            # note, sorting - isn't needed, it just gives predictable load-order.
            for lib_path, lib_block_codes in lib_all:
                lib_path_abs = os.path.normpath(utils.compatpath(utils.abspath(lib_path, basedir)))

                # if we visited this before,
                # check we don't follow the same links more than once
                lib_block_codes_existing = lib_visit.setdefault(lib_path_abs, set())
                lib_block_codes -= lib_block_codes_existing

                # don't touch them again
                # XXX, this is now maintained in "_expand_generic_material"
                # lib_block_codes_existing.update(lib_block_codes)

                # print("looking for", lib_block_codes)

                if not lib_block_codes:
                    if VERBOSE:
                        print((indent_str + " "), "Library Skipped (visited): ", filepath, " -> ", lib_path_abs, sep="")
                    continue

                if not os.path.exists(lib_path_abs):
                    if VERBOSE:
                        print((indent_str + " "), "Library Missing: ", filepath, " -> ", lib_path_abs, sep="")
                    continue

                # import IPython; IPython.embed()
                if VERBOSE:
                    print((indent_str + " "), "Library: ", filepath, " -> ", lib_path_abs, sep="")
                    # print((indent_str + " "), lib_block_codes)
                yield from FilePath.visit_from_blend(
                        lib_path_abs,
                        readonly=readonly,
                        temp_remap_cb=temp_remap_cb,
                        recursive=True,
                        block_codes=lib_block_codes,
                        rootdir=rootdir,
                        level=level + 1,
                        lib_visit=lib_visit,
                        blendfile_level_cb=blendfile_level_cb,
                        )

        if blendfile_level_cb_exit is not None:
            blendfile_level_cb_exit(filepath)

    # ------------------------------------------------------------------------
    # Direct filepaths from Blocks
    #
    # (no expanding or following references)

    @staticmethod
    def from_block(block, basedir, extra_info, level):
        # dispatch on the 2-letter block code
        assert(block.code != b'DATA')
        fn = FilePath._from_block_dict.get(block.code)
        if fn is not None:
            yield from fn(block, basedir, extra_info, level)

    @staticmethod
    def _from_block_OB(block, basedir, extra_info, level):
        # 'ob->modifiers[...].filepath'
        for block_mod in bf_utils.iter_ListBase(
                block.get_pointer((b'modifiers', b'first')),
                next_item=(b'modifier', b'next')):
            item_md_type = block_mod[b'modifier', b'type']
            if item_md_type == C_defs.eModifierType_MeshCache:
                yield FPElem_block_path(basedir, level, (block_mod, b'filepath')), extra_info

    @staticmethod
    def _from_block_MC(block, basedir, extra_info, level):
        # movieclip
        # TODO, image sequence
        fp = FPElem_block_path(basedir, level, (block, b'name'))
        fp.is_sequence = True
        yield fp, extra_info

    @staticmethod
    def _from_block_IM(block, basedir, extra_info, level):
        # image
        # old files miss this
        image_source = block.get(b'source', C_defs.IMA_SRC_FILE)
        if image_source not in {C_defs.IMA_SRC_FILE, C_defs.IMA_SRC_SEQUENCE, C_defs.IMA_SRC_MOVIE}:
            return
        if block[b'packedfile']:
            # packed data needs no external file
            return

        fp = FPElem_block_path(basedir, level, (block, b'name'))
        if image_source == C_defs.IMA_SRC_SEQUENCE:
            fp.is_sequence = True
        yield fp, extra_info

    @staticmethod
    def _from_block_VF(block, basedir, extra_info, level):
        # vector font
        if block[b'packedfile']:
            return
        if block[b'name'] != b'<builtin>':  # builtin font
            yield FPElem_block_path(basedir, level, (block, b'name')), extra_info

    @staticmethod
    def _from_block_SO(block, basedir, extra_info, level):
        # sound
        if block[b'packedfile']:
            return
        yield FPElem_block_path(basedir, level, (block, b'name')), extra_info

    @staticmethod
    def _from_block_ME(block, basedir, extra_info, level):
        # mesh: external custom-data layer file (poly or face data)
        block_external = block.get_pointer((b'ldata', b'external'), None)
        if block_external is None:
            block_external = block.get_pointer((b'fdata', b'external'), None)

        if block_external is not None:
            yield FPElem_block_path(basedir, level, (block_external, b'filename')), extra_info

    if USE_ALEMBIC_BRANCH:
        @staticmethod
        def _from_block_CL(block, basedir, extra_info, level):
            # cache library (alembic branch only)
            if block[b'source_mode'] == C_defs.CACHE_LIBRARY_SOURCE_CACHE:
                yield FPElem_block_path(basedir, level, (block, b'input_filepath')), extra_info

    @staticmethod
    def _from_block_SC(block, basedir, extra_info, level):
        # scene: walk the sequencer strips (recursing into meta strips)
        block_ed = block.get_pointer(b'ed')
        if block_ed is not None:
            sdna_index_Sequence = block.file.sdna_index_from_id[b'Sequence']

            def seqbase(someseq):
                for item in someseq:
                    item_type = item.get(b'type', sdna_index_refine=sdna_index_Sequence)

                    if item_type >= C_defs.SEQ_TYPE_EFFECT:
                        pass
                    elif item_type == C_defs.SEQ_TYPE_META:
                        yield from seqbase(bf_utils.iter_ListBase(
                                item.get_pointer((b'seqbase', b'first'), sdna_index_refine=sdna_index_Sequence)))
                    else:
                        item_strip = item.get_pointer(b'strip', sdna_index_refine=sdna_index_Sequence)
                        if item_strip is None:  # unlikely!
                            continue
                        item_stripdata = item_strip.get_pointer(b'stripdata')

                        if item_type == C_defs.SEQ_TYPE_IMAGE:
                            yield FPElem_sequence_image_seq(
                                    basedir, level, (item_strip, b'dir', item_stripdata, b'name')), extra_info
                        elif item_type in {C_defs.SEQ_TYPE_MOVIE, C_defs.SEQ_TYPE_SOUND_RAM, C_defs.SEQ_TYPE_SOUND_HD}:
                            yield FPElem_sequence_single(
                                    basedir, level, (item_strip, b'dir', item_stripdata, b'name')), extra_info

            yield from seqbase(bf_utils.iter_ListBase(block_ed.get_pointer((b'seqbase', b'first'))))

    @staticmethod
    def _from_block_LI(block, basedir, extra_info, level):
        # library
        if block.get(b'packedfile', None):
            return

        yield FPElem_block_path(basedir, level, (block, b'name')), extra_info

    # _from_block_IM --> {b'IM': _from_block_IM, ...}
    _from_block_dict = {
        k.rpartition("_")[2].encode('ascii'): s_fn.__func__ for k, s_fn in locals().items()
        if isinstance(s_fn, staticmethod)
        if k.startswith("_from_block_")
        }
+
+
class bf_utils:
    """Small blendfile traversal helpers (fake module, staticmethods only)."""

    @staticmethod
    def iter_ListBase(block, next_item=b'next'):
        """Iterate a DNA ListBase: yield *block*, then keep following its
        *next_item* pointer until a null link is reached."""
        while block:
            yield block
            block = block.file.find_block_from_offset(block[next_item])

    # FIX: @staticmethod added for consistency with iter_ListBase
    # (behavior-compatible: plain functions accessed via the class already
    # bound correctly in Python 3).
    @staticmethod
    def iter_array(block, length=-1):
        """Yield the blocks referenced by a DATA block holding *length*
        pointers (e.g. an object's material array)."""
        assert(block.code == b'DATA')
        import blendfile
        import os
        handle = block.file.handle
        header = block.file.header

        for i in range(length):
            # seek to the i'th pointer, then dereference it
            block.file.handle.seek(block.file_offset + (header.pointer_size * i), os.SEEK_SET)
            offset = blendfile.DNA_IO.read_pointer(handle, header)
            sub_block = block.file.find_block_from_offset(offset)
            yield sub_block
+
+
+# -----------------------------------------------------------------------------
+# ID Expand
+
class ExpandID:
    # fake module
    #
    # TODO:
    #
    # Array lookups here are _WAY_ too complicated,
    # we need some nicer way to represent pointer indirection (easy like in C!)
    # but for now, use what we have.
    #
    __slots__ = ()

    def __new__(cls, *args, **kwargs):
        raise RuntimeError("%s should not be instantiated" % cls)

    @staticmethod
    def _expand_generic_material(block):
        # yield every material in the block's 'mat' pointer array ('totcol' long)
        array_len = block.get(b'totcol')
        if array_len != 0:
            array = block.get_pointer(b'mat')
            for sub_block in bf_utils.iter_array(array, array_len):
                yield sub_block

    @staticmethod
    def _expand_generic_mtex(block):
        # yield the texture & object of every used 'mtex' slot
        field = block.dna_type.field_from_name[b'mtex']
        array_len = field.dna_size // block.file.header.pointer_size

        for i in range(array_len):
            item = block.get_pointer((b'mtex', i))
            if item:
                yield item.get_pointer(b'tex')
                yield item.get_pointer(b'object')

    @staticmethod
    def _expand_generic_nodetree(block):
        # yield the ID of every node, except render-layer nodes
        assert(block.dna_type.dna_type_id == b'bNodeTree')

        sdna_index_bNode = block.file.sdna_index_from_id[b'bNode']
        for item in bf_utils.iter_ListBase(block.get_pointer((b'nodes', b'first'))):
            item_type = item.get(b'type', sdna_index_refine=sdna_index_bNode)

            if item_type != 221:  # CMP_NODE_R_LAYERS
                yield item.get_pointer(b'id', sdna_index_refine=sdna_index_bNode)

    # NOTE: @staticmethod added for consistency (safe: expand_funcs only
    # collects attribute names starting with "expand_").
    @staticmethod
    def _expand_generic_nodetree_id(block):
        block_ntree = block.get_pointer(b'nodetree', None)
        if block_ntree is not None:
            yield from ExpandID._expand_generic_nodetree(block_ntree)

    @staticmethod
    def _expand_generic_animdata(block):
        block_adt = block.get_pointer(b'adt')
        if block_adt:
            yield block_adt.get_pointer(b'action')
        # TODO, NLA

    @staticmethod
    def expand_OB(block):  # 'Object'
        yield from ExpandID._expand_generic_animdata(block)
        yield from ExpandID._expand_generic_material(block)

        has_dup_group = False
        yield block.get_pointer(b'data')
        if block[b'transflag'] & C_defs.OB_DUPLIGROUP:
            dup_group = block.get_pointer(b'dup_group')
            if dup_group is not None:
                has_dup_group = True
                yield dup_group
            del dup_group

        yield block.get_pointer(b'proxy')
        yield block.get_pointer(b'proxy_group')

        if USE_ALEMBIC_BRANCH:
            if has_dup_group:
                sdna_index_CacheLibrary = block.file.sdna_index_from_id.get(b'CacheLibrary')
                if sdna_index_CacheLibrary is not None:
                    yield block.get_pointer(b'cache_library')

        # 'ob->pose->chanbase[...].custom'
        block_pose = block.get_pointer(b'pose')
        if block_pose is not None:
            assert(block_pose.dna_type.dna_type_id == b'bPose')
            sdna_index_bPoseChannel = block_pose.file.sdna_index_from_id[b'bPoseChannel']
            for item in bf_utils.iter_ListBase(block_pose.get_pointer((b'chanbase', b'first'))):
                item_custom = item.get_pointer(b'custom', sdna_index_refine=sdna_index_bPoseChannel)
                if item_custom is not None:
                    yield item_custom
        # Expand the objects 'ParticleSettings' via:
        # 'ob->particlesystem[...].part'
        sdna_index_ParticleSystem = block.file.sdna_index_from_id.get(b'ParticleSystem')
        if sdna_index_ParticleSystem is not None:
            for item in bf_utils.iter_ListBase(
                    block.get_pointer((b'particlesystem', b'first'))):
                item_part = item.get_pointer(b'part', sdna_index_refine=sdna_index_ParticleSystem)
                if item_part is not None:
                    yield item_part

    @staticmethod
    def expand_ME(block):  # 'Mesh'
        yield from ExpandID._expand_generic_animdata(block)
        yield from ExpandID._expand_generic_material(block)
        yield block.get_pointer(b'texcomesh')
        # TODO, TexFace? - it will be slow, we could simply ignore :S

    @staticmethod
    def expand_CU(block):  # 'Curve'
        yield from ExpandID._expand_generic_animdata(block)
        yield from ExpandID._expand_generic_material(block)

        sub_block = block.get_pointer(b'vfont')
        if sub_block is not None:
            yield sub_block
            yield block.get_pointer(b'vfontb')
            yield block.get_pointer(b'vfonti')
            yield block.get_pointer(b'vfontbi')

        yield block.get_pointer(b'bevobj')
        yield block.get_pointer(b'taperobj')
        yield block.get_pointer(b'textoncurve')

    @staticmethod
    def expand_MB(block):  # 'MBall'
        yield from ExpandID._expand_generic_animdata(block)
        yield from ExpandID._expand_generic_material(block)

    @staticmethod
    def expand_AR(block):  # 'bArmature'
        yield from ExpandID._expand_generic_animdata(block)

    @staticmethod
    def expand_LA(block):  # 'Lamp'
        yield from ExpandID._expand_generic_animdata(block)
        yield from ExpandID._expand_generic_nodetree_id(block)
        yield from ExpandID._expand_generic_mtex(block)

    @staticmethod
    def expand_MA(block):  # 'Material'
        yield from ExpandID._expand_generic_animdata(block)
        yield from ExpandID._expand_generic_nodetree_id(block)
        yield from ExpandID._expand_generic_mtex(block)

        yield block.get_pointer(b'group')

    @staticmethod
    def expand_TE(block):  # 'Tex'
        yield from ExpandID._expand_generic_animdata(block)
        yield from ExpandID._expand_generic_nodetree_id(block)
        yield block.get_pointer(b'ima')

    @staticmethod
    def expand_WO(block):  # 'World'
        yield from ExpandID._expand_generic_animdata(block)
        yield from ExpandID._expand_generic_nodetree_id(block)
        yield from ExpandID._expand_generic_mtex(block)

    @staticmethod
    def expand_NT(block):  # 'bNodeTree'
        yield from ExpandID._expand_generic_animdata(block)
        yield from ExpandID._expand_generic_nodetree(block)

    @staticmethod
    def expand_PA(block):  # 'ParticleSettings'
        yield from ExpandID._expand_generic_animdata(block)
        block_ren_as = block[b'ren_as']
        if block_ren_as == C_defs.PART_DRAW_GR:
            yield block.get_pointer(b'dup_group')
        elif block_ren_as == C_defs.PART_DRAW_OB:
            yield block.get_pointer(b'dup_ob')

    @staticmethod
    def expand_SC(block):  # 'Scene'
        yield from ExpandID._expand_generic_animdata(block)
        yield from ExpandID._expand_generic_nodetree_id(block)
        yield block.get_pointer(b'camera')
        yield block.get_pointer(b'world')
        yield block.get_pointer(b'set', None)
        yield block.get_pointer(b'clip', None)

        sdna_index_Base = block.file.sdna_index_from_id[b'Base']
        for item in bf_utils.iter_ListBase(block.get_pointer((b'base', b'first'))):
            yield item.get_pointer(b'object', sdna_index_refine=sdna_index_Base)

        block_ed = block.get_pointer(b'ed')
        if block_ed is not None:
            sdna_index_Sequence = block.file.sdna_index_from_id[b'Sequence']

            def seqbase(someseq):
                for item in someseq:
                    item_type = item.get(b'type', sdna_index_refine=sdna_index_Sequence)

                    if item_type >= C_defs.SEQ_TYPE_EFFECT:
                        pass
                    elif item_type == C_defs.SEQ_TYPE_META:
                        # FIX: path was written ``(b'seqbase' b'first')`` --
                        # implicit bytes concatenation to b'seqbasefirst', so
                        # meta strips were never recursed into; a comma was
                        # intended (compare FilePath._from_block_SC).
                        yield from seqbase(bf_utils.iter_ListBase(
                                item.get_pointer((b'seqbase', b'first'), sdna_index_refine=sdna_index_Sequence)))
                    else:
                        if item_type == C_defs.SEQ_TYPE_SCENE:
                            yield item.get_pointer(b'scene')
                        elif item_type == C_defs.SEQ_TYPE_MOVIECLIP:
                            yield item.get_pointer(b'clip')
                        elif item_type == C_defs.SEQ_TYPE_MASK:
                            yield item.get_pointer(b'mask')
                        elif item_type == C_defs.SEQ_TYPE_SOUND_RAM:
                            yield item.get_pointer(b'sound')

            yield from seqbase(bf_utils.iter_ListBase(
                    block_ed.get_pointer((b'seqbase', b'first'))))

    @staticmethod
    def expand_GR(block):  # 'Group'
        sdna_index_GroupObject = block.file.sdna_index_from_id[b'GroupObject']
        for item in bf_utils.iter_ListBase(block.get_pointer((b'gobject', b'first'))):
            yield item.get_pointer(b'ob', sdna_index_refine=sdna_index_GroupObject)

    # expand_GR --> {b'GR': expand_GR, ...}
    expand_funcs = {
        k.rpartition("_")[2].encode('ascii'): s_fn.__func__ for k, s_fn in locals().items()
        if isinstance(s_fn, staticmethod)
        if k.startswith("expand_")
        }
+
+
+# -----------------------------------------------------------------------------
+# Packing Utility
+
+
class utils:
    # fake module
    __slots__ = ()

    def __new__(cls, *args, **kwargs):
        raise RuntimeError("%s should not be instantiated" % cls)

    @staticmethod
    def abspath(path, start, library=None):
        """Make a blend-style relative path (b'//...') absolute against *start*."""
        import os
        if path.startswith(b'//'):
            # if library:
            #     start = os.path.dirname(abspath(library.filepath))
            return os.path.join(start, path[2:])
        return path

    if __import__("os").sep == '/':
        @staticmethod
        def compatpath(path):
            # unix: convert win32 back-slashes
            return path.replace(b'\\', b'/')
    else:
        @staticmethod
        def compatpath(path):
            # keep '//'
            return path[:2] + path[2:].replace(b'/', b'\\')

    @staticmethod
    def splitpath(path):
        """
        Splits the path using either slashes
        """
        split1 = path.rpartition(b'/')
        split2 = path.rpartition(b'\\')
        # prefer whichever separator occurs last in the path
        if len(split1[0]) > len(split2[0]):
            return split1
        else:
            return split2

    # NOTE: @staticmethod added for consistency with the other helpers.
    @staticmethod
    def find_sequence_paths(filepath, use_fullpath=True):
        """Return the files on disk belonging to the same numbered sequence
        as *filepath* (full paths unless *use_fullpath* is False)."""
        # supports str, byte paths
        basedir, filename = os.path.split(filepath)
        if not os.path.exists(basedir):
            return []

        filename_noext, ext = os.path.splitext(filename)

        from string import digits
        if isinstance(filepath, bytes):
            digits = digits.encode()
        filename_nodigits = filename_noext.rstrip(digits)

        if len(filename_nodigits) == len(filename_noext):
            # input isn't from a sequence
            return []

        files = os.listdir(basedir)
        files[:] = [
            f for f in files
            if f.startswith(filename_nodigits) and
            f.endswith(ext) and
            # FIX: else-branch was ``-1``, which dropped the last character of
            # extension-less names and so excluded valid sequence members
            # (e.g. b'seq1'); ``None`` keeps the whole digit suffix.
            f[len(filename_nodigits):-len(ext) if ext else None].isdigit()
            ]
        if use_fullpath:
            files[:] = [
                os.path.join(basedir, f) for f in files
                ]

        return files
diff --git a/io_blend_utils/blendfile_pack.py b/io_blend_utils/blendfile_pack.py
new file mode 100755
index 00000000..225a941d
--- /dev/null
+++ b/io_blend_utils/blendfile_pack.py
@@ -0,0 +1,601 @@
+#!/usr/bin/env python3
+
+# ***** BEGIN GPL LICENSE BLOCK *****
+#
+# This program is free software; you can redistribute it and/or
+# modify it under the terms of the GNU General Public License
+# as published by the Free Software Foundation; either version 2
+# of the License, or (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this program; if not, write to the Free Software Foundation,
+# Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
+#
+# ***** END GPL LICENCE BLOCK *****
+
+from blend import blendfile_path_walker
+
+TIMEIT = False
+
+
+# ----------------------
+# debug low level output
+#
+# ... when internals _really_ fail & we want to know why
+def _dbg(text):
+ import sys
+ from utils.system import colorize
+ if type(text) is bytes:
+ text = text.decode('utf-8')
+ sys.__stdout__.write(colorize(text, color='red') + "\n")
+ sys.__stdout__.flush()
+
+
+def _relpath_remap(
+ path_src,
+ base_dir_src,
+ fp_basedir,
+ blendfile_src_dir_fakeroot,
+ ):
+
+ import os
+
+ if not os.path.isabs(path_src):
+ # Absolute win32 paths on a unix system
+ # cause bad issues!
+ if len(path_src) >= 2:
+ if path_src[0] != b'/'[0] and path_src[1] == b':'[0]:
+ pass
+ else:
+ raise Exception("Internal error 'path_src' -> %r must be absolute" % path_src)
+
+ path_src = os.path.normpath(path_src)
+ path_dst = os.path.relpath(path_src, base_dir_src)
+
+ if blendfile_src_dir_fakeroot is None:
+ # /foo/../bar.png --> /foo/__/bar.png
+ path_dst = path_dst.replace(b'..', b'__')
+ path_dst = os.path.normpath(path_dst)
+ else:
+ if b'..' in path_dst:
+ # remap, relative to project root
+
+ # paths
+ path_dst = os.path.join(blendfile_src_dir_fakeroot, path_dst)
+ path_dst = os.path.normpath(path_dst)
+ # if there are paths outside the root still...
+ # This means they are outside the project directory. We don't support this,
+ # so name accordingly
+ if b'..' in path_dst:
+ # SHOULD NEVER HAPPEN
+ path_dst = path_dst.replace(b'..', b'__nonproject__')
+ path_dst = b'_' + path_dst
+
+ # _dbg(b"FINAL A: " + path_dst)
+ path_dst_final = os.path.join(os.path.relpath(base_dir_src, fp_basedir), path_dst)
+ path_dst_final = os.path.normpath(path_dst_final)
+ # _dbg(b"FINAL B: " + path_dst_final)
+
+ return path_dst, path_dst_final
+
+
+def pack(
+ # store the blendfile relative to this directory, can be:
+ # os.path.dirname(blendfile_src)
+ # but in some cases we want to use a path higher up.
+ # base_dir_src,
+ blendfile_src, blendfile_dst,
+ mode='ZIP',
+ # optionally pass in the temp dir
+ base_dir_dst_temp=None,
+ paths_remap_relbase=None,
+ deps_remap=None, paths_remap=None, paths_uuid=None,
+ # load every libs dep, not just used deps.
+ all_deps=False,
+ compress_level=-1,
+ # yield reports
+ report=None,
+
+ # The project path, eg:
+ # /home/me/myproject/mysession/path/to/blend/file.blend
+ # the path would be: b'path/to/blend'
+ #
+ # This is needed so we can choose to store paths
+ # relative to project or relative to the current file.
+ #
+ # When None, _all_ paths are mapped relative to the current blend.
+ # converting: '../../bar' --> '__/__/bar'
+ # so all paths are nested and not moved outside the session path.
+ blendfile_src_dir_fakeroot=None,
+
+ # Read variations from json files.
+ use_variations=False,
+
+ # do _everything_ except to write the paths.
+ # useful if we want to calculate deps to remap but postpone applying them.
+ readonly=False,
+ # dict of binary_edits:
+ # {file: [(ofs, bytes), ...], ...}
+ # ... where the file is the relative 'packed' location.
+ binary_edits=None,
+
+ # Filename filter, allow to exclude files from the pack,
+ # function takes a string returns True if the files should be included.
+ filename_filter=None,
+ ):
+ """
+ :param deps_remap: Store path deps_remap info as follows.
+ {"file.blend": {"path_new": "path_old", ...}, ...}
+
+ :type deps_remap: dict or None
+ """
+
+ # Internal details:
+ # - we copy to a temp path before operating on the blend file
+ # so we can modify in-place.
+ # - temp files are only created once, (if we never touched them before),
+ # this way, for linked libraries - a single blend file may be used
+ # multiple times, each access will apply new edits on top of the old ones.
+ # - we track which libs we have touched (using 'lib_visit' arg),
+ # this means that the same libs won't be touched many times to modify the same data
+ # also prevents cyclic loops from crashing.
+
+ import os
+ import sys
+
+ if sys.stdout.isatty():
+ from utils.system import colorize
+ else:
+ from utils.system import colorize_dummy as colorize
+
+ # in case this is directly from the command line or user-input
+ blendfile_src = os.path.normpath(os.path.abspath(blendfile_src))
+ blendfile_dst = os.path.normpath(os.path.abspath(blendfile_dst))
+
+ # first check args are OK
+ # fakeroot _cant_ start with a separator, since we prepend chars to it.
+ assert((blendfile_src_dir_fakeroot is None) or
+ (not blendfile_src_dir_fakeroot.startswith(os.sep.encode('ascii'))))
+
+ path_temp_files = set()
+ path_copy_files = set()
+
+ # path_temp_files --> original-location
+ path_temp_files_orig = {}
+
+ TEMP_SUFFIX = b'@'
+
+ if report is None:
+ def report(msg):
+ return msg
+
+ yield report("%s: %r...\n" % (colorize("\nscanning deps", color='bright_green'), blendfile_src))
+
+ if TIMEIT:
+ import time
+ t = time.time()
+
+ base_dir_src = os.path.dirname(blendfile_src)
+ base_dir_dst = os.path.dirname(blendfile_dst)
+ # _dbg(blendfile_src)
+ # _dbg(blendfile_dst)
+
+ if base_dir_dst_temp is None:
+ base_dir_dst_temp = base_dir_dst
+
+ if mode == 'ZIP':
+ base_dir_dst_temp = os.path.join(base_dir_dst_temp, b'__blendfile_temp__')
+ else:
+ base_dir_dst_temp = os.path.join(base_dir_dst_temp, b'__blendfile_pack__')
+
+ def temp_remap_cb(filepath, rootdir):
+ """
+ Create temp files in the destination path.
+ """
+ filepath = blendfile_path_walker.utils.compatpath(filepath)
+
+ if use_variations:
+ if blendfile_levels_dict_curr:
+ filepath = blendfile_levels_dict_curr.get(filepath, filepath)
+
+ # ...
+
+ # first remap this blend file to the location it will end up (so we can get images relative to _that_)
+ # TODO(cam) cache the results
+ fp_basedir_conv = _relpath_remap(os.path.join(rootdir, b'dummy'), base_dir_src, base_dir_src, blendfile_src_dir_fakeroot)[0]
+ fp_basedir_conv = os.path.join(base_dir_src, os.path.dirname(fp_basedir_conv))
+
+ # then get the file relative to the new location
+ filepath_tmp = _relpath_remap(filepath, base_dir_src, fp_basedir_conv, blendfile_src_dir_fakeroot)[0]
+ filepath_tmp = os.path.normpath(os.path.join(base_dir_dst_temp, filepath_tmp)) + TEMP_SUFFIX
+
+ # only overwrite once (so we can write into a path already containing files)
+ if filepath_tmp not in path_temp_files:
+ if mode != 'NONE':
+ import shutil
+ os.makedirs(os.path.dirname(filepath_tmp), exist_ok=True)
+ shutil.copy(filepath, filepath_tmp)
+ path_temp_files.add(filepath_tmp)
+ path_temp_files_orig[filepath_tmp] = filepath
+ if mode != 'NONE':
+ return filepath_tmp
+ else:
+ return filepath
+
+ # -----------------
+ # Variation Support
+ #
+ # Use a json file to allow recursive-remapping of variations.
+ #
+ # file_a.blend
+ # file_a.json '{"variations": ["tree.blue.blend", ...]}'
+ # file_a.blend -> file_b.blend
+ # file_b.blend --> tree.blend
+ #
+ # the variation of `file_a.blend` causes `file_b.blend`
+ # to link in `tree.blue.blend`
+
+ if use_variations:
+ blendfile_levels = []
+ blendfile_levels_dict = []
+ blendfile_levels_dict_curr = {}
+
+ def blendfile_levels_rebuild():
+ # after changing blend file configurations,
+ # re-create current variation lookup table
+ blendfile_levels_dict_curr.clear()
+ for d in blendfile_levels_dict:
+ if d is not None:
+ blendfile_levels_dict_curr.update(d)
+
+ # use variations!
+ def blendfile_level_cb_enter(filepath):
+ import json
+
+ filepath_json = os.path.splitext(filepath)[0] + b".json"
+ if os.path.exists(filepath_json):
+ with open(filepath_json, encoding='utf-8') as f_handle:
+ variations = [f.encode("utf-8") for f in json.load(f_handle).get("variations")]
+ # convert to absolute paths
+ basepath = os.path.dirname(filepath)
+ variations = {
+ # Reverse lookup, from non-variation to variation we specify in this file.
+ # {"/abs/path/foo.png": "/abs/path/foo.variation.png", ...}
+ # .. where the input _is_ the variation,
+ # we just make it absolute and use the non-variation as
+ # the key to the variation value.
+ b".".join(f.rsplit(b".", 2)[0::2]): f for f_ in variations
+ for f in (os.path.normpath(os.path.join(basepath, f_)),)
+ }
+ else:
+ variations = None
+
+ blendfile_levels.append(filepath)
+ blendfile_levels_dict.append(variations)
+
+ if variations:
+ blendfile_levels_rebuild()
+
+ def blendfile_level_cb_exit(filepath):
+ blendfile_levels.pop()
+ blendfile_levels_dict.pop()
+
+ if blendfile_levels_dict_curr:
+ blendfile_levels_rebuild()
+ else:
+ blendfile_level_cb_enter = blendfile_level_cb_exit = None
+ blendfile_levels_dict_curr = None
+
+ lib_visit = {}
+ fp_blend_basename_last = b''
+
+ for fp, (rootdir, fp_blend_basename) in blendfile_path_walker.FilePath.visit_from_blend(
+ blendfile_src,
+ readonly=readonly,
+ temp_remap_cb=temp_remap_cb,
+ recursive=True,
+ recursive_all=all_deps,
+ lib_visit=lib_visit,
+ blendfile_level_cb=(
+ blendfile_level_cb_enter,
+ blendfile_level_cb_exit,
+ )
+ ):
+
+ # we could pass this in!
+ fp_blend = os.path.join(fp.basedir, fp_blend_basename)
+
+ if fp_blend_basename_last != fp_blend_basename:
+ yield report(" %s: %s\n" % (colorize("blend", color='blue'), fp_blend))
+ fp_blend_basename_last = fp_blend_basename
+
+ if binary_edits is not None:
+ # TODO, temp_remap_cb makes paths, this isn't ideal,
+ # in this case we only want to remap!
+ if mode == 'NONE':
+ tmp = temp_remap_cb(fp_blend, base_dir_src)
+ tmp = os.path.relpath(tmp, base_dir_src)
+ else:
+ tmp = temp_remap_cb(fp_blend, base_dir_src)
+ tmp = os.path.relpath(tmp[:-len(TEMP_SUFFIX)], base_dir_dst_temp)
+ binary_edits_curr = binary_edits.setdefault(tmp, [])
+ del tmp
+
+ # assume the path might be relative
+ path_src_orig = fp.filepath
+ path_rel = blendfile_path_walker.utils.compatpath(path_src_orig)
+ path_src = blendfile_path_walker.utils.abspath(path_rel, fp.basedir)
+ path_src = os.path.normpath(path_src)
+
+ if filename_filter and not filename_filter(path_src):
+ yield report(" %s: %r\n" % (colorize("exclude", color='yellow'), path_src))
+ continue
+
+ # apply variation (if available)
+ if use_variations:
+ if blendfile_levels_dict_curr:
+ path_src_variation = blendfile_levels_dict_curr.get(path_src)
+ if path_src_variation is not None:
+ path_src = path_src_variation
+ path_rel = os.path.join(os.path.dirname(path_rel), os.path.basename(path_src))
+ del path_src_variation
+
+ # destination path relative to the root
+ # assert(b'..' not in path_src)
+ assert(b'..' not in base_dir_src)
+
+ # first remap this blend file to the location it will end up (so we can get images relative to _that_)
+ # TODO(cam) cache the results
+ fp_basedir_conv = _relpath_remap(fp_blend, base_dir_src, base_dir_src, blendfile_src_dir_fakeroot)[0]
+ fp_basedir_conv = os.path.join(base_dir_src, os.path.dirname(fp_basedir_conv))
+
+ # then get the file relative to the new location
+ path_dst, path_dst_final = _relpath_remap(path_src, base_dir_src, fp_basedir_conv, blendfile_src_dir_fakeroot)
+
+ path_dst = os.path.join(base_dir_dst, path_dst)
+
+ path_dst_final = b'//' + path_dst_final
+
+ # Assign direct or add to edit-list (to apply later)
+ if not readonly:
+ fp.filepath = path_dst_final
+ if binary_edits is not None:
+ fp.filepath_assign_edits(path_dst_final, binary_edits_curr)
+
+ # add to copy-list
+ # never copy libs (handled separately)
+ if not isinstance(fp, blendfile_path_walker.FPElem_block_path) or fp.userdata[0].code != b'LI':
+ path_copy_files.add((path_src, path_dst))
+
+ for file_list in (
+ blendfile_path_walker.utils.find_sequence_paths(path_src) if fp.is_sequence else (),
+ fp.files_siblings(),
+ ):
+
+ _src_dir = os.path.dirname(path_src)
+ _dst_dir = os.path.dirname(path_dst)
+ path_copy_files.update(
+ {(os.path.join(_src_dir, f), os.path.join(_dst_dir, f))
+ for f in file_list
+ })
+ del _src_dir, _dst_dir
+
+ if deps_remap is not None:
+ # this needs to become JSON later... ugh, need to use strings
+ deps_remap.setdefault(
+ fp_blend_basename.decode('utf-8'),
+ {})[path_dst_final.decode('utf-8')] = path_src_orig.decode('utf-8')
+
+ del lib_visit, fp_blend_basename_last
+
+ if TIMEIT:
+ print(" Time: %.4f\n" % (time.time() - t))
+
+ yield report(("%s: %d files\n") %
+ (colorize("\narchiving", color='bright_green'), len(path_copy_files) + 1))
+
+ # handle deps_remap and file renaming
+ if deps_remap is not None:
+ blendfile_src_basename = os.path.basename(blendfile_src).decode('utf-8')
+ blendfile_dst_basename = os.path.basename(blendfile_dst).decode('utf-8')
+
+ if blendfile_src_basename != blendfile_dst_basename:
+ if mode == 'FILE':
+ deps_remap[blendfile_dst_basename] = deps_remap[blendfile_src_basename]
+ del deps_remap[blendfile_src_basename]
+ del blendfile_src_basename, blendfile_dst_basename
+
+ # store path mapping {dst: src}
+ if paths_remap is not None:
+
+ if paths_remap_relbase is not None:
+ def relbase(fn):
+ return os.path.relpath(fn, paths_remap_relbase)
+ else:
+ def relbase(fn):
+ return fn
+
+ for src, dst in path_copy_files:
+ # TODO. relative to project-basepath
+ paths_remap[os.path.relpath(dst, base_dir_dst).decode('utf-8')] = relbase(src).decode('utf-8')
+ # main file XXX, should have better way!
+ paths_remap[os.path.basename(blendfile_src).decode('utf-8')] = relbase(blendfile_src).decode('utf-8')
+
+ # blend libs
+ for dst in path_temp_files:
+ src = path_temp_files_orig[dst]
+ k = os.path.relpath(dst[:-len(TEMP_SUFFIX)], base_dir_dst_temp).decode('utf-8')
+ paths_remap[k] = relbase(src).decode('utf-8')
+ del k
+
+ del relbase
+
+ if paths_uuid is not None:
+ from utils.system import uuid_from_file
+
+ for src, dst in path_copy_files:
+ # reports are handled again, later on.
+ if os.path.exists(src):
+ paths_uuid[os.path.relpath(dst, base_dir_dst).decode('utf-8')] = uuid_from_file(src)
+ # XXX, better way to store temp target
+ blendfile_dst_tmp = temp_remap_cb(blendfile_src, base_dir_src)
+ paths_uuid[os.path.basename(blendfile_src).decode('utf-8')] = uuid_from_file(blendfile_dst_tmp)
+
+ # blend libs
+ for dst in path_temp_files:
+ k = os.path.relpath(dst[:-len(TEMP_SUFFIX)], base_dir_dst_temp).decode('utf-8')
+ if k not in paths_uuid:
+ if mode == 'NONE':
+ dst = path_temp_files_orig[dst]
+ paths_uuid[k] = uuid_from_file(dst)
+ del k
+
+ del blendfile_dst_tmp
+ del uuid_from_file
+
+ # --------------------
+ # Handle File Copy/Zip
+
+ if mode == 'FILE':
+ import shutil
+ blendfile_dst_tmp = temp_remap_cb(blendfile_src, base_dir_src)
+
+ shutil.move(blendfile_dst_tmp, blendfile_dst)
+ path_temp_files.remove(blendfile_dst_tmp)
+
+ # strip TEMP_SUFFIX
+ for fn in path_temp_files:
+ shutil.move(fn, fn[:-len(TEMP_SUFFIX)])
+
+ for src, dst in path_copy_files:
+ assert(b'.blend' not in dst)
+
+ # in rare cases a filepath could point to a directory
+ if (not os.path.exists(src)) or os.path.isdir(src):
+ yield report(" %s: %r\n" % (colorize("source missing", color='red'), src))
+ else:
+ yield report(" %s: %r -> %r\n" % (colorize("copying", color='blue'), src, dst))
+ shutil.copy(src, dst)
+
+ yield report(" %s: %r\n" % (colorize("written", color='green'), blendfile_dst))
+
+ elif mode == 'ZIP':
+ import shutil
+ import zipfile
+
+ # not awesome!
+ import zlib
+ assert(compress_level in range(-1, 10))
+ _compress_level_orig = zlib.Z_DEFAULT_COMPRESSION
+ zlib.Z_DEFAULT_COMPRESSION = compress_level
+ _compress_mode = zipfile.ZIP_STORED if (compress_level == 0) else zipfile.ZIP_DEFLATED
+ if _compress_mode == zipfile.ZIP_STORED:
+ def is_compressed_filetype(fn):
+ return False
+ else:
+ from utils.system import is_compressed_filetype
+
+ with zipfile.ZipFile(blendfile_dst.decode('utf-8'), 'w', _compress_mode) as zip_handle:
+ for fn in path_temp_files:
+ yield report(" %s: %r -> <archive>\n" % (colorize("copying", color='blue'), fn))
+ zip_handle.write(
+ fn.decode('utf-8'),
+ arcname=os.path.relpath(fn[:-1], base_dir_dst_temp).decode('utf-8'),
+ )
+ os.remove(fn)
+
+ shutil.rmtree(base_dir_dst_temp)
+
+ for src, dst in path_copy_files:
+ assert(not dst.endswith(b'.blend'))
+
+ # in rare cases a filepath could point to a directory
+ if (not os.path.exists(src)) or os.path.isdir(src):
+ yield report(" %s: %r\n" % (colorize("source missing", color='red'), src))
+ else:
+ yield report(" %s: %r -> <archive>\n" % (colorize("copying", color='blue'), src))
+ zip_handle.write(
+ src.decode('utf-8'),
+ arcname=os.path.relpath(dst, base_dir_dst).decode('utf-8'),
+ compress_type=zipfile.ZIP_STORED if is_compressed_filetype(dst) else _compress_mode,
+ )
+
+ zlib.Z_DEFAULT_COMPRESSION = _compress_level_orig
+ del _compress_level_orig, _compress_mode
+
+ yield report(" %s: %r\n" % (colorize("written", color='green'), blendfile_dst))
+ elif mode == 'NONE':
+ pass
+ else:
+ raise Exception("%s not a known mode" % mode)
+
+
+def create_argparse():
+ import os
+ import argparse
+
+ usage_text = (
+ "Run this script to extract blend-files(s) to a destination path: " +
+ os.path.basename(__file__) +
+ " --input=FILE --output=FILE [options]")
+
+ parser = argparse.ArgumentParser(description=usage_text)
+
+ # for main_render() only, but validate args.
+ parser.add_argument(
+ "-i", "--input", dest="path_src", metavar='FILE', required=True,
+ help="Input blend file",
+ )
+ parser.add_argument(
+ "-o", "--output", dest="path_dst", metavar='DIR', required=True,
+ help="Output file",
+ )
+ parser.add_argument(
+ "-m", "--mode", dest="mode", metavar='MODE', required=False,
+ choices=('FILE', 'ZIP'), default='ZIP',
+ help="Type of archive to write into",
+ )
+ parser.add_argument(
+ "-q", "--quiet", dest="use_quiet", action='store_true', required=False,
+ help="Suppress status output",
+ )
+ parser.add_argument(
+ "-t", "--temp", dest="temp_path", metavar='DIR', required=False,
+ help="Override the default temp directory",
+ )
+
+ return parser
+
+
+def main():
+ import sys
+
+ parser = create_argparse()
+ args = parser.parse_args(sys.argv[1:])
+
+ if args.use_quiet:
+ def report(msg):
+ pass
+ else:
+ def report(msg):
+ sys.stdout.write(msg)
+ sys.stdout.flush()
+
+ for msg in pack(
+ args.path_src.encode('utf-8'),
+ args.path_dst.encode('utf-8'),
+ mode=args.mode,
+ base_dir_dst_temp=(
+ args.temp_path.encode('utf-8')
+ if args.temp_path else None),
+ ):
+ report(msg)
+
+
+if __name__ == "__main__":
+ main()
diff --git a/io_blend_utils/utils/system.py b/io_blend_utils/utils/system.py
new file mode 100644
index 00000000..970a6464
--- /dev/null
+++ b/io_blend_utils/utils/system.py
@@ -0,0 +1,105 @@
+# ***** BEGIN GPL LICENSE BLOCK *****
+#
+# This program is free software; you can redistribute it and/or
+# modify it under the terms of the GNU General Public License
+# as published by the Free Software Foundation; either version 2
+# of the License, or (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this program; if not, write to the Free Software Foundation,
+# Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
+#
+# ***** END GPL LICENCE BLOCK *****
+
+def colorize_dummy(msg, color=None):
+ return msg
+
+_USE_COLOR = True
+if _USE_COLOR:
+ color_codes = {
+ 'black': '\033[0;30m',
+ 'bright_gray': '\033[0;37m',
+ 'blue': '\033[0;34m',
+ 'white': '\033[1;37m',
+ 'green': '\033[0;32m',
+ 'bright_blue': '\033[1;34m',
+ 'cyan': '\033[0;36m',
+ 'bright_green': '\033[1;32m',
+ 'red': '\033[0;31m',
+ 'bright_cyan': '\033[1;36m',
+ 'purple': '\033[0;35m',
+ 'bright_red': '\033[1;31m',
+ 'yellow': '\033[0;33m',
+ 'bright_purple':'\033[1;35m',
+ 'dark_gray': '\033[1;30m',
+ 'bright_yellow':'\033[1;33m',
+ 'normal': '\033[0m',
+ }
+
+ def colorize(msg, color=None):
+ return (color_codes[color] + msg + color_codes['normal'])
+else:
+ colorize = colorize_dummy
+
+
+def uuid_from_file(fn, block_size=1 << 20):
+ """
+ Returns an arbitrary sized unique ASCII string based on the file contents.
+ (exact hashing method may change).
+ """
+ with open(fn, 'rb') as f:
+ # first get the size
+ import os
+ f.seek(0, os.SEEK_END)
+ size = f.tell()
+ f.seek(0, os.SEEK_SET)
+ del os
+ # done!
+
+ import hashlib
+ sha1 = hashlib.new('sha512')
+ while True:
+ data = f.read(block_size)
+ if not data:
+ break
+ sha1.update(data)
+ # skip the '0x'
+ return hex(size)[2:] + sha1.hexdigest()
+
+
+def is_compressed_filetype(filepath):
+ """
+ Use to check if we should compress files in a zip.
+ """
+ # for now, only include files which Blender is likely to reference
+ import os
+ assert(isinstance(filepath, bytes))
+ return os.path.splitext(filepath)[1].lower() in {
+ # images
+ b'.exr',
+ b'.jpg', b'.jpeg',
+ b'.png',
+
+ # audio
+ b'.aif', b'.aiff',
+ b'.mp3',
+ b'.ogg', b'.ogv',
+ b'.wav',
+
+ # video
+ b'.avi',
+ b'.mkv',
+ b'.mov',
+ b'.mpg', b'.mpeg',
+
+ # archives
+ # '.bz2', '.tbz',
+ # '.gz', '.tgz',
+ # '.zip',
+ }
+
diff --git a/io_curve_svg/import_svg.py b/io_curve_svg/import_svg.py
index 611498aa..a1df2204 100644
--- a/io_curve_svg/import_svg.py
+++ b/io_curve_svg/import_svg.py
@@ -102,13 +102,10 @@ def SVGParseFloat(s, i=0):
token += s[i]
i += 1
- if s[i].isdigit():
- while i < n and s[i].isdigit():
- token += s[i]
- i += 1
- else:
- raise Exception('Invalid float value near ' +
- s[start:start + 10])
+ if s[i].isdigit():
+ while i < n and s[i].isdigit():
+ token += s[i]
+ i += 1
else:
raise Exception('Invalid float value near ' + s[start:start + 10])
@@ -1761,6 +1758,7 @@ class SVGGeometrySVG(SVGGeometryContainer):
"""
rect = SVGRectFromNode(self._node, self._context)
+ self._pushRect(rect)
matrix = self.getNodeMatrix()
@@ -1772,7 +1770,6 @@ class SVGGeometrySVG(SVGGeometryContainer):
matrix = matrix * Matrix.Translation([0.0, -document_height , 0.0])
self._pushMatrix(matrix)
- self._pushRect(rect)
super()._doCreateGeom(False)
diff --git a/io_import_images_as_planes.py b/io_import_images_as_planes.py
index 8e54c91b..c47af01d 100644
--- a/io_import_images_as_planes.py
+++ b/io_import_images_as_planes.py
@@ -19,8 +19,8 @@
bl_info = {
"name": "Import Images as Planes",
"author": "Florian Meyer (tstscr), mont29, matali",
- "version": (2, 0, 2),
- "blender": (2, 74, 0),
+ "version": (2, 0, 4),
+ "blender": (2, 76, 1),
"location": "File > Import > Images as Planes or Add > Mesh > Images as Planes",
"description": "Imports images and creates planes with the appropriate aspect ratio. "
"The images are mapped to the planes.",
@@ -76,9 +76,7 @@ VID_EXT_FILTER = {e for ext_k, ext_v in EXT_FILTER.items() if ext_k in {"avi", "
CYCLES_SHADERS = (
('BSDF_DIFFUSE', "Diffuse", "Diffuse Shader"),
- ('EMISSION', "Emission", "Emission Shader"),
- ('BSDF_DIFFUSE_BSDF_TRANSPARENT', "Diffuse & Transparent", "Diffuse and Transparent Mix"),
- ('EMISSION_BSDF_TRANSPARENT', "Emission & Transparent", "Emission and Transparent Mix")
+ ('EMISSION', "Emission", "Emission Shader")
)
# -----------------------------------------------------------------------------
@@ -179,6 +177,9 @@ class IMPORT_OT_image_to_plane(Operator, AddObjectHelper):
align_offset = FloatProperty(name="Offset", min=0, soft_min=0, default=0.1, description="Space between Planes")
+ force_reload = BoolProperty(name="Force Reload", default=False,
+ description="Force reloading of the image if already opened elsewhere in Blender")
+
# Callback which will update File window's filter options accordingly to extension setting.
def update_extensions(self, context):
if self.extension == DEFAULT_EXT:
@@ -269,6 +270,8 @@ class IMPORT_OT_image_to_plane(Operator, AddObjectHelper):
box.prop(self, "align_offset")
row = box.row()
+ row.prop(self, "force_reload")
+ row = box.row()
row.active = bpy.data.is_saved
row.prop(self, "relative")
box.prop(self, "match_len")
@@ -327,7 +330,8 @@ class IMPORT_OT_image_to_plane(Operator, AddObjectHelper):
engine = context.scene.render.engine
import_list, directory = self.generate_paths()
- images = tuple(load_image(path, directory) for path in import_list)
+ images = tuple(load_image(path, directory, check_existing=True, force_reload=self.force_reload)
+ for path in import_list)
for img in images:
self.set_image_options(img)
@@ -497,37 +501,31 @@ class IMPORT_OT_image_to_plane(Operator, AddObjectHelper):
if self.shader == 'BSDF_DIFFUSE':
bsdf_diffuse = node_tree.nodes.new('ShaderNodeBsdfDiffuse')
- node_tree.links.new(out_node.inputs[0], bsdf_diffuse.outputs[0])
node_tree.links.new(bsdf_diffuse.inputs[0], tex_image.outputs[0])
+ if self.use_transparency:
+ bsdf_transparent = node_tree.nodes.new('ShaderNodeBsdfTransparent')
+ mix_shader = node_tree.nodes.new('ShaderNodeMixShader')
+ node_tree.links.new(out_node.inputs[0], mix_shader.outputs[0])
+ node_tree.links.new(mix_shader.inputs[0], tex_image.outputs[1])
+ node_tree.links.new(mix_shader.inputs[2], bsdf_diffuse.outputs[0])
+ node_tree.links.new(mix_shader.inputs[1], bsdf_transparent.outputs[0])
+ else:
+ node_tree.links.new(out_node.inputs[0], bsdf_diffuse.outputs[0])
elif self.shader == 'EMISSION':
emission = node_tree.nodes.new('ShaderNodeEmission')
lightpath = node_tree.nodes.new('ShaderNodeLightPath')
- node_tree.links.new(out_node.inputs[0], emission.outputs[0])
- node_tree.links.new(emission.inputs[0], tex_image.outputs[0])
- node_tree.links.new(emission.inputs[1], lightpath.outputs[0])
-
- elif self.shader == 'BSDF_DIFFUSE_BSDF_TRANSPARENT':
- bsdf_diffuse = node_tree.nodes.new('ShaderNodeBsdfDiffuse')
- bsdf_transparent = node_tree.nodes.new('ShaderNodeBsdfTransparent')
- mix_shader = node_tree.nodes.new('ShaderNodeMixShader')
- node_tree.links.new(out_node.inputs[0], mix_shader.outputs[0])
- node_tree.links.new(mix_shader.inputs[0], tex_image.outputs[1])
- node_tree.links.new(mix_shader.inputs[2], bsdf_diffuse.outputs[0])
- node_tree.links.new(mix_shader.inputs[1], bsdf_transparent.outputs[0])
- node_tree.links.new(bsdf_diffuse.inputs[0], tex_image.outputs[0])
-
- elif self.shader == 'EMISSION_BSDF_TRANSPARENT':
- emission = node_tree.nodes.new('ShaderNodeEmission')
- lightpath = node_tree.nodes.new('ShaderNodeLightPath')
- bsdf_transparent = node_tree.nodes.new('ShaderNodeBsdfTransparent')
- mix_shader = node_tree.nodes.new('ShaderNodeMixShader')
- node_tree.links.new(out_node.inputs[0], mix_shader.outputs[0])
- node_tree.links.new(mix_shader.inputs[0], tex_image.outputs[1])
- node_tree.links.new(mix_shader.inputs[2], emission.outputs[0])
- node_tree.links.new(mix_shader.inputs[1], bsdf_transparent.outputs[0])
node_tree.links.new(emission.inputs[0], tex_image.outputs[0])
node_tree.links.new(emission.inputs[1], lightpath.outputs[0])
+ if self.use_transparency:
+ bsdf_transparent = node_tree.nodes.new('ShaderNodeBsdfTransparent')
+ mix_shader = node_tree.nodes.new('ShaderNodeMixShader')
+ node_tree.links.new(out_node.inputs[0], mix_shader.outputs[0])
+ node_tree.links.new(mix_shader.inputs[0], tex_image.outputs[1])
+ node_tree.links.new(mix_shader.inputs[2], emission.outputs[0])
+ node_tree.links.new(mix_shader.inputs[1], bsdf_transparent.outputs[0])
+ else:
+ node_tree.links.new(out_node.inputs[0], emission.outputs[0])
auto_align_nodes(node_tree)
return material
diff --git a/io_import_scene_unreal_psa_psk.py b/io_import_scene_unreal_psa_psk.py
index e18aca93..a7997863 100644
--- a/io_import_scene_unreal_psa_psk.py
+++ b/io_import_scene_unreal_psa_psk.py
@@ -19,11 +19,11 @@
bl_info = {
"name": "Import Unreal Skeleton Mesh (.psk)/Animation Set (psa)",
"author": "Darknet, flufy3d, camg188",
- "version": (2, 2),
+ "version": (2, 2, 0),
"blender": (2, 64, 0),
"location": "File > Import > Skeleton Mesh (.psk)/Animation Set (psa)",
"description": "Import Skeleleton Mesh/Animation Data",
- "warning": "",
+ "warning": "may produce errors, fix in progress",
"wiki_url": "http://wiki.blender.org/index.php/Extensions:2.5/Py/"
"Scripts/Import-Export/Unreal_psk_psa",
"category": "Import-Export",
diff --git a/io_mesh_stl/__init__.py b/io_mesh_stl/__init__.py
index b08b3f7c..fc8bced2 100644
--- a/io_mesh_stl/__init__.py
+++ b/io_mesh_stl/__init__.py
@@ -109,7 +109,7 @@ class ImportSTL(Operator, ImportHelper, IOSTLOrientationHelper):
use_scene_unit = BoolProperty(
name="Scene Unit",
description="Apply current scene's unit (as defined by unit scale) to imported data",
- default=True,
+ default=False,
)
use_facet_normal = BoolProperty(
@@ -163,6 +163,11 @@ class ExportSTL(Operator, ExportHelper, IOSTLOrientationHelper):
filename_ext = ".stl"
filter_glob = StringProperty(default="*.stl", options={'HIDDEN'})
+ use_selection = BoolProperty(
+ name="Selection Only",
+ description="Export selected objects only",
+ default=False,
+ )
global_scale = FloatProperty(
name="Scale",
min=0.01, max=1000.0,
@@ -184,7 +189,15 @@ class ExportSTL(Operator, ExportHelper, IOSTLOrientationHelper):
description="Apply the modifiers before saving",
default=True,
)
+ batch_mode = EnumProperty(
+ name="Batch Mode",
+ items=(('OFF', "Off", "All data in one file"),
+ ('OBJECT', "Object", "Each object as a file"),
+ ))
+ @property
+ def check_extension(self):
+ return self.batch_mode == 'OFF'
def execute(self, context):
from . import stl_utils
@@ -193,14 +206,20 @@ class ExportSTL(Operator, ExportHelper, IOSTLOrientationHelper):
from mathutils import Matrix
keywords = self.as_keywords(ignore=("axis_forward",
"axis_up",
+ "use_selection",
"global_scale",
"check_existing",
"filter_glob",
"use_scene_unit",
"use_mesh_modifiers",
+ "batch_mode"
))
scene = context.scene
+ if self.use_selection:
+ data_seq = context.selected_objects
+ else:
+ data_seq = scene.objects
# Take into account scene's unit scale, so that 1 inch in Blender gives 1 inch elsewhere! See T42000.
global_scale = self.global_scale
@@ -211,11 +230,19 @@ class ExportSTL(Operator, ExportHelper, IOSTLOrientationHelper):
to_up=self.axis_up,
).to_4x4() * Matrix.Scale(global_scale, 4)
- faces = itertools.chain.from_iterable(
- blender_utils.faces_from_mesh(ob, global_matrix, self.use_mesh_modifiers)
- for ob in context.selected_objects)
-
- stl_utils.write_stl(faces=faces, **keywords)
+ if self.batch_mode == 'OFF':
+ faces = itertools.chain.from_iterable(
+ blender_utils.faces_from_mesh(ob, global_matrix, self.use_mesh_modifiers)
+ for ob in data_seq)
+
+ stl_utils.write_stl(faces=faces, **keywords)
+ elif self.batch_mode == 'OBJECT':
+ prefix = os.path.splitext(self.filepath)[0]
+ keywords_temp = keywords.copy()
+ for ob in data_seq:
+ faces = blender_utils.faces_from_mesh(ob, global_matrix, self.use_mesh_modifiers)
+ keywords_temp["filepath"] = prefix + bpy.path.clean_name(ob.name) + ".stl"
+ stl_utils.write_stl(faces=faces, **keywords_temp)
return {'FINISHED'}
diff --git a/io_mesh_uv_layout/__init__.py b/io_mesh_uv_layout/__init__.py
index d8d08cd1..30dff949 100644
--- a/io_mesh_uv_layout/__init__.py
+++ b/io_mesh_uv_layout/__init__.py
@@ -21,13 +21,12 @@
bl_info = {
"name": "UV Layout",
"author": "Campbell Barton, Matt Ebb",
- "version": (1, 1),
- "blender": (2, 62, 0),
+ "version": (1, 1, 1),
+ "blender": (2, 75, 0),
"location": "Image-Window > UVs > Export UV Layout",
"description": "Export the UV layout as a 2D graphic",
"warning": "",
- "wiki_url": "http://wiki.blender.org/index.php/Extensions:2.6/Py/"
- "Scripts/Import-Export/UV_Layout",
+ "wiki_url": "http://wiki.blender.org/index.php/Extensions:2.6/Py/Scripts/Import-Export/UV_Layout",
"support": 'OFFICIAL',
"category": "Import-Export",
}
diff --git a/io_mesh_uv_layout/export_uv_png.py b/io_mesh_uv_layout/export_uv_png.py
index 1168e335..5da543cc 100644
--- a/io_mesh_uv_layout/export_uv_png.py
+++ b/io_mesh_uv_layout/export_uv_png.py
@@ -131,6 +131,8 @@ def write(fw, mesh_source, image_width, image_height, opacity, face_iter_func):
scene.render.image_settings.file_format = 'PNG'
scene.render.filepath = filepath
+ scene.update()
+
data_context = {"blend_data": bpy.context.blend_data, "scene": scene}
bpy.ops.render.render(data_context, write_still=True)
diff --git a/io_scene_fbx/__init__.py b/io_scene_fbx/__init__.py
index 2a90471b..7e77ec69 100644
--- a/io_scene_fbx/__init__.py
+++ b/io_scene_fbx/__init__.py
@@ -21,8 +21,8 @@
bl_info = {
"name": "FBX format",
"author": "Campbell Barton, Bastien Montagne, Jens Restemeier",
- "version": (3, 5, 1),
- "blender": (2, 74, 0),
+ "version": (3, 7, 0),
+ "blender": (2, 76, 0),
"location": "File > Import-Export",
"description": "FBX IO meshes, UV's, vertex colors, materials, textures, cameras, lamps and actions",
"warning": "",
@@ -122,6 +122,11 @@ class ImportFBX(bpy.types.Operator, ImportHelper, IOFBXOrientationHelper):
default=0.0,
)
+ use_anim = BoolProperty(
+ name="Import Animation",
+ description="Import FBX animation",
+ default=True,
+ )
anim_offset = FloatProperty(
name="Animation Offset",
description="Offset to apply to animation during import, in frames",
@@ -199,6 +204,7 @@ class ImportFBX(bpy.types.Operator, ImportHelper, IOFBXOrientationHelper):
layout.prop(self, "use_custom_normals")
+ layout.prop(self, "use_anim")
layout.prop(self, "anim_offset")
layout.prop(self, "use_custom_props")
@@ -294,7 +300,7 @@ class ExportFBX(bpy.types.Operator, ExportHelper, IOFBXOrientationHelper):
items=(('EMPTY', "Empty", ""),
('CAMERA', "Camera", ""),
('LAMP', "Lamp", ""),
- ('ARMATURE', "Armature", ""),
+ ('ARMATURE', "Armature", "WARNING: not supported in dupli/group instances"),
('MESH', "Mesh", ""),
('OTHER', "Other", "Other geometry types, like curve, metaball, etc. (converted to meshes)"),
),
@@ -415,7 +421,8 @@ class ExportFBX(bpy.types.Operator, ExportHelper, IOFBXOrientationHelper):
bake_anim_simplify_factor = FloatProperty(
name="Simplify",
description="How much to simplify baked values (0.0 to disable, the higher the more simplified)",
- min=0.0, max=10.0, # No simplification to up to 0.05 slope/100 max_frame_step.
+ min=0.0, max=100.0, # No simplification to up to 10% of current magnitude tolerance.
+ soft_min=0.0, soft_max=10.0,
default=1.0, # default: min slope: 0.005, max frame step: 10.
)
# Anim - 6.1
diff --git a/io_scene_fbx/data_types.py b/io_scene_fbx/data_types.py
index 3a505be8..129806c9 100644
--- a/io_scene_fbx/data_types.py
+++ b/io_scene_fbx/data_types.py
@@ -36,6 +36,10 @@ FLOAT64_ARRAY = b'd'[0]
BOOL_ARRAY = b'b'[0]
BYTE_ARRAY = b'c'[0]
+# Some other misc defines
+# Known combinations so far - supposed meaning: A = animatable, A+ = animated, U = UserProp
+# VALID_NUMBER_FLAGS = {b'A', b'A+', b'AU', b'A+U'} # Not used...
+
# array types - actual length may vary (depending on underlying C implementation)!
import array
diff --git a/io_scene_fbx/export_fbx.py b/io_scene_fbx/export_fbx.py
index 64d73908..ff39ff44 100644
--- a/io_scene_fbx/export_fbx.py
+++ b/io_scene_fbx/export_fbx.py
@@ -1907,9 +1907,11 @@ def save_single(operator, scene, filepath="",
# Warning for scaled, mesh objects with armatures
if abs(ob.scale[0] - 1.0) > 0.05 or abs(ob.scale[1] - 1.0) > 0.05 or abs(ob.scale[1] - 1.0) > 0.05:
- operator.report({'WARNING'}, "Object '%s' has a scale of (%.3f, %.3f, %.3f), " \
- "Armature deformation will not work as expected " \
- "(apply Scale to fix)" % ((ob.name,) + tuple(ob.scale)))
+ operator.report(
+ {'WARNING'},
+ "Object '%s' has a scale of (%.3f, %.3f, %.3f), "
+ "Armature deformation will not work as expected "
+ "(apply Scale to fix)" % (ob.name, *ob.scale))
else:
blenParentBoneName = armob = None
diff --git a/io_scene_fbx/export_fbx_bin.py b/io_scene_fbx/export_fbx_bin.py
index 35416831..7247caea 100644
--- a/io_scene_fbx/export_fbx_bin.py
+++ b/io_scene_fbx/export_fbx_bin.py
@@ -542,8 +542,13 @@ def fbx_data_element_custom_properties(props, bid):
elem_props_set(props, "p_integer", k.encode(), v, custom=True)
elif isinstance(v, float):
elem_props_set(props, "p_double", k.encode(), v, custom=True)
- elif list_val and len(list_val) == 3:
- elem_props_set(props, "p_vector", k.encode(), list_val, custom=True)
+ elif list_val:
+ if len(list_val) == 3:
+ elem_props_set(props, "p_vector", k.encode(), list_val, custom=True)
+ else:
+ elem_props_set(props, "p_string", k.encode(), str(list_val), custom=True)
+ else:
+ elem_props_set(props, "p_string", k.encode(), str(v), custom=True)
def fbx_data_empty_elements(root, empty, scene_data):
@@ -1856,7 +1861,6 @@ def fbx_animations_do(scene_data, ref_id, f_start, f_end, start_zero, objects=No
"""
bake_step = scene_data.settings.bake_anim_step
scene = scene_data.scene
- meshes = scene_data.data_meshes
force_keying = scene_data.settings.bake_anim_use_all_bones
force_sek = scene_data.settings.bake_anim_force_startend_keying
@@ -2008,7 +2012,7 @@ def fbx_animations(scene_data):
for strip in strips:
strip.mute = False
add_anim(animations, animated,
- fbx_animations_do(scene_data, strip, strip.frame_start, strip.frame_end, True))
+ fbx_animations_do(scene_data, strip, strip.frame_start, strip.frame_end, True, force_keep=True))
strip.mute = True
for strip in strips:
@@ -2074,7 +2078,8 @@ def fbx_animations(scene_data):
ob.animation_data.action = act
frame_start, frame_end = act.frame_range # sic!
add_anim(animations, animated,
- fbx_animations_do(scene_data, (ob, act), frame_start, frame_end, True, {ob_obj}, True))
+ fbx_animations_do(scene_data, (ob, act), frame_start, frame_end, True,
+ objects={ob_obj}, force_keep=True))
# Ugly! :/
if pbones_matrices is not ...:
for pbo, mat in zip(ob.pose.bones, pbones_matrices):
@@ -2104,6 +2109,7 @@ def fbx_data_from_scene(scene, settings):
Do some pre-processing over scene's data...
"""
objtypes = settings.object_types
+ dp_objtypes = objtypes - {'ARMATURE'} # Armatures are not supported as dupli instances currently...
perfmon = PerfMon()
perfmon.level_up()
@@ -2122,6 +2128,8 @@ def fbx_data_from_scene(scene, settings):
# Duplis...
ob_obj.dupli_list_create(scene, 'RENDER')
for dp_obj in ob_obj.dupli_list:
+ if dp_obj.type not in dp_objtypes:
+ continue
objects[dp_obj] = None
ob_obj.dupli_list_clear()
@@ -2143,9 +2151,16 @@ def fbx_data_from_scene(scene, settings):
if ob_obj.type not in BLENDER_OBJECT_TYPES_MESHLIKE:
continue
ob = ob_obj.bdata
- if ob in data_meshes: # Happens with dupli instances.
- continue
use_org_data = True
+ org_ob_obj = None
+
+ # Do not want to systematically recreate a new mesh for dupliobject instances, kind of break purpose of those.
+ if ob_obj.is_dupli:
+ org_ob_obj = ObjectWrapper(ob) # We get the "real" object wrapper from that dupli instance.
+ if org_ob_obj in data_meshes:
+ data_meshes[ob_obj] = data_meshes[org_ob_obj]
+ continue
+
if settings.use_mesh_modifiers or ob.type in BLENDER_OTHER_OBJECT_TYPES:
use_org_data = False
tmp_mods = []
@@ -2168,33 +2183,46 @@ def fbx_data_from_scene(scene, settings):
if use_org_data:
data_meshes[ob_obj] = (get_blenderID_key(ob.data), ob.data, False)
+ # In case "real" source object of that dupli did not yet still existed in data_meshes, create it now!
+ if org_ob_obj is not None:
+ data_meshes[org_ob_obj] = data_meshes[ob_obj]
+
perfmon.step("FBX export prepare: Wrapping ShapeKeys...")
# ShapeKeys.
data_deformers_shape = OrderedDict()
geom_mat_co = settings.global_matrix if settings.bake_space_transform else None
- for me_obj, (me_key, me, _org) in data_meshes.items():
- if not (me.shape_keys and me.shape_keys.key_blocks):
+ for me_key, me, _free in data_meshes.values():
+ if not (me.shape_keys and len(me.shape_keys.key_blocks) > 1): # We do not want basis-only relative skeys...
+ continue
+ if me in data_deformers_shape:
continue
shapes_key = get_blender_mesh_shape_key(me)
+ # We gather all vcos first, since some skeys may be based on others...
_cos = array.array(data_types.ARRAY_FLOAT64, (0.0,)) * len(me.vertices) * 3
me.vertices.foreach_get("co", _cos)
v_cos = tuple(vcos_transformed_gen(_cos, geom_mat_co))
- for shape in me.shape_keys.key_blocks:
+ sk_cos = {}
+ for shape in me.shape_keys.key_blocks[1:]:
+ shape.data.foreach_get("co", _cos)
+ sk_cos[shape] = tuple(vcos_transformed_gen(_cos, geom_mat_co))
+ sk_base = me.shape_keys.key_blocks[0]
+
+ for shape in me.shape_keys.key_blocks[1:]:
# Only write vertices really different from org coordinates!
# XXX FBX does not like empty shapes (makes Unity crash e.g.), so we have to do this here... :/
shape_verts_co = []
shape_verts_idx = []
- shape.data.foreach_get("co", _cos)
- sv_cos = tuple(vcos_transformed_gen(_cos, geom_mat_co))
- for idx, (sv_co, v_co) in enumerate(zip(sv_cos, v_cos)):
- if similar_values_iter(sv_co, v_co):
+ sv_cos = sk_cos[shape]
+ ref_cos = v_cos if shape.relative_key == sk_base else sk_cos[shape.relative_key]
+ for idx, (sv_co, ref_co) in enumerate(zip(sv_cos, ref_cos)):
+ if similar_values_iter(sv_co, ref_co):
# Note: Maybe this is a bit too simplistic, should we use real shape base here? Though FBX does not
# have this at all... Anyway, this should cover most common cases imho.
continue
- shape_verts_co.extend(Vector(sv_co) - Vector(v_co))
+ shape_verts_co.extend(Vector(sv_co) - Vector(ref_co))
shape_verts_idx.append(idx)
if not shape_verts_co:
continue
@@ -2329,7 +2357,7 @@ def fbx_data_from_scene(scene, settings):
templates[b"Bone"] = fbx_template_def_bone(scene, settings, nbr_users=nbr_users)
if data_meshes:
- nbr = len(data_meshes)
+ nbr = len({me_key for me_key, _me, _free in data_meshes.values()})
if data_deformers_shape:
nbr += sum(len(shapes[2]) for shapes in data_deformers_shape.values())
templates[b"Geometry"] = fbx_template_def_geometry(scene, settings, nbr_users=nbr)
@@ -2536,9 +2564,11 @@ def fbx_scene_data_cleanup(scene_data):
Some final cleanup...
"""
# Delete temp meshes.
- for _key, me, free in scene_data.data_meshes.values():
- if free:
+ done_meshes = set()
+ for me_key, me, free in scene_data.data_meshes.values():
+ if free and me_key not in done_meshes:
bpy.data.meshes.remove(me)
+ done_meshes.add(me_key)
# ##### Top-level FBX elements generators. #####
@@ -2551,6 +2581,11 @@ def fbx_header_elements(root, scene_data, time=None):
app_vendor = "Blender Foundation"
app_name = "Blender (stable FBX IO)"
app_ver = bpy.app.version_string
+
+ import addon_utils
+ import sys
+ addon_ver = addon_utils.module_bl_info(sys.modules[__package__])['version']
+
# ##### Start of FBXHeaderExtension element.
header_ext = elem_empty(root, b"FBXHeaderExtension")
@@ -2573,7 +2608,8 @@ def fbx_header_elements(root, scene_data, time=None):
elem_data_single_int32(elem, b"Second", time.second)
elem_data_single_int32(elem, b"Millisecond", time.microsecond // 1000)
- elem_data_single_string_unicode(header_ext, b"Creator", "%s - %s" % (app_name, app_ver))
+ elem_data_single_string_unicode(header_ext, b"Creator", "%s - %s - %d.%d.%d"
+ % (app_name, app_ver, addon_ver[0], addon_ver[1], addon_ver[2]))
# 'SceneInfo' seems mandatory to get a valid FBX file...
# TODO use real values!
@@ -2617,7 +2653,8 @@ def fbx_header_elements(root, scene_data, time=None):
"".format(time.year, time.month, time.day, time.hour, time.minute, time.second,
time.microsecond * 1000))
- elem_data_single_string_unicode(root, b"Creator", "%s - %s" % (app_name, app_ver))
+ elem_data_single_string_unicode(root, b"Creator", "%s - %s - %d.%d.%d"
+ % (app_name, app_ver, addon_ver[0], addon_ver[1], addon_ver[2]))
# ##### Start of GlobalSettings element.
global_settings = elem_empty(root, b"GlobalSettings")
@@ -2732,7 +2769,8 @@ def fbx_objects_elements(root, scene_data):
for cam in scene_data.data_cameras:
fbx_data_camera_elements(objects, cam, scene_data)
- perfmon.step("FBX export fetch meshes (%d)..." % len(scene_data.data_meshes))
+ perfmon.step("FBX export fetch meshes (%d)..."
+ % len({me_key for me_key, _me, _free in scene_data.data_meshes.values()}))
done_meshes = set()
for me_obj in scene_data.data_meshes:
diff --git a/io_scene_fbx/fbx_utils.py b/io_scene_fbx/fbx_utils.py
index cd5f08f8..35a1b6f9 100644
--- a/io_scene_fbx/fbx_utils.py
+++ b/io_scene_fbx/fbx_utils.py
@@ -770,49 +770,43 @@ class AnimationCurveNodeWrapper:
def simplify(self, fac, step, force_keep=False):
"""
Simplifies sampled curves by only enabling samples when:
- * their values differ significantly from the previous sample ones, or
- * their values differ significantly from the previous validated sample ones, or
- * the previous validated samples are far enough from current ones in time.
+ * their values relatively differ from the previous sample ones.
"""
if not self._keys:
return
# So that, with default factor and step values (1), we get:
- max_frame_diff = step * fac * 10 # max step of 10 frames.
- value_diff_fac = fac / 1000 # min value evolution: 0.1% of whole range.
- min_significant_diff = 1.0e-5
+ min_reldiff_fac = fac * 1.0e-3 # min relative value evolution: 0.1% of current 'order of magnitude'.
+ min_absdiff_fac = 0.1 # A tenth of reldiff...
keys = self._keys
- extremums = tuple((min(values), max(values)) for values in zip(*(k[1] for k in keys)))
- min_diffs = tuple(max((mx - mn) * value_diff_fac, min_significant_diff) for mn, mx in extremums)
-
p_currframe, p_key, p_key_write = keys[0]
- p_keyed = [(p_currframe - max_frame_diff, val) for val in p_key]
+ p_keyed = list(p_key)
are_keyed = [False] * len(p_key)
for currframe, key, key_write in keys:
for idx, (val, p_val) in enumerate(zip(key, p_key)):
key_write[idx] = False
- p_keyedframe, p_keyedval = p_keyed[idx]
+ p_keyedval = p_keyed[idx]
if val == p_val:
# Never write keyframe when value is exactly the same as prev one!
continue
- if abs(val - p_val) >= min_diffs[idx]:
+ # This is contracted form of relative + absolute-near-zero difference:
+ # absdiff = abs(a - b)
+ # if absdiff < min_reldiff_fac * min_absdiff_fac:
+ # return False
+ # return (absdiff / ((abs(a) + abs(b)) / 2)) > min_reldiff_fac
+ # Note that we ignore the '/ 2' part here, since it's not much significant for us.
+ if abs(val - p_val) > (min_reldiff_fac * max(abs(val) + abs(p_val), min_absdiff_fac)):
# If enough difference from previous sampled value, key this value *and* the previous one!
key_write[idx] = True
p_key_write[idx] = True
- p_keyed[idx] = (currframe, val)
+ p_keyed[idx] = val
+ are_keyed[idx] = True
+ elif abs(val - p_keyedval) > (min_reldiff_fac * max((abs(val) + abs(p_keyedval)), min_absdiff_fac)):
+ # Else, if enough difference from previous keyed value, key this value only!
+ key_write[idx] = True
+ p_keyed[idx] = val
are_keyed[idx] = True
- else:
- frame_diff = currframe - p_keyedframe
- val_diff = abs(val - p_keyedval)
- if ((val_diff >= min_diffs[idx]) or
- ((val_diff >= min_significant_diff) and (frame_diff >= max_frame_diff))):
- # Else, if enough difference from previous keyed value
- # (or any significant difference and max gap between keys is reached),
- # key this value only!
- key_write[idx] = True
- p_keyed[idx] = (currframe, val)
- are_keyed[idx] = True
p_currframe, p_key, p_key_write = currframe, key, key_write
# If we write nothing (action doing nothing) and are in 'force_keep' mode, we key everything! :P
@@ -1199,19 +1193,15 @@ class ObjectWrapper(metaclass=MetaObjectWrapper):
# #### Duplis...
def dupli_list_create(self, scene, settings='PREVIEW'):
- if self._tag == 'OB':
- # Sigh, why raise exception here? :/
- try:
- self.bdata.dupli_list_create(scene, settings)
- except:
- pass
+ if self._tag == 'OB' and self.bdata.is_duplicator:
+ self.bdata.dupli_list_create(scene, settings)
def dupli_list_clear(self):
- if self._tag == 'OB':
+ if self._tag == 'OB'and self.bdata.is_duplicator:
self.bdata.dupli_list_clear()
def get_dupli_list(self):
- if self._tag == 'OB':
+ if self._tag == 'OB'and self.bdata.is_duplicator:
return (ObjectWrapper(dup) for dup in self.bdata.dupli_list)
return ()
dupli_list = property(get_dupli_list)
@@ -1263,7 +1253,7 @@ FBXImportSettings = namedtuple("FBXImportSettings", (
"bake_space_transform", "global_matrix_inv", "global_matrix_inv_transposed",
"use_custom_normals", "use_cycles", "use_image_search",
"use_alpha_decals", "decal_offset",
- "anim_offset",
+ "use_anim", "anim_offset",
"use_custom_props", "use_custom_props_enum_as_string",
"cycles_material_wrap_map", "image_cache",
"ignore_leaf_bones", "force_connect_children", "automatic_bone_orientation", "bone_correction_matrix",
diff --git a/io_scene_fbx/import_fbx.py b/io_scene_fbx/import_fbx.py
index 252e532d..244bc746 100644
--- a/io_scene_fbx/import_fbx.py
+++ b/io_scene_fbx/import_fbx.py
@@ -184,7 +184,6 @@ def elem_props_get_color_rgb(elem, elem_prop_id, default=None):
# FBX version 7300
assert(elem_prop.props[1] == b'Color')
assert(elem_prop.props[2] == b'')
- assert(elem_prop.props[3] in {b'A', b'A+', b'AU'})
else:
assert(elem_prop.props[1] == b'ColorRGB')
assert(elem_prop.props[2] == b'Color')
@@ -211,7 +210,6 @@ def elem_props_get_number(elem, elem_prop_id, default=None):
else:
assert(elem_prop.props[1] == b'Number')
assert(elem_prop.props[2] == b'')
- assert(elem_prop.props[3] in {b'A', b'A+', b'AU'})
# we could allow other number types
assert(elem_prop.props_type[4] == data_types.FLOAT64)
@@ -275,7 +273,6 @@ def elem_props_get_visibility(elem, elem_prop_id, default=None):
assert(elem_prop.props[0] == elem_prop_id)
assert(elem_prop.props[1] == b'Visibility')
assert(elem_prop.props[2] == b'')
- assert(elem_prop.props[3] in {b'A', b'A+', b'AU'})
# we could allow other number types
assert(elem_prop.props_type[4] == data_types.FLOAT64)
@@ -710,7 +707,7 @@ def blen_read_geom_array_setattr(generator, blen_data, blen_attr, fbx_data, stri
def check_skip(blen_idx, fbx_idx):
nonlocal print_error
- if fbx_idx == -1:
+ if fbx_idx < 0: # Negative values mean 'skip'.
return True
if blen_idx > max_idx:
if print_error:
@@ -1574,7 +1571,7 @@ class FbxImportHelperNode:
# else find how best to rotate the bone to align the Y axis with the children
best_axis = (1, 0, 0)
if len(bone_children) == 1:
- vec = bone_children[0].bind_matrix.to_translation()
+ vec = bone_children[0].get_bind_matrix().to_translation()
best_axis = Vector((0, 0, 1 if vec[2] >= 0 else -1))
if abs(vec[0]) > abs(vec[1]):
if abs(vec[0]) > abs(vec[2]):
@@ -1583,7 +1580,7 @@ class FbxImportHelperNode:
best_axis = Vector((0, 1 if vec[1] >= 0 else -1, 0))
else:
# get the child directions once because they may be checked several times
- child_locs = (child.bind_matrix.to_translation() for child in bone_children)
+ child_locs = (child.get_bind_matrix().to_translation() for child in bone_children)
child_locs = tuple(loc.normalized() for loc in child_locs if loc.magnitude > 0.0)
# I'm not sure which one I like better...
@@ -1827,7 +1824,12 @@ class FbxImportHelperNode:
force_connect_children, connected = connect_ctx
# Do nothing if force connection is not enabled!
if force_connect_children and connected is not None and connected is not ...:
- par_bone.tail = par_tail = connected[0][0] / connected[0][1]
+ # Here again we have to be wary about zero-length bones!!!
+ par_tail = connected[0][0] / connected[0][1]
+ if (par_tail - par_bone.head).magnitude < 1e-2:
+ par_bone_vec = (par_bone.tail - par_bone.head).normalized()
+ par_tail = par_bone.head + par_bone_vec * 0.01
+ par_bone.tail = par_tail
for child_bone in connected[1]:
if similar_values_iter(par_tail, child_bone.head):
child_bone.use_connect = True
@@ -1844,7 +1846,7 @@ class FbxImportHelperNode:
bone_count = 0
for child in self.children:
if child.is_bone:
- bone_size += child.bind_matrix.to_translation().magnitude
+ bone_size += child.get_bind_matrix().to_translation().magnitude
bone_count += 1
if bone_count > 0:
bone_size /= bone_count
@@ -1923,6 +1925,9 @@ class FbxImportHelperNode:
# child is not a bone
obj = self.build_node_obj(fbx_tmpl, settings)
+ if obj is None:
+ return None
+
for child in self.children:
if child.ignore:
continue
@@ -2092,7 +2097,7 @@ class FbxImportHelperNode:
child_obj = child.build_skeleton_children(fbx_tmpl, settings, scene)
return arm
- elif self.fbx_elem:
+ elif self.fbx_elem and not self.is_bone:
obj = self.build_node_obj(fbx_tmpl, settings)
# walk through children
@@ -2163,7 +2168,8 @@ class FbxImportHelperNode:
# walk through children
for child in self.children:
child_obj = child.link_hierarchy(fbx_tmpl, settings, scene)
- child_obj.parent = obj
+ if child_obj:
+ child_obj.parent = obj
return obj
else:
@@ -2195,6 +2201,7 @@ def load(operator, context, filepath="",
use_image_search=False,
use_alpha_decals=False,
decal_offset=0.0,
+ use_anim=True,
anim_offset=1.0,
use_custom_props=True,
use_custom_props_enum_as_string=True,
@@ -2241,6 +2248,8 @@ def load(operator, context, filepath="",
operator.report({'ERROR'}, "Version %r unsupported, must be %r or later" % (version, 7100))
return {'CANCELLED'}
+ print("FBX version: %r" % version)
+
if bpy.ops.object.mode_set.poll():
bpy.ops.object.mode_set(mode='OBJECT', toggle=False)
@@ -2311,7 +2320,7 @@ def load(operator, context, filepath="",
custom_fps = elem_props_get_number(fbx_settings_props, b'CustomFrameRate', 25.0)
time_mode = elem_props_get_enum(fbx_settings_props, b'TimeMode')
real_fps = {eid: val for val, eid in FBX_FRAMERATES[1:]}.get(time_mode, custom_fps)
- if real_fps < 0.0:
+ if real_fps <= 0.0:
real_fps = 25.0
scene.render.fps = round(real_fps)
scene.render.fps_base = scene.render.fps / real_fps
@@ -2322,7 +2331,7 @@ def load(operator, context, filepath="",
bake_space_transform, global_matrix_inv, global_matrix_inv_transposed,
use_custom_normals, use_cycles, use_image_search,
use_alpha_decals, decal_offset,
- anim_offset,
+ use_anim, anim_offset,
use_custom_props, use_custom_props_enum_as_string,
cycles_material_wrap_map, image_cache,
ignore_leaf_bones, force_connect_children, automatic_bone_orientation, bone_correction_matrix,
@@ -2530,7 +2539,9 @@ def load(operator, context, filepath="",
assert(fbx_props[0] is not None)
transform_data = blen_read_object_transform_preprocess(fbx_props, fbx_obj, Matrix(), use_prepost_rot)
- is_bone = fbx_obj.props[2] == b'LimbNode' # Note: 'Root' "bones" are handled as (armature) objects.
+ # Note: 'Root' "bones" are handled as (armature) objects.
+ # Note: See T46912 for first FBX file I ever saw with 'Limb' bones - thought those were totally deprecated.
+ is_bone = fbx_obj.props[2] in {b'LimbNode', b'Limb'}
fbx_helper_nodes[a_uuid] = FbxImportHelperNode(fbx_obj, bl_data, transform_data, is_bone)
# add parent-child relations and add blender data to the node
@@ -2723,98 +2734,100 @@ def load(operator, context, filepath="",
blend_shape_channels[bc_uuid] = keyblocks
_(); del _
- perfmon.step("FBX import: Animations...")
+ if use_anim:
+ perfmon.step("FBX import: Animations...")
- # Animation!
- def _():
- fbx_tmpl_astack = fbx_template_get((b'AnimationStack', b'FbxAnimStack'))
- fbx_tmpl_alayer = fbx_template_get((b'AnimationLayer', b'FbxAnimLayer'))
- stacks = {}
-
- # AnimationStacks.
- for as_uuid, fbx_asitem in fbx_table_nodes.items():
- fbx_asdata, _blen_data = fbx_asitem
- if fbx_asdata.id != b'AnimationStack' or fbx_asdata.props[2] != b'':
- continue
- stacks[as_uuid] = (fbx_asitem, {})
+ # Animation!
+ def _():
+ fbx_tmpl_astack = fbx_template_get((b'AnimationStack', b'FbxAnimStack'))
+ fbx_tmpl_alayer = fbx_template_get((b'AnimationLayer', b'FbxAnimLayer'))
+ stacks = {}
- # AnimationLayers (mixing is completely ignored for now, each layer results in an independent set of actions).
- def get_astacks_from_alayer(al_uuid):
- for as_uuid, as_ctype in fbx_connection_map.get(al_uuid, ()):
- if as_ctype.props[0] != b'OO':
+ # AnimationStacks.
+ for as_uuid, fbx_asitem in fbx_table_nodes.items():
+ fbx_asdata, _blen_data = fbx_asitem
+ if fbx_asdata.id != b'AnimationStack' or fbx_asdata.props[2] != b'':
continue
- fbx_asdata, _bl_asdata = fbx_table_nodes.get(as_uuid, (None, None))
- if (fbx_asdata is None or fbx_asdata.id != b'AnimationStack' or
- fbx_asdata.props[2] != b'' or as_uuid not in stacks):
- continue
- yield as_uuid
- for al_uuid, fbx_alitem in fbx_table_nodes.items():
- fbx_aldata, _blen_data = fbx_alitem
- if fbx_aldata.id != b'AnimationLayer' or fbx_aldata.props[2] != b'':
- continue
- for as_uuid in get_astacks_from_alayer(al_uuid):
- _fbx_asitem, alayers = stacks[as_uuid]
- alayers[al_uuid] = (fbx_alitem, {})
-
- # AnimationCurveNodes (also the ones linked to actual animated data!).
- curvenodes = {}
- for acn_uuid, fbx_acnitem in fbx_table_nodes.items():
- fbx_acndata, _blen_data = fbx_acnitem
- if fbx_acndata.id != b'AnimationCurveNode' or fbx_acndata.props[2] != b'':
- continue
- cnode = curvenodes[acn_uuid] = {}
- items = []
- for n_uuid, n_ctype in fbx_connection_map.get(acn_uuid, ()):
- if n_ctype.props[0] != b'OP':
- continue
- lnk_prop = n_ctype.props[3]
- if lnk_prop in {b'Lcl Translation', b'Lcl Rotation', b'Lcl Scaling'}:
- # n_uuid can (????) be linked to root '0' node, instead of a mere object node... See T41712.
- ob = fbx_helper_nodes.get(n_uuid, None)
- if ob is None:
+ stacks[as_uuid] = (fbx_asitem, {})
+
+ # AnimationLayers
+ # (mixing is completely ignored for now, each layer results in an independent set of actions).
+ def get_astacks_from_alayer(al_uuid):
+ for as_uuid, as_ctype in fbx_connection_map.get(al_uuid, ()):
+ if as_ctype.props[0] != b'OO':
continue
- items.append((ob, lnk_prop))
- elif lnk_prop == b'DeformPercent': # Shape keys.
- keyblocks = blend_shape_channels.get(n_uuid)
- if keyblocks is None:
+ fbx_asdata, _bl_asdata = fbx_table_nodes.get(as_uuid, (None, None))
+ if (fbx_asdata is None or fbx_asdata.id != b'AnimationStack' or
+ fbx_asdata.props[2] != b'' or as_uuid not in stacks):
continue
- items += [(kb, lnk_prop) for kb in keyblocks]
- for al_uuid, al_ctype in fbx_connection_map.get(acn_uuid, ()):
- if al_ctype.props[0] != b'OO':
- continue
- fbx_aldata, _blen_aldata = fbx_alitem = fbx_table_nodes.get(al_uuid, (None, None))
- if fbx_aldata is None or fbx_aldata.id != b'AnimationLayer' or fbx_aldata.props[2] != b'':
+ yield as_uuid
+ for al_uuid, fbx_alitem in fbx_table_nodes.items():
+ fbx_aldata, _blen_data = fbx_alitem
+ if fbx_aldata.id != b'AnimationLayer' or fbx_aldata.props[2] != b'':
continue
for as_uuid in get_astacks_from_alayer(al_uuid):
- _fbx_alitem, anim_items = stacks[as_uuid][1][al_uuid]
- assert(_fbx_alitem == fbx_alitem)
- for item, item_prop in items:
- # No need to keep curvenode FBX data here, contains nothing useful for us.
- anim_items.setdefault(item, {})[acn_uuid] = (cnode, item_prop)
-
- # AnimationCurves (real animation data).
- for ac_uuid, fbx_acitem in fbx_table_nodes.items():
- fbx_acdata, _blen_data = fbx_acitem
- if fbx_acdata.id != b'AnimationCurve' or fbx_acdata.props[2] != b'':
- continue
- for acn_uuid, acn_ctype in fbx_connection_map.get(ac_uuid, ()):
- if acn_ctype.props[0] != b'OP':
- continue
- fbx_acndata, _bl_acndata = fbx_table_nodes.get(acn_uuid, (None, None))
- if (fbx_acndata is None or fbx_acndata.id != b'AnimationCurveNode' or
- fbx_acndata.props[2] != b'' or acn_uuid not in curvenodes):
+ _fbx_asitem, alayers = stacks[as_uuid]
+ alayers[al_uuid] = (fbx_alitem, {})
+
+ # AnimationCurveNodes (also the ones linked to actual animated data!).
+ curvenodes = {}
+ for acn_uuid, fbx_acnitem in fbx_table_nodes.items():
+ fbx_acndata, _blen_data = fbx_acnitem
+ if fbx_acndata.id != b'AnimationCurveNode' or fbx_acndata.props[2] != b'':
continue
- # Note this is an infamous simplification of the compound props stuff,
- # seems to be standard naming but we'll probably have to be smarter to handle more exotic files?
- channel = {b'd|X': 0, b'd|Y': 1, b'd|Z': 2, b'd|DeformPercent': 0}.get(acn_ctype.props[3], None)
- if channel is None:
+ cnode = curvenodes[acn_uuid] = {}
+ items = []
+ for n_uuid, n_ctype in fbx_connection_map.get(acn_uuid, ()):
+ if n_ctype.props[0] != b'OP':
+ continue
+ lnk_prop = n_ctype.props[3]
+ if lnk_prop in {b'Lcl Translation', b'Lcl Rotation', b'Lcl Scaling'}:
+ # n_uuid can (????) be linked to root '0' node, instead of a mere object node... See T41712.
+ ob = fbx_helper_nodes.get(n_uuid, None)
+ if ob is None:
+ continue
+ items.append((ob, lnk_prop))
+ elif lnk_prop == b'DeformPercent': # Shape keys.
+ keyblocks = blend_shape_channels.get(n_uuid)
+ if keyblocks is None:
+ continue
+ items += [(kb, lnk_prop) for kb in keyblocks]
+ for al_uuid, al_ctype in fbx_connection_map.get(acn_uuid, ()):
+ if al_ctype.props[0] != b'OO':
+ continue
+ fbx_aldata, _blen_aldata = fbx_alitem = fbx_table_nodes.get(al_uuid, (None, None))
+ if fbx_aldata is None or fbx_aldata.id != b'AnimationLayer' or fbx_aldata.props[2] != b'':
+ continue
+ for as_uuid in get_astacks_from_alayer(al_uuid):
+ _fbx_alitem, anim_items = stacks[as_uuid][1][al_uuid]
+ assert(_fbx_alitem == fbx_alitem)
+ for item, item_prop in items:
+ # No need to keep curvenode FBX data here, contains nothing useful for us.
+ anim_items.setdefault(item, {})[acn_uuid] = (cnode, item_prop)
+
+ # AnimationCurves (real animation data).
+ for ac_uuid, fbx_acitem in fbx_table_nodes.items():
+ fbx_acdata, _blen_data = fbx_acitem
+ if fbx_acdata.id != b'AnimationCurve' or fbx_acdata.props[2] != b'':
continue
- curvenodes[acn_uuid][ac_uuid] = (fbx_acitem, channel)
+ for acn_uuid, acn_ctype in fbx_connection_map.get(ac_uuid, ()):
+ if acn_ctype.props[0] != b'OP':
+ continue
+ fbx_acndata, _bl_acndata = fbx_table_nodes.get(acn_uuid, (None, None))
+ if (fbx_acndata is None or fbx_acndata.id != b'AnimationCurveNode' or
+ fbx_acndata.props[2] != b'' or acn_uuid not in curvenodes):
+ continue
+ # Note this is an infamous simplification of the compound props stuff,
+ # seems to be standard naming but we'll probably have to be smarter to handle more exotic files?
+ channel = {b'd|X': 0, b'd|Y': 1, b'd|Z': 2, b'd|DeformPercent': 0}.get(acn_ctype.props[3], None)
+ if channel is None:
+ continue
+ curvenodes[acn_uuid][ac_uuid] = (fbx_acitem, channel)
- # And now that we have sorted all this, apply animations!
- blen_read_animations(fbx_tmpl_astack, fbx_tmpl_alayer, stacks, scene, settings.anim_offset)
+ # And now that we have sorted all this, apply animations!
+ blen_read_animations(fbx_tmpl_astack, fbx_tmpl_alayer, stacks, scene, settings.anim_offset)
- _(); del _
+ _(); del _
perfmon.step("FBX import: Assign materials...")
diff --git a/io_scene_obj/__init__.py b/io_scene_obj/__init__.py
index f8c179ee..aff0b345 100644
--- a/io_scene_obj/__init__.py
+++ b/io_scene_obj/__init__.py
@@ -21,14 +21,12 @@
bl_info = {
"name": "Wavefront OBJ format",
"author": "Campbell Barton, Bastien Montagne",
- "version": (2, 2, 1),
- "blender": (2, 74, 0),
+ "version": (2, 3, 0),
+ "blender": (2, 76, 0),
"location": "File > Import-Export",
- "description": "Import-Export OBJ, Import OBJ mesh, UV's, "
- "materials and textures",
+ "description": "Import-Export OBJ, Import OBJ mesh, UV's, materials and textures",
"warning": "",
- "wiki_url": "http://wiki.blender.org/index.php/Extensions:2.6/Py/"
- "Scripts/Import-Export/Wavefront_OBJ",
+ "wiki_url": "http://wiki.blender.org/index.php/Extensions:2.6/Py/Scripts/Import-Export/Wavefront_OBJ",
"support": 'OFFICIAL',
"category": "Import-Export"}
@@ -144,9 +142,9 @@ class ImportOBJ(bpy.types.Operator, ImportHelper, IOOBJOrientationHelper):
if bpy.data.is_saved and context.user_preferences.filepaths.use_relative_paths:
import os
- keywords["relpath"] = os.path.dirname((bpy.data.path_resolve("filepath", False).as_bytes()))
+ keywords["relpath"] = os.path.dirname(bpy.data.filepath)
- return import_obj.load(self, context, **keywords)
+ return import_obj.load(context, **keywords)
def draw(self, context):
layout = self.layout
@@ -305,7 +303,7 @@ class ExportOBJ(bpy.types.Operator, ExportHelper, IOOBJOrientationHelper):
).to_4x4())
keywords["global_matrix"] = global_matrix
- return export_obj.save(self, context, **keywords)
+ return export_obj.save(context, **keywords)
def menu_func_import(self, context):
diff --git a/io_scene_obj/export_obj.py b/io_scene_obj/export_obj.py
index 7399c2e1..26ca8a04 100644
--- a/io_scene_obj/export_obj.py
+++ b/io_scene_obj/export_obj.py
@@ -44,7 +44,7 @@ def mesh_triangulate(me):
def write_mtl(scene, filepath, path_mode, copy_set, mtl_dict):
- from mathutils import Color
+ from mathutils import Color, Vector
world = scene.world
if world:
@@ -90,6 +90,9 @@ def write_mtl(scene, filepath, path_mode, copy_set, mtl_dict):
fw('Ka %.6f %.6f %.6f\n' % (mat.ambient, mat.ambient, mat.ambient)) # Do not use world color!
fw('Kd %.6f %.6f %.6f\n' % (mat.diffuse_intensity * mat.diffuse_color)[:]) # Diffuse
fw('Ks %.6f %.6f %.6f\n' % (mat.specular_intensity * mat.specular_color)[:]) # Specular
+ # Emission, not in original MTL standard but seems pretty common, see T45766.
+ # XXX Blender has no color emission, it's using diffuse color instead...
+ fw('Ke %.6f %.6f %.6f\n' % (mat.emit * mat.diffuse_color)[:])
if hasattr(mat, "raytrace_transparency") and hasattr(mat.raytrace_transparency, "ior"):
fw('Ni %.6f\n' % mat.raytrace_transparency.ior) # Refraction index
else:
@@ -149,35 +152,43 @@ def write_mtl(scene, filepath, path_mode, copy_set, mtl_dict):
# texface overrides others
if (mtex.use_map_color_diffuse and (face_img is None) and
(mtex.use_map_warp is False) and (mtex.texture_coords != 'REFLECTION')):
- image_map["map_Kd"] = image
+ image_map["map_Kd"] = (mtex, image)
if mtex.use_map_ambient:
- image_map["map_Ka"] = image
+ image_map["map_Ka"] = (mtex, image)
# this is the Spec intensity channel but Ks stands for specular Color
'''
if mtex.use_map_specular:
- image_map["map_Ks"] = image
+ image_map["map_Ks"] = (mtex, image)
'''
if mtex.use_map_color_spec: # specular color
- image_map["map_Ks"] = image
+ image_map["map_Ks"] = (mtex, image)
if mtex.use_map_hardness: # specular hardness/glossiness
- image_map["map_Ns"] = image
+ image_map["map_Ns"] = (mtex, image)
if mtex.use_map_alpha:
- image_map["map_d"] = image
+ image_map["map_d"] = (mtex, image)
if mtex.use_map_translucency:
- image_map["map_Tr"] = image
+ image_map["map_Tr"] = (mtex, image)
if mtex.use_map_normal:
- image_map["map_Bump"] = image
+ image_map["map_Bump"] = (mtex, image)
if mtex.use_map_displacement:
- image_map["disp"] = image
+ image_map["disp"] = (mtex, image)
if mtex.use_map_color_diffuse and (mtex.texture_coords == 'REFLECTION'):
- image_map["refl"] = image
+ image_map["refl"] = (mtex, image)
if mtex.use_map_emit:
- image_map["map_Ke"] = image
+ image_map["map_Ke"] = (mtex, image)
- for key, image in sorted(image_map.items()):
+ for key, (mtex, image) in sorted(image_map.items()):
filepath = bpy_extras.io_utils.path_reference(image.filepath, source_dir, dest_dir,
path_mode, "", copy_set, image.library)
- fw('%s %s\n' % (key, repr(filepath)[1:-1]))
+ options = []
+ if key == "map_Bump":
+ if mtex.normal_factor != 1.0:
+ options.append('-bm %.6f' % mtex.normal_factor)
+ if mtex.offset != Vector((0.0, 0.0, 0.0)):
+ options.append('-o %.6f %.6f %.6f' % mtex.offset[:])
+ if mtex.scale != Vector((1.0, 1.0, 1.0)):
+ options.append('-s %.6f %.6f %.6f' % mtex.scale[:])
+ fw('%s %s %s\n' % (key, " ".join(options), repr(filepath)[1:-1]))
def test_nurbs_compat(ob):
@@ -349,18 +360,16 @@ def write_file(filepath, objects, scene,
subprogress1.step("Ignoring %s, dupli child..." % ob_main.name)
continue
- obs = []
+ obs = [(ob_main, ob_main.matrix_world)]
if ob_main.dupli_type != 'NONE':
# XXX
print('creating dupli_list on', ob_main.name)
ob_main.dupli_list_create(scene)
- obs = [(dob.object, dob.matrix) for dob in ob_main.dupli_list]
+ obs += [(dob.object, dob.matrix) for dob in ob_main.dupli_list]
# XXX debug print
- print(ob_main.name, 'has', len(obs), 'dupli children')
- else:
- obs = [(ob_main, ob_main.matrix_world)]
+ print(ob_main.name, 'has', len(obs) - 1, 'dupli children')
subprogress1.enter_substeps(len(obs))
for ob, ob_mat in obs:
@@ -415,9 +424,8 @@ def write_file(filepath, objects, scene,
if EXPORT_NORMALS and face_index_pairs:
me.calc_normals_split()
# No need to call me.free_normals_split later, as this mesh is deleted anyway!
- loops = me.loops
- else:
- loops = []
+
+ loops = me.loops
if (EXPORT_SMOOTH_GROUPS or EXPORT_SMOOTH_GROUPS_BITFLAGS) and face_index_pairs:
smooth_groups, smooth_groups_tot = me.calc_smooth_groups(EXPORT_SMOOTH_GROUPS_BITFLAGS)
@@ -504,7 +512,13 @@ def write_file(filepath, objects, scene,
uv_ls = uv_face_mapping[f_index] = []
for uv_index, l_index in enumerate(f.loop_indices):
uv = uv_layer[l_index].uv
- uv_key = veckey2d(uv)
+ # include the vertex index in the key so we don't share UV's between vertices,
+ # allowed by the OBJ spec but can cause issues for other importers, see: T47010.
+
+ # this works too, shared UV's for all verts
+ #~ uv_key = veckey2d(uv)
+ uv_key = loops[l_index].vertex_index, veckey2d(uv)
+
uv_val = uv_get(uv_key)
if uv_val is None:
uv_val = uv_dict[uv_key] = uv_unique_count
@@ -782,7 +796,9 @@ Currently the exporter lacks these features:
"""
-def save(operator, context, filepath="",
+def save(context,
+ filepath,
+ *,
use_triangles=False,
use_edges=True,
use_normals=False,
diff --git a/io_scene_obj/import_obj.py b/io_scene_obj/import_obj.py
index a9888602..7b065824 100644
--- a/io_scene_obj/import_obj.py
+++ b/io_scene_obj/import_obj.py
@@ -63,8 +63,8 @@ def obj_image_load(imagepath, DIR, recursive, relpath):
Mainly uses comprehensiveImageLoad
but tries to replace '_' with ' ' for Max's exporter replaces spaces with underscores.
"""
- if b'_' in imagepath:
- image = load_image(imagepath.replace(b'_', b' '), DIR, recursive=recursive, relpath=relpath)
+ if "_" in imagepath:
+ image = load_image(imagepath.replace("_", " "), DIR, recursive=recursive, relpath=relpath)
if image:
return image
@@ -81,10 +81,21 @@ def create_materials(filepath, relpath,
DIR = os.path.dirname(filepath)
context_material_vars = set()
- def load_material_image(blender_material, context_material_name, imagepath, type):
+ def load_material_image(blender_material, context_material_name, img_data, type):
"""
Set textures defined in .mtl file.
"""
+ imagepath = os.fsdecode(img_data[-1])
+ map_options = {}
+
+ curr_token = []
+ for token in img_data[:-1]:
+ if token.startswith(b'-'):
+ if curr_token:
+ map_options[curr_token[0]] = curr_token[1:]
+ curr_token[:] = []
+ curr_token.append(token)
+
texture = bpy.data.textures.new(name=type, type='IMAGE')
# Absolute path - c:\.. etc would work here
@@ -120,6 +131,14 @@ def create_materials(filepath, relpath,
mtex.texture_coords = 'UV'
mtex.use_map_color_spec = True
+ elif type == 'Ke':
+ mtex = blender_material.texture_slots.add()
+ mtex.use_map_color_diffuse = False
+
+ mtex.texture = texture
+ mtex.texture_coords = 'UV'
+ mtex.use_map_emit = True
+
elif type == 'Bump':
mtex = blender_material.texture_slots.add()
mtex.use_map_color_diffuse = False
@@ -128,6 +147,10 @@ def create_materials(filepath, relpath,
mtex.texture_coords = 'UV'
mtex.use_map_normal = True
+ bump_mult = map_options.get(b'-bm')
+ if bump_mult:
+ mtex.normal_factor = bump_mult[0]
+
elif type == 'D':
mtex = blender_material.texture_slots.add()
mtex.use_map_color_diffuse = False
@@ -156,14 +179,35 @@ def create_materials(filepath, relpath,
mtex.texture = texture
mtex.texture_coords = 'REFLECTION'
mtex.use_map_color_diffuse = True
+
+ map_type = map_options.get(b'-type')
+ if map_type and map_type != [b'sphere']:
+ print("WARNING, unsupported reflection type '%s', defaulting to 'sphere'"
+ "" % ' '.join(i.decode() for i in map_type))
+ mtex.mapping = 'SPHERE'
else:
raise Exception("invalid type %r" % type)
+ map_offset = map_options.get(b'-o')
+ map_scale = map_options.get(b'-s')
+ if map_offset:
+ mtex.offset.x = float(map_offset[0])
+ if len(map_offset) >= 2:
+ mtex.offset.y = float(map_offset[1])
+ if len(map_offset) >= 3:
+ mtex.offset.z = float(map_offset[2])
+ if map_scale:
+ mtex.scale.x = float(map_scale[0])
+ if len(map_scale) >= 2:
+ mtex.scale.y = float(map_scale[1])
+ if len(map_scale) >= 3:
+ mtex.scale.z = float(map_scale[2])
+
# Add an MTL with the same name as the obj if no MTLs are spesified.
- temp_mtl = os.path.splitext((os.path.basename(filepath)))[0] + b'.mtl'
+ temp_mtl = os.path.splitext((os.path.basename(filepath)))[0] + ".mtl"
- if os.path.exists(os.path.join(DIR, temp_mtl)) and temp_mtl not in material_libs:
- material_libs.append(temp_mtl)
+ if os.path.exists(os.path.join(DIR, temp_mtl)):
+ material_libs.add(temp_mtl)
del temp_mtl
# Create new materials
@@ -177,7 +221,7 @@ def create_materials(filepath, relpath,
#~ unique_materials[None] = None
#~ unique_material_images[None] = None
- for libname in material_libs:
+ for libname in sorted(material_libs):
# print(libname)
mtlpath = os.path.join(DIR, libname)
if not os.path.exists(mtlpath):
@@ -190,6 +234,7 @@ def create_materials(filepath, relpath,
do_glass = False
do_fresnel = False
do_raytrace = False
+ emit_colors = [0.0, 0.0, 0.0]
# print('\t\tloading mtl: %e' % mtlpath)
context_material = None
@@ -203,8 +248,14 @@ def create_materials(filepath, relpath,
line_id = line_split[0].lower()
if line_id == b'newmtl':
- # Finalize preview mat, if any.
+ # Finalize previous mat, if any.
if context_material:
+ emit_value = sum(emit_colors) / 3.0
+ if emit_value > 1e-6:
+ # We have to adapt it to diffuse color too...
+ emit_value /= sum(context_material.diffuse_color) / 3.0
+ context_material.emit = emit_value
+
if not do_ambient:
context_material.ambient = 0.0
@@ -243,6 +294,7 @@ def create_materials(filepath, relpath,
context_material = unique_materials.get(context_material_name)
context_material_vars.clear()
+ emit_colors[:] = [0.0, 0.0, 0.0]
do_ambient = True
do_highlight = False
do_reflection = False
@@ -267,6 +319,10 @@ def create_materials(filepath, relpath,
context_material.specular_color = (
float_func(line_split[1]), float_func(line_split[2]), float_func(line_split[3]))
context_material.specular_intensity = 1.0
+ elif line_id == b'ke':
+ # We cannot set context_material.emit right now, we need final diffuse color as well for this.
+ emit_colors[:] = [
+ float_func(line_split[1]), float_func(line_split[2]), float_func(line_split[3])]
elif line_id == b'ns':
context_material.specular_hardness = int((float_func(line_split[1]) * 0.51) + 1)
elif line_id == b'ni': # Refraction index (between 1 and 3).
@@ -340,35 +396,39 @@ def create_materials(filepath, relpath,
pass
elif line_id == b'map_ka':
- img_filepath = line_value(line.split())
- if img_filepath:
- load_material_image(context_material, context_material_name, img_filepath, 'Ka')
+ img_data = line.split()[1:]
+ if img_data:
+ load_material_image(context_material, context_material_name, img_data, 'Ka')
elif line_id == b'map_ks':
- img_filepath = line_value(line.split())
- if img_filepath:
- load_material_image(context_material, context_material_name, img_filepath, 'Ks')
+ img_data = line.split()[1:]
+ if img_data:
+ load_material_image(context_material, context_material_name, img_data, 'Ks')
elif line_id == b'map_kd':
- img_filepath = line_value(line.split())
- if img_filepath:
- load_material_image(context_material, context_material_name, img_filepath, 'Kd')
+ img_data = line.split()[1:]
+ if img_data:
+ load_material_image(context_material, context_material_name, img_data, 'Kd')
+ elif line_id == b'map_ke':
+ img_data = line.split()[1:]
+ if img_data:
+ load_material_image(context_material, context_material_name, img_data, 'Ke')
elif line_id in {b'map_bump', b'bump'}: # 'bump' is incorrect but some files use it.
- img_filepath = line_value(line.split())
- if img_filepath:
- load_material_image(context_material, context_material_name, img_filepath, 'Bump')
+ img_data = line.split()[1:]
+ if img_data:
+ load_material_image(context_material, context_material_name, img_data, 'Bump')
elif line_id in {b'map_d', b'map_tr'}: # Alpha map - Dissolve
- img_filepath = line_value(line.split())
- if img_filepath:
- load_material_image(context_material, context_material_name, img_filepath, 'D')
+ img_data = line.split()[1:]
+ if img_data:
+ load_material_image(context_material, context_material_name, img_data, 'D')
elif line_id in {b'map_disp', b'disp'}: # displacementmap
- img_filepath = line_value(line.split())
- if img_filepath:
- load_material_image(context_material, context_material_name, img_filepath, 'disp')
+ img_data = line.split()[1:]
+ if img_data:
+ load_material_image(context_material, context_material_name, img_data, 'disp')
elif line_id in {b'map_refl', b'refl'}: # reflectionmap
- img_filepath = line_value(line.split())
- if img_filepath:
- load_material_image(context_material, context_material_name, img_filepath, 'refl')
+ img_data = line.split()[1:]
+ if img_data:
+ load_material_image(context_material, context_material_name, img_data, 'refl')
else:
print("\t%r:%r (ignored)" % (filepath, line))
mtl.close()
@@ -506,43 +566,46 @@ def create_mesh(new_objects,
# NGons into triangles
if face_invalid_blenpoly:
- from bpy_extras.mesh_utils import ngon_tessellate
- ngon_face_indices = ngon_tessellate(verts_loc, face_vert_loc_indices)
- faces.extend([([face_vert_loc_indices[ngon[0]],
- face_vert_loc_indices[ngon[1]],
- face_vert_loc_indices[ngon[2]],
- ],
- [face_vert_nor_indices[ngon[0]],
- face_vert_nor_indices[ngon[1]],
- face_vert_nor_indices[ngon[2]],
- ] if face_vert_nor_indices else [],
- [face_vert_tex_indices[ngon[0]],
- face_vert_tex_indices[ngon[1]],
- face_vert_tex_indices[ngon[2]],
- ] if face_vert_tex_indices else [],
- context_material,
- context_smooth_group,
- context_object,
- [],
- )
- for ngon in ngon_face_indices]
- )
- tot_loops += 3 * len(ngon_face_indices)
-
- # edges to make ngons
- edge_users = set()
- for ngon in ngon_face_indices:
- prev_vidx = face_vert_loc_indices[ngon[-1]]
- for ngidx in ngon:
- vidx = face_vert_loc_indices[ngidx]
- if vidx == prev_vidx:
- continue # broken OBJ... Just skip.
- edge_key = (prev_vidx, vidx) if (prev_vidx < vidx) else (vidx, prev_vidx)
- prev_vidx = vidx
- if edge_key in edge_users:
- fgon_edges.add(edge_key)
- else:
- edge_users.add(edge_key)
+ # ignore triangles with invalid indices
+ if len(face_vert_loc_indices) > 3:
+ from bpy_extras.mesh_utils import ngon_tessellate
+ ngon_face_indices = ngon_tessellate(verts_loc, face_vert_loc_indices)
+ faces.extend([([face_vert_loc_indices[ngon[0]],
+ face_vert_loc_indices[ngon[1]],
+ face_vert_loc_indices[ngon[2]],
+ ],
+ [face_vert_nor_indices[ngon[0]],
+ face_vert_nor_indices[ngon[1]],
+ face_vert_nor_indices[ngon[2]],
+ ] if face_vert_nor_indices else [],
+ [face_vert_tex_indices[ngon[0]],
+ face_vert_tex_indices[ngon[1]],
+ face_vert_tex_indices[ngon[2]],
+ ] if face_vert_tex_indices else [],
+ context_material,
+ context_smooth_group,
+ context_object,
+ [],
+ )
+ for ngon in ngon_face_indices]
+ )
+ tot_loops += 3 * len(ngon_face_indices)
+
+ # edges to make ngons
+ if len(ngon_face_indices) > 1:
+ edge_users = set()
+ for ngon in ngon_face_indices:
+ prev_vidx = face_vert_loc_indices[ngon[-1]]
+ for ngidx in ngon:
+ vidx = face_vert_loc_indices[ngidx]
+ if vidx == prev_vidx:
+ continue # broken OBJ... Just skip.
+ edge_key = (prev_vidx, vidx) if (prev_vidx < vidx) else (vidx, prev_vidx)
+ prev_vidx = vidx
+ if edge_key in edge_users:
+ fgon_edges.add(edge_key)
+ else:
+ edge_users.add(edge_key)
faces.pop(f_idx)
else:
@@ -799,7 +862,9 @@ def get_float_func(filepath):
return float
-def load(operator, context, filepath,
+def load(context,
+ filepath,
+ *,
global_clamp_size=0.0,
use_smooth_groups=True,
use_edges=True,
@@ -808,7 +873,7 @@ def load(operator, context, filepath,
use_image_search=True,
use_groups_as_vgroups=False,
relpath=None,
- global_matrix=None,
+ global_matrix=None
):
"""
Called by the user interface or another script.
@@ -856,7 +921,7 @@ def load(operator, context, filepath,
verts_nor = []
verts_tex = []
faces = [] # tuples of the faces
- material_libs = [] # filanems to material libs this uses
+ material_libs = set() # filenames to material libs this OBJ uses
vertex_groups = {} # when use_groups_as_vgroups is true
# Get the string to float conversion func for this file- is 'float' for almost all files.
@@ -1036,7 +1101,7 @@ def load(operator, context, filepath,
elif line_start == b'mtllib': # usemap or usemat
# can have multiple mtllib filenames per line, mtllib can appear more than once,
# so make sure only occurrence of material exists
- material_libs = list(set(material_libs) | set(line.split()[1:]))
+ material_libs |= {os.fsdecode(f) for f in line.split()[1:]}
# Nurbs support
elif line_start == b'cstype':
@@ -1096,7 +1161,7 @@ def load(operator, context, filepath,
progress.step("Done, loading materials and images...")
- create_materials(filepath.encode(), relpath, material_libs, unique_materials,
+ create_materials(filepath, relpath, material_libs, unique_materials,
unique_material_images, use_image_search, float_func)
progress.step("Done, building geometries (verts:%i faces:%i materials: %i smoothgroups:%i) ..." %
diff --git a/io_scene_x3d/__init__.py b/io_scene_x3d/__init__.py
index 78851a62..d5c555a1 100644
--- a/io_scene_x3d/__init__.py
+++ b/io_scene_x3d/__init__.py
@@ -20,16 +20,16 @@
bl_info = {
"name": "Web3D X3D/VRML2 format",
- "author": "Campbell Barton, Bart, Bastien Montagne",
- "version": (1, 1, 0),
- "blender": (2, 74, 0),
+ "author": "Campbell Barton, Bart, Bastien Montagne, Seva Alekseyev",
+ "version": (1, 2, 0),
+ "blender": (2, 76, 0),
"location": "File > Import-Export",
"description": "Import-Export X3D, Import VRML2",
"warning": "",
- "wiki_url": "http://wiki.blender.org/index.php/Extensions:2.6/Py/"
- "Scripts/Import-Export/Web3D",
+ "wiki_url": "http://wiki.blender.org/index.php/Extensions:2.6/Py/Scripts/Import-Export/Web3D",
"support": 'OFFICIAL',
- "category": "Import-Export"}
+ "category": "Import-Export",
+}
if "bpy" in locals():
import importlib
@@ -78,7 +78,7 @@ class ImportX3D(bpy.types.Operator, ImportHelper, IOX3DOrientationHelper):
).to_4x4()
keywords["global_matrix"] = global_matrix
- return import_x3d.load(self, context, **keywords)
+ return import_x3d.load(context, **keywords)
class ExportX3D(bpy.types.Operator, ExportHelper, IOX3DOrientationHelper):
@@ -156,7 +156,7 @@ class ExportX3D(bpy.types.Operator, ExportHelper, IOX3DOrientationHelper):
).to_4x4() * Matrix.Scale(self.global_scale, 4)
keywords["global_matrix"] = global_matrix
- return export_x3d.save(self, context, **keywords)
+ return export_x3d.save(context, **keywords)
def menu_func_import(self, context):
diff --git a/io_scene_x3d/export_x3d.py b/io_scene_x3d/export_x3d.py
index e36b81e3..24bfbe1e 100644
--- a/io_scene_x3d/export_x3d.py
+++ b/io_scene_x3d/export_x3d.py
@@ -348,7 +348,7 @@ def export(file,
loc, rot, scale = matrix.decompose()
rot = rot.to_axis_angle()
- rot = rot[0].normalized()[:] + (rot[1], )
+ rot = (*rot[0].normalized(), rot[1])
ident_step = ident + (' ' * (-len(ident) + \
fw('%s<Viewpoint ' % ident)))
@@ -395,7 +395,7 @@ def export(file,
loc, rot, sca = matrix.decompose()
rot = rot.to_axis_angle()
- rot = rot[0][:] + (rot[1], )
+ rot = (*rot[0], rot[1])
fw(ident_step + 'translation="%.6f %.6f %.6f"\n' % loc[:])
# fw(ident_step + 'center="%.6f %.6f %.6f"\n' % (0, 0, 0))
@@ -1547,7 +1547,9 @@ def gzip_open_utf8(filepath, mode):
return file
-def save(operator, context, filepath="",
+def save(context,
+ filepath,
+ *,
use_selection=True,
use_mesh_modifiers=False,
use_triangulate=False,
@@ -1557,7 +1559,7 @@ def save(operator, context, filepath="",
use_h3d=False,
global_matrix=None,
path_mode='AUTO',
- name_decorations=True,
+ name_decorations=True
):
bpy.path.ensure_ext(filepath, '.x3dz' if use_compress else '.x3d')
diff --git a/io_scene_x3d/import_x3d.py b/io_scene_x3d/import_x3d.py
index aec4f890..d5494d6b 100644
--- a/io_scene_x3d/import_x3d.py
+++ b/io_scene_x3d/import_x3d.py
@@ -23,6 +23,13 @@ DEBUG = False
# This should work without a blender at all
import os
import shlex
+import math
+from math import sin, cos, pi
+
+texture_cache = {}
+material_cache = {}
+
+EPSILON = 0.0000001 # Very crude.
def imageConvertCompat(path):
@@ -374,7 +381,8 @@ class vrmlNode(object):
'DEF_NAMESPACE',
'ROUTE_IPO_NAMESPACE',
'PROTO_NAMESPACE',
- 'x3dNode')
+ 'x3dNode',
+ 'parsed')
def __init__(self, parent, node_type, lineno):
self.id = None
@@ -383,6 +391,7 @@ class vrmlNode(object):
self.blendObject = None
self.blendData = None
self.x3dNode = None # for x3d import only
+ self.parsed = None # We try to reuse objects in a smart way
if parent:
parent.children.append(self)
@@ -517,6 +526,11 @@ class vrmlNode(object):
# Check inside a list of optional types
return [child for child in self_real.children if child.getSpec() in node_spec]
+ def getChildrenBySpecCondition(self, cond): # spec could be Transform, Shape, Appearance
+ self_real = self.getRealNode()
+ # using getSpec functions allows us to use the spec of USE children that dont have their spec in their ID
+ return [child for child in self_real.children if cond(child.getSpec())]
+
def getChildBySpec(self, node_spec): # spec could be Transform, Shape, Appearance
# Use in cases where there is only ever 1 child of this type
ls = self.getChildrenBySpec(node_spec)
@@ -525,6 +539,14 @@ class vrmlNode(object):
else:
return None
+ def getChildBySpecCondition(self, cond): # spec could be Transform, Shape, Appearance
+ # Use in cases where there is only ever 1 child of this type
+ ls = self.getChildrenBySpecCondition(cond)
+ if ls:
+ return ls[0]
+ else:
+ return None
+
def getChildrenByName(self, node_name): # type could be geometry, children, appearance
self_real = self.getRealNode()
return [child for child in self_real.children if child.id if child.id[0] == node_name]
@@ -773,7 +795,7 @@ class vrmlNode(object):
def array_as_number(array_string):
array_data = []
try:
- array_data = [int(val) for val in array_string]
+ array_data = [int(val, 0) for val in array_string]
except:
try:
array_data = [float(val) for val in array_string]
@@ -1224,6 +1246,14 @@ class vrmlNode(object):
self.fields.append(value)
i += 1
+ # This is a prerequisite for DEF/USE-based material caching
+ def canHaveReferences(self):
+ return self.node_type == NODE_NORMAL and self.getDefName()
+
+ # This is a prerequisite for raw XML-based material caching. For now, only for X3D
+ def desc(self):
+ return None
+
def gzipOpen(path):
import gzip
@@ -1236,13 +1266,14 @@ def gzipOpen(path):
if data is None:
try:
- filehandle = open(path, 'rU')
+ filehandle = open(path, 'rU', encoding='utf-8', errors='surrogateescape')
data = filehandle.read()
filehandle.close()
except:
- pass
+ import traceback
+ traceback.print_exc()
else:
- data = data.decode('utf-8', "replace")
+ data = data.decode(encoding='utf-8', errors='surrogateescape')
return data
@@ -1306,6 +1337,7 @@ class x3dNode(vrmlNode):
def parse(self, IS_PROTO_DATA=False):
# print(self.x3dNode.tagName)
+ self.lineno = self.x3dNode.parse_position[0]
define = self.x3dNode.getAttributeNode('DEF')
if define:
@@ -1363,17 +1395,20 @@ class x3dNode(vrmlNode):
else:
return None
+ def canHaveReferences(self):
+ return self.x3dNode.getAttributeNode('DEF')
+
+ def desc(self):
+ return self.getRealNode().x3dNode.toxml()
+
def x3d_parse(path):
"""
Sets up the root node and returns it so load_web3d() can deal with the blender side of things.
Return root (x3dNode, '') or (None, 'Error String')
"""
-
- try:
- import xml.dom.minidom
- except:
- return None, 'Error, import XML parsing module (xml.dom.minidom) failed, install python'
+ import xml.dom.minidom
+ import xml.sax
'''
try: doc = xml.dom.minidom.parse(path)
@@ -1386,7 +1421,22 @@ def x3d_parse(path):
if data is None:
return None, 'Failed to open file: ' + path
- doc = xml.dom.minidom.parseString(data)
+ # Enable line number reporting in the parser - kinda brittle
+ def set_content_handler(dom_handler):
+ def startElementNS(name, tagName, attrs):
+ orig_start_cb(name, tagName, attrs)
+ cur_elem = dom_handler.elementStack[-1]
+ cur_elem.parse_position = (parser._parser.CurrentLineNumber, parser._parser.CurrentColumnNumber)
+
+ orig_start_cb = dom_handler.startElementNS
+ dom_handler.startElementNS = startElementNS
+ orig_set_content_handler(dom_handler)
+
+ parser = xml.sax.make_parser()
+ orig_set_content_handler = parser.setContentHandler
+ parser.setContentHandler = set_content_handler
+
+ doc = xml.dom.minidom.parseString(data, parser)
try:
x3dnode = doc.getElementsByTagName('X3D')[0]
@@ -1423,7 +1473,7 @@ for i, f in enumerate(files):
# -----------------------------------------------------------------------------------
import bpy
from bpy_extras import image_utils
-from mathutils import Vector, Matrix
+from mathutils import Vector, Matrix, Quaternion
GLOBALS = {'CIRCLE_DETAIL': 16}
@@ -1524,12 +1574,6 @@ def translateTexTransform(node, ancestry):
return new_mat
-
-# 90d X rotation
-import math
-MATRIX_Z_TO_Y = Matrix.Rotation(math.pi / 2.0, 4, 'X')
-
-
def getFinalMatrix(node, mtx, ancestry, global_matrix):
transform_nodes = [node_tx for node_tx in ancestry if node_tx.getSpec() == 'Transform']
@@ -1550,307 +1594,772 @@ def getFinalMatrix(node, mtx, ancestry, global_matrix):
return mtx
-def importMesh_IndexedFaceSet(geom, bpyima, ancestry):
- # print(geom.lineno, geom.id, vrmlNode.DEF_NAMESPACE.keys())
+# -----------------------------------------------------------------------------------
+# Mesh import utilities
+
+# Assumes that the mesh has tessfaces - doesn't support polygons.
+# Also assumes that tessfaces are all triangles.
+# Assumes that the sequence of the mesh vertices array matches
+# the source file. For indexed meshes, that's almost a given;
+# for nonindexed ones, this is a consideration.
+
+
+def importMesh_ApplyColors(bpymesh, geom, ancestry):
+ colors = geom.getChildBySpec(['ColorRGBA', 'Color'])
+ if colors:
+ if colors.getSpec() == 'ColorRGBA':
+ # Array of arrays; no need to flatten
+ rgb = [c[:3] for c
+ in colors.getFieldAsArray('color', 4, ancestry)]
+ else:
+ rgb = colors.getFieldAsArray('color', 3, ancestry)
+ tc = bpymesh.tessface_vertex_colors.new()
+ tc.data.foreach_set("color1", [i for face
+ in bpymesh.tessfaces
+ for i in rgb[face.vertices[0]]])
+ tc.data.foreach_set("color2", [i for face
+ in bpymesh.tessfaces
+ for i in rgb[face.vertices[1]]])
+ tc.data.foreach_set("color3", [i for face
+ in bpymesh.tessfaces
+ for i in rgb[face.vertices[2]]])
+
+
+# Assumes that the vertices have not been rearranged compared to the
+# source file order # or in the order assumed by the spec (e. g. in
+# Elevation, in rows by x).
+# Assumes tessfaces have been set, doesn't support polygons.
+def importMesh_ApplyNormals(bpymesh, geom, ancestry):
+ normals = geom.getChildBySpec('Normal')
+ if not normals:
+ return
- ccw = geom.getFieldAsBool('ccw', True, ancestry)
- ifs_colorPerVertex = geom.getFieldAsBool('colorPerVertex', True, ancestry) # per vertex or per face
- ifs_normalPerVertex = geom.getFieldAsBool('normalPerVertex', True, ancestry)
+ per_vertex = geom.getFieldAsBool('normalPerVertex', True, ancestry)
+ vectors = normals.getFieldAsArray('vector', 0, ancestry)
+ if per_vertex:
+ bpymesh.vertices.foreach_set("normal", vectors)
+ else:
+ bpymesh.tessfaces.foreach_set("normal", vectors)
- # This is odd how point is inside Coordinate
- # VRML not x3d
- #coord = geom.getChildByName('coord') # 'Coordinate'
+# Reads the standard Coordinate object - common for all mesh elements
+# Feeds the vertices in the mesh.
+# Rearranging the vertex order is a bad idea - other elements
+# in X3D might rely on it, if you need to rearrange, please play with
+# vertex indices in the tessfaces/polygons instead.
+#
+# Vertex culling that we have in IndexedFaceSet is an unfortunate exception,
+# brought forth by a very specific issue.
+def importMesh_ReadVertices(bpymesh, geom, ancestry):
+ # We want points here as a flat array, but the caching logic in
+ # IndexedFaceSet presumes a 2D one.
+ # The case for caching is stronger over there.
+ coord = geom.getChildBySpec('Coordinate')
+ points = coord.getFieldAsArray('point', 0, ancestry)
+ bpymesh.vertices.add(len(points) // 3)
+ bpymesh.vertices.foreach_set("co", points)
+
+
+# Assumes the mesh only contains triangular tessfaces, and the order
+# of vertices matches the source file.
+# Relies upon texture coordinates in the X3D node; if a coordinate generation
+# algorithm for a geometry is in the spec (e. g. for ElevationGrid), it needs
+# to be implemeted by the geometry handler.
+#
+# Texture transform is applied in ProcessObject.
+def importMesh_ApplyTextureToTessfaces(bpymesh, geom, ancestry, bpyima):
+ if not bpyima:
+ return
- coord = geom.getChildBySpec('Coordinate') # works for x3d and vrml
+ tex_coord = geom.getChildBySpec('TextureCoordinate')
+ if not tex_coord:
+ return
- if coord:
- ifs_points = coord.getFieldAsArray('point', 3, ancestry)
- else:
- coord = []
+ coord_points = tex_coord.getFieldAsArray('point', 2, ancestry)
+ if not coord_points:
+ return
- if not coord:
- print('\tWarnint: IndexedFaceSet has no points')
- return None, ccw
+ d = bpymesh.tessface_uv_textures.new().data
+ for face in d: # No foreach_set for nonscalars
+ face.image = bpyima
+ uv = [i for face in bpymesh.tessfaces
+ for vno in range(3) for i in coord_points[face.vertices[vno]]]
+ d.foreach_set('uv', uv)
- ifs_faces = geom.getFieldAsArray('coordIndex', 0, ancestry)
- coords_tex = None
- if ifs_faces: # In rare cases this causes problems - no faces but UVs???
+# Common steps for all triangle meshes once the geometry has been set:
+# normals, vertex colors, and texture.
+def importMesh_FinalizeTriangleMesh(bpymesh, geom, ancestry, bpyima):
+ importMesh_ApplyNormals(bpymesh, geom, ancestry)
+ importMesh_ApplyColors(bpymesh, geom, ancestry)
+ importMesh_ApplyTextureToTessfaces(bpymesh, geom, ancestry, bpyima)
+ bpymesh.validate()
+ bpymesh.update()
+ return bpymesh
- # WORKS - VRML ONLY
- # coords_tex = geom.getChildByName('texCoord')
- coords_tex = geom.getChildBySpec('TextureCoordinate')
- if coords_tex:
- ifs_texpoints = [(0, 0)] # EEKADOODLE - vertex start at 1
- ifs_texpoints.extend(coords_tex.getFieldAsArray('point', 2, ancestry))
- ifs_texfaces = geom.getFieldAsArray('texCoordIndex', 0, ancestry)
+# Assumes that the mesh is stored as polygons and loops, and the premade array
+# of texture coordinates follows the loop array.
+# The loops array must be flat.
+def importMesh_ApplyTextureToLoops(bpymesh, bpyima, loops):
+ d = bpymesh.uv_textures.new().data
+ for f in d:
+ f.image = bpyima
+ bpymesh.uv_layers[0].data.foreach_set('uv', loops)
- if not ifs_texpoints:
- # IF we have no coords, then dont bother
- coords_tex = None
- # WORKS - VRML ONLY
- # vcolor = geom.getChildByName('color')
- vcolor = geom.getChildBySpec('Color')
- vcolor_spot = None # spot color when we dont have an array of colors
- if vcolor:
- # float to char
- ifs_vcol = [(0, 0, 0)] # EEKADOODLE - vertex start at 1
- ifs_vcol.extend([col for col in vcolor.getFieldAsArray('color', 3, ancestry)])
- ifs_color_index = geom.getFieldAsArray('colorIndex', 0, ancestry)
+def flip(r, ccw):
+ return r if ccw else r[::-1]
+
+# -----------------------------------------------------------------------------------
+# Now specific geometry importers
- if not ifs_vcol:
- vcolor_spot = vcolor.getFieldAsFloatTuple('color', [], ancestry)
- # Convert faces into somthing blender can use
- edges = []
+def importMesh_IndexedTriangleSet(geom, ancestry, bpyima):
+ # Ignoring solid
+ # colorPerVertex is always true
+ ccw = geom.getFieldAsBool('ccw', True, ancestry)
+
+ bpymesh = bpy.data.meshes.new(name="XXX")
+ importMesh_ReadVertices(bpymesh, geom, ancestry)
+
+ # Read the faces
+ index = geom.getFieldAsArray('index', 0, ancestry)
+ n = len(index) // 3
+ if not ccw:
+ index = [index[3 * i + j] for i in range(n) for j in (1, 0, 2)]
+ bpymesh.tessfaces.add(n)
+ bpymesh.tessfaces.foreach_set("vertices", index)
+
+ return importMesh_FinalizeTriangleMesh(bpymesh, geom, ancestry, bpyima)
+
+
+def importMesh_IndexedTriangleStripSet(geom, ancestry, bpyima):
+ # Ignoring solid
+ # colorPerVertex is always true
+ cw = 0 if geom.getFieldAsBool('ccw', True, ancestry) else 1
+ bpymesh = bpy.data.meshes.new(name="IndexedTriangleStripSet")
+ importMesh_ReadVertices(bpymesh, geom, ancestry)
+
+ # Read the faces
+ index = geom.getFieldAsArray('index', 0, ancestry)
+ while index[-1] == -1:
+ del index[-1]
+ ngaps = sum(1 for i in index if i == -1)
+ bpymesh.tessfaces.add(len(index) - 2 - 3 * ngaps)
+
+ def triangles():
+ i = 0
+ odd = cw
+ while True:
+ yield index[i + odd]
+ yield index[i + 1 - odd]
+ yield index[i + 2]
+ odd = 1 - odd
+ i += 1
+ if i + 2 >= len(index):
+ return
+ if index[i + 2] == -1:
+ i += 3
+ odd = cw
+ bpymesh.tessfaces.foreach_set("vertices", [f for f in triangles()])
+ return importMesh_FinalizeTriangleMesh(bpymesh, geom, ancestry, bpyima)
+
+
+def importMesh_IndexedTriangleFanSet(geom, ancestry, bpyima):
+ # Ignoring solid
+ # colorPerVertex is always true
+ cw = 0 if geom.getFieldAsBool('ccw', True, ancestry) else 1
+ bpymesh = bpy.data.meshes.new(name="IndexedTriangleFanSet")
+ importMesh_ReadVertices(bpymesh, geom, ancestry)
+
+ # Read the faces
+ index = geom.getFieldAsArray('index', 0, ancestry)
+ while index[-1] == -1:
+ del index[-1]
+ ngaps = sum(1 for i in index if i == -1)
+ bpymesh.tessfaces.add(len(index) - 2 - 3 * ngaps)
+
+ def triangles():
+ i = 0
+ j = 1
+ while True:
+ yield index[i]
+ yield index[i + j + cw]
+ yield index[i + j + 1 - cw]
+ j += 1
+ if i + j + 1 >= len(index):
+ return
+ if index[i + j + 1] == -1:
+ i = j + 2
+ j = 1
+ bpymesh.tessfaces.foreach_set("vertices", [f for f in triangles()])
+ return importMesh_FinalizeTriangleMesh(bpymesh, geom, ancestry, bpyima)
- # All lists are aligned!
- faces = []
- faces_uv = [] # if ifs_texfaces is empty then the faces_uv will match faces exactly.
- faces_orig_index = [] # for ngons, we need to know our original index
- if coords_tex and ifs_texfaces:
- do_uvmap = True
+def importMesh_TriangleSet(geom, ancestry, bpyima):
+ # Ignoring solid
+ # colorPerVertex is always true
+ ccw = geom.getFieldAsBool('ccw', True, ancestry)
+ bpymesh = bpy.data.meshes.new(name="TriangleSet")
+ importMesh_ReadVertices(bpymesh, geom, ancestry)
+ n = len(bpymesh.vertices)
+ bpymesh.tessfaces.add(n // 3)
+ if ccw:
+ fv = [i for i in range(n)]
else:
- do_uvmap = False
-
- # current_face = [0] # pointer anyone
-
- def add_face(face, fuvs, orig_index):
- l = len(face)
- if l == 3 or l == 4:
- faces.append(face)
- # faces_orig_index.append(current_face[0])
- if do_uvmap:
- faces_uv.append(fuvs)
-
- faces_orig_index.append(orig_index)
- elif l == 2:
- edges.append(face)
- elif l > 4:
- for i in range(2, len(face)):
- faces.append([face[0], face[i - 1], face[i]])
- if do_uvmap:
- faces_uv.append([fuvs[0], fuvs[i - 1], fuvs[i]])
- faces_orig_index.append(orig_index)
- else:
- # faces with 1 verts? pfft!
- # still will affect index ordering
- pass
+ fv = [3 * i + j for i in range(n // 3) for j in (1, 0, 2)]
+ bpymesh.tessfaces.foreach_set("vertices", fv)
+
+ return importMesh_FinalizeTriangleMesh(bpymesh, geom, ancestry, bpyima)
+
+
+def importMesh_TriangleStripSet(geom, ancestry, bpyima):
+ # Ignoring solid
+ # colorPerVertex is always true
+ cw = 0 if geom.getFieldAsBool('ccw', True, ancestry) else 1
+ bpymesh = bpy.data.meshes.new(name="TriangleStripSet")
+ importMesh_ReadVertices(bpymesh, geom, ancestry)
+ counts = geom.getFieldAsArray('stripCount', 0, ancestry)
+ bpymesh.tessfaces.add(sum([n - 2 for n in counts]))
+
+ def triangles():
+ b = 0
+ for i in range(0, len(counts)):
+ for j in range(0, counts[i] - 2):
+ yield b + j + (j + cw) % 2
+ yield b + j + 1 - (j + cw) % 2
+ yield b + j + 2
+ b += counts[i]
+ bpymesh.tessfaces.foreach_set("vertices", [x for x in triangles()])
+
+ return importMesh_FinalizeTriangleMesh(bpymesh, geom, ancestry, bpyima)
+
+
+def importMesh_TriangleFanSet(geom, ancestry, bpyima):
+ # Ignoring solid
+ # colorPerVertex is always true
+ cw = 0 if geom.getFieldAsBool('ccw', True, ancestry) else 1
+ bpymesh = bpy.data.meshes.new(name="TriangleStripSet")
+ importMesh_ReadVertices(bpymesh, geom, ancestry)
+ counts = geom.getFieldAsArray('fanCount', 0, ancestry)
+ bpymesh.tessfaces.add(sum([n - 2 for n in counts]))
+
+ def triangles():
+ b = 0
+ for i in range(0, len(counts)):
+ for j in range(1, counts[i] - 1):
+ yield b
+ yield b + j + cw
+ yield b + j + 1 - cw
+ b += counts[i]
+ bpymesh.tessfaces.foreach_set("vertices", [x for x in triangles()])
+ return importMesh_FinalizeTriangleMesh(bpymesh, geom, ancestry, bpyima)
+
+
+def importMesh_IndexedFaceSet(geom, ancestry, bpyima):
+ # Saw the following structure in X3Ds: the first mesh has a huge set
+ # of vertices and a reasonably sized index. The rest of the meshes
+ # reference the Coordinate node from the first one, and have their
+ # own reasonably sized indices.
+ #
+ # In Blender, to the best of my knowledge, there's no way to reuse
+ # the vertex set between meshes. So we have culling logic instead -
+ # for each mesh, only leave vertices that are used for faces.
+
+ ccw = geom.getFieldAsBool('ccw', True, ancestry)
+ coord = geom.getChildBySpec('Coordinate')
+ if coord.reference:
+ points = coord.getRealNode().parsed
+ # We need unflattened coord array here, while
+ # importMesh_ReadVertices uses flattened. Can't cache both :(
+ # TODO: resolve that somehow, so that vertex set can be effectively
+ # reused between different mesh types?
+ else:
+ points = coord.getFieldAsArray('point', 3, ancestry)
+ if coord.canHaveReferences():
+ coord.parsed = points
+ index = geom.getFieldAsArray('coordIndex', 0, ancestry)
+
+ while index and index[-1] == -1:
+ del index[-1]
+
+ if len(points) >= 2 * len(index): # Need to cull
+ culled_points = []
+ cull = {} # Maps old vertex indices to new ones
+ uncull = [] # Maps new indices to the old ones
+ new_index = 0
+ else:
+ uncull = cull = None
+ faces = []
face = []
- fuvs = []
- orig_index = 0
- for i, fi in enumerate(ifs_faces):
- # ifs_texfaces and ifs_faces should be aligned
- if fi != -1:
- # face.append(int(fi)) # in rare cases this is a float
- # EEKADOODLE!!!
- # Annoyance where faces that have a zero index vert get rotated. This will then mess up UVs and VColors
- face.append(int(fi) + 1) # in rare cases this is a float, +1 because of stupid EEKADOODLE :/
-
- if do_uvmap:
- if i >= len(ifs_texfaces):
- print('\tWarning: UV Texface index out of range')
- fuvs.append(ifs_texfaces[0])
+ # Generate faces. Cull the vertices if necessary,
+ for i in index:
+ if i == -1:
+ if face:
+ faces.append(flip(face, ccw))
+ face = []
+ else:
+ if cull is not None:
+ if not(i in cull):
+ culled_points.append(points[i])
+ cull[i] = new_index
+ uncull.append(i)
+ i = new_index
+ new_index += 1
else:
- fuvs.append(ifs_texfaces[i])
+ i = cull[i]
+ face.append(i)
+ if face:
+ faces.append(flip(face, ccw)) # The last face
+
+ if cull:
+ points = culled_points
+
+ bpymesh = bpy.data.meshes.new(name="IndexedFaceSet")
+ bpymesh.from_pydata(points, [], faces)
+ # No validation here. It throws off the per-face stuff.
+
+ # Similar treatment for normal and color indices
+
+ def processPerVertexIndex(ind):
+ if ind:
+ # Deflatten into an array of arrays by face; the latter might
+ # need to be flipped
+ i = 0
+ verts_by_face = []
+ for f in faces:
+ verts_by_face.append(flip(ind[i:i + len(f)], ccw))
+ i += len(f) + 1
+ return verts_by_face
+ elif uncull:
+ return [[uncull[v] for v in f] for f in faces]
else:
- add_face(face, fuvs, orig_index)
- face = []
- if do_uvmap:
- fuvs = []
- orig_index += 1
-
- add_face(face, fuvs, orig_index)
- del add_face # dont need this func anymore
+ return faces # Reuse coordIndex, as per the spec
+
+ # Normals
+ normals = geom.getChildBySpec('Normal')
+ if normals:
+ per_vertex = geom.getFieldAsBool('normalPerVertex', True, ancestry)
+ vectors = normals.getFieldAsArray('vector', 3, ancestry)
+ normal_index = geom.getFieldAsArray('normalIndex', 0, ancestry)
+ if per_vertex:
+ co = [co for f in processPerVertexIndex(normal_index)
+ for v in f for co in vectors[v]]
+ bpymesh.vertices.foreach_set("normal", co)
+ else:
+ co = [co for (i, f) in enumerate(faces) for j in f
+ for co in vectors[normal_index[i] if normal_index else i]]
+ bpymesh.polygons.foreach_set("normal", co)
+
+ # Apply vertex/face colors
+ colors = geom.getChildBySpec(['ColorRGBA', 'Color'])
+ if colors:
+ if colors.getSpec() == 'ColorRGBA':
+ # Array of arrays; no need to flatten
+ rgb = [c[:3] for c
+ in colors.getFieldAsArray('color', 4, ancestry)]
+ else:
+ rgb = colors.getFieldAsArray('color', 3, ancestry)
+
+ color_per_vertex = geom.getFieldAsBool('colorPerVertex',
+ True, ancestry)
+ color_index = geom.getFieldAsArray('colorIndex', 0, ancestry)
+
+ d = bpymesh.vertex_colors.new().data
+ if color_per_vertex:
+ cco = [cco for f in processPerVertexIndex(color_index)
+ for v in f for cco in rgb[v]]
+ elif color_index: # Color per face with index
+ cco = [cco for (i, f) in enumerate(faces) for j in f
+ for cco in rgb[color_index[i]]]
+ else: # Color per face without index
+ cco = [cco for (i, f) in enumerate(faces) for j in f
+ for cco in rgb[i]]
+ d.foreach_set('color', cco)
+
+ # Texture
+ if bpyima:
+ tex_coord = geom.getChildBySpec('TextureCoordinate')
+ if tex_coord:
+ tex_coord_points = tex_coord.getFieldAsArray('point', 2, ancestry)
+ tex_index = geom.getFieldAsArray('texCoordIndex', 0, ancestry)
+ tex_index = processPerVertexIndex(tex_index)
+ loops = [co for f in tex_index
+ for v in f for co in tex_coord_points[v]]
+ else:
+ x_min = x_max = y_min = y_max = z_min = z_max = None
+ for f in faces:
+ # Unused vertices don't participate in size; X3DOM does so
+ for v in f:
+ (x, y, z) = points[v]
+ if x_min is None or x < x_min:
+ x_min = x
+ if x_max is None or x > x_max:
+ x_max = x
+ if y_min is None or y < y_min:
+ y_min = y
+ if y_max is None or y > y_max:
+ y_max = y
+ if z_min is None or z < z_min:
+ z_min = z
+ if z_max is None or z > z_max:
+ z_max = z
+
+ mins = (x_min, y_min, z_min)
+ deltas = (x_max - x_min, y_max - y_min, z_max - z_min)
+ axes = [0, 1, 2]
+ axes.sort(key=lambda a: (-deltas[a], a))
+ # Tuple comparison breaks ties
+ (s_axis, t_axis) = axes[0:2]
+ s_min = mins[s_axis]
+ ds = deltas[s_axis]
+ t_min = mins[t_axis]
+ dt = deltas[t_axis]
+
+ def generatePointCoords(pt):
+ return (pt[s_axis] - s_min) / ds, (pt[t_axis] - t_min) / dt
+ loops = [co for f in faces for v in f
+ for co in generatePointCoords(points[v])]
+
+ importMesh_ApplyTextureToLoops(bpymesh, bpyima, loops)
- bpymesh = bpy.data.meshes.new(name="XXX")
+ bpymesh.validate()
+ bpymesh.update()
+ return bpymesh
- # EEKADOODLE
- bpymesh.vertices.add(1 + (len(ifs_points)))
- bpymesh.vertices.foreach_set("co", [0, 0, 0] + [a for v in ifs_points for a in v]) # XXX25 speed
- # print(len(ifs_points), faces, edges, ngons)
+def importMesh_ElevationGrid(geom, ancestry, bpyima):
+ height = geom.getFieldAsArray('height', 0, ancestry)
+ x_dim = geom.getFieldAsInt('xDimension', 0, ancestry)
+ x_spacing = geom.getFieldAsFloat('xSpacing', 1, ancestry)
+ z_dim = geom.getFieldAsInt('zDimension', 0, ancestry)
+ z_spacing = geom.getFieldAsFloat('zSpacing', 1, ancestry)
+ ccw = geom.getFieldAsBool('ccw', True, ancestry)
- try:
- bpymesh.tessfaces.add(len(faces))
- bpymesh.tessfaces.foreach_set("vertices_raw", [a for f in faces for a in (f + [0] if len(f) == 3 else f)]) # XXX25 speed
- except KeyError:
- print("one or more vert indices out of range. corrupt file?")
- #for f in faces:
- # bpymesh.tessfaces.extend(faces, smooth=True)
+ # The spec assumes a certain ordering of quads; outer loop by z, inner by x
+ bpymesh = bpy.data.meshes.new(name="ElevationGrid")
+ bpymesh.vertices.add(x_dim * z_dim)
+ co = [w for x in range(x_dim) for z in range(z_dim)
+ for w in (x * x_spacing, height[x_dim * z + x], z * z_spacing)]
+ bpymesh.vertices.foreach_set("co", co)
+
+ bpymesh.tessfaces.add((x_dim - 1) * (z_dim - 1))
+ # If the ccw is off, we flip the 2nd and the 4th vertices of each face.
+ # For quad tessfaces, it's important that the final vertex index is not 0
+ # (Blender treats it as a triangle then).
+ # So simply reversing the face is not an option.
+ verts = [i for x in range(x_dim - 1) for z in range(z_dim - 1)
+ for i in (z * x_dim + x,
+ z * x_dim + x + 1 if ccw else (z + 1) * x_dim + x,
+ (z + 1) * x_dim + x + 1,
+ (z + 1) * x_dim + x if ccw else z * x_dim + x + 1)]
+ bpymesh.tessfaces.foreach_set("vertices_raw", verts)
+
+ importMesh_ApplyNormals(bpymesh, geom, ancestry)
+ # ApplyColors won't work here; faces are quads, and also per-face
+ # coloring should be supported
+ colors = geom.getChildBySpec(['ColorRGBA', 'Color'])
+ if colors:
+ if colors.getSpec() == 'ColorRGBA':
+ rgb = [c[:3] for c
+ in colors.getFieldAsArray('color', 4, ancestry)]
+ # Array of arrays; no need to flatten
+ else:
+ rgb = colors.getFieldAsArray('color', 3, ancestry)
+
+ tc = bpymesh.tessface_vertex_colors.new()
+ tcd = tc.data
+ if geom.getFieldAsBool('colorPerVertex', True, ancestry):
+ # Per-vertex coloring
+ # Note the 2/4 flip here
+ tcd.foreach_set("color1", [c for x in range(x_dim - 1)
+ for z in range(z_dim - 1)
+ for c in rgb[z * x_dim + x]])
+ tcd.foreach_set("color2" if ccw else "color4",
+ [c for x in range(x_dim - 1)
+ for z in range(z_dim - 1)
+ for c in rgb[z * x_dim + x + 1]])
+ tcd.foreach_set("color3", [c for x in range(x_dim - 1)
+ for z in range(z_dim - 1)
+ for c in rgb[(z + 1) * x_dim + x + 1]])
+ tcd.foreach_set("color4" if ccw else "color2",
+ [c for x in range(x_dim - 1)
+ for z in range(z_dim - 1)
+ for c in rgb[(z + 1) * x_dim + x]])
+ else: # Coloring per face
+ colors = [c for x in range(x_dim - 1)
+ for z in range(z_dim - 1) for c in rgb[z * (x_dim - 1) + x]]
+ tcd.foreach_set("color1", colors)
+ tcd.foreach_set("color2", colors)
+ tcd.foreach_set("color3", colors)
+ tcd.foreach_set("color4", colors)
+
+ # Textures also need special treatment; it's all quads,
+ # and there's a builtin algorithm for coordinate generation
+ if bpyima:
+ tex_coord = geom.getChildBySpec('TextureCoordinate')
+ if tex_coord:
+ coord_points = tex_coord.getFieldAsArray('point', 2, ancestry)
+ else:
+ coord_points = [(i / (x_dim - 1), j / (z_dim - 1))
+ for i in range(x_dim)
+ for j in range(z_dim)]
+
+ d = bpymesh.tessface_uv_textures.new().data
+ for face in d: # No foreach_set for nonscalars
+ face.image = bpyima
+ # Rather than repeat the face/vertex algorithm from above, we read
+ # the vertex index back from tessfaces. Might be suboptimal.
+ uv = [i for face in bpymesh.tessfaces
+ for vno in range(4)
+ for i in coord_points[face.vertices[vno]]]
+ d.foreach_set('uv_raw', uv)
bpymesh.validate()
- # bpymesh.update() # cant call now, because it would convert tessface
-
- if len(bpymesh.tessfaces) != len(faces):
- print('\tWarning: adding faces did not work! file is invalid, not adding UVs or vcolors')
- bpymesh.update()
- return bpymesh, ccw
-
- # Apply UVs if we have them
- if not do_uvmap:
- faces_uv = faces # fallback, we didnt need a uvmap in the first place, fallback to the face/vert mapping.
- if coords_tex:
- #print(ifs_texpoints)
- # print(geom)
- uvlay = bpymesh.tessface_uv_textures.new()
-
- for i, f in enumerate(uvlay.data):
- f.image = bpyima
- fuv = faces_uv[i] # uv indices
- for j, uv in enumerate(f.uv):
- # print(fuv, j, len(ifs_texpoints))
- try:
- f.uv[j] = ifs_texpoints[fuv[j] + 1] # XXX25, speedup
- except:
- print('\tWarning: UV Index out of range')
- f.uv[j] = ifs_texpoints[0] # XXX25, speedup
+ bpymesh.update()
+ return bpymesh
- elif bpyima and len(bpymesh.tessfaces):
- # Oh Bugger! - we cant really use blenders ORCO for for texture space since texspace dosnt rotate.
- # we have to create VRML's coords as UVs instead.
- # VRML docs
- """
- If the texCoord field is NULL, a default texture coordinate mapping is calculated using the local
- coordinate system bounding box of the shape. The longest dimension of the bounding box defines the S coordinates,
- and the next longest defines the T coordinates. If two or all three dimensions of the bounding box are equal,
- ties shall be broken by choosing the X, Y, or Z dimension in that order of preference.
- The value of the S coordinate ranges from 0 to 1, from one end of the bounding box to the other.
- The T coordinate ranges between 0 and the ratio of the second greatest dimension of the bounding box to the greatest dimension.
- """
+def importMesh_Extrusion(geom, ancestry, bpyima):
+ # Interestingly, the spec doesn't allow for vertex/face colors in this
+ # element, nor for normals.
+ # Since coloring and normals are not supported here, and also large
+ # polygons for caps might be required, we shall use from_pydata().
- # Note, S,T == U,V
- # U gets longest, V gets second longest
- xmin, ymin, zmin = ifs_points[0]
- xmax, ymax, zmax = ifs_points[0]
- for co in ifs_points:
- x, y, z = co
- if x < xmin:
- xmin = x
- if y < ymin:
- ymin = y
- if z < zmin:
- zmin = z
-
- if x > xmax:
- xmax = x
- if y > ymax:
- ymax = y
- if z > zmax:
- zmax = z
-
- xlen = xmax - xmin
- ylen = ymax - ymin
- zlen = zmax - zmin
-
- depth_min = xmin, ymin, zmin
- depth_list = [xlen, ylen, zlen]
- depth_sort = depth_list[:]
- depth_sort.sort()
-
- depth_idx = [depth_list.index(val) for val in depth_sort]
-
- axis_u = depth_idx[-1]
- axis_v = depth_idx[-2] # second longest
-
- # Hack, swap these !!! TODO - Why swap??? - it seems to work correctly but should not.
- # axis_u,axis_v = axis_v,axis_u
-
- min_u = depth_min[axis_u]
- min_v = depth_min[axis_v]
- depth_u = depth_list[axis_u]
- depth_v = depth_list[axis_v]
-
- depth_list[axis_u]
-
- if axis_u == axis_v:
- # This should be safe because when 2 axies have the same length, the lower index will be used.
- axis_v += 1
-
- uvlay = bpymesh.tessface_uv_textures.new()
-
- # HACK !!! - seems to be compatible with Cosmo though.
- depth_v = depth_u = max(depth_v, depth_u)
-
- bpymesh_vertices = bpymesh.vertices[:]
- bpymesh_faces = bpymesh.tessfaces[:]
-
- for j, f in enumerate(uvlay.data):
- f.image = bpyima
- fuv = f.uv
- f_v = bpymesh_faces[j].vertices[:] # XXX25 speed
-
- for i, v in enumerate(f_v):
- co = bpymesh_vertices[v].co
- fuv[i] = (co[axis_u] - min_u) / depth_u, (co[axis_v] - min_v) / depth_v
-
- # Add vcote
- if vcolor:
- # print(ifs_vcol)
- collay = bpymesh.tessface_vertex_colors.new()
-
- for f_idx, f in enumerate(collay.data):
- fv = bpymesh.tessfaces[f_idx].vertices[:]
- if len(fv) == 3: # XXX speed
- fcol = f.color1, f.color2, f.color3
- else:
- fcol = f.color1, f.color2, f.color3, f.color4
- if ifs_colorPerVertex:
- for i, c in enumerate(fcol):
- color_index = fv[i] # color index is vert index
- if ifs_color_index:
- try:
- color_index = ifs_color_index[color_index]
- except:
- print('\tWarning: per vertex color index out of range')
- continue
+ ccw = geom.getFieldAsBool('ccw', True, ancestry)
+ begin_cap = geom.getFieldAsBool('beginCap', True, ancestry)
+ end_cap = geom.getFieldAsBool('endCap', True, ancestry)
+ cross = geom.getFieldAsArray('crossSection', 2, ancestry)
+ if not cross:
+ cross = ((1, 1), (1, -1), (-1, -1), (-1, 1), (1, 1))
+ spine = geom.getFieldAsArray('spine', 3, ancestry)
+ if not spine:
+ spine = ((0, 0, 0), (0, 1, 0))
+ orient = geom.getFieldAsArray('orientation', 4, ancestry)
+ if orient:
+ orient = [Quaternion(o[:3], o[3]).to_matrix()
+ if o[3] else None for o in orient]
+ scale = geom.getFieldAsArray('scale', 2, ancestry)
+ if scale:
+ scale = [Matrix(((s[0], 0, 0), (0, 1, 0), (0, 0, s[1])))
+ if s[0] != 1 or s[1] != 1 else None for s in scale]
+
+ # Special treatment for the closed spine and cross section.
+ # Let's save some memory by not creating identical but distinct vertices;
+ # later we'll introduce conditional logic to link the last vertex with
+ # the first one where necessary.
+ cross_closed = cross[0] == cross[-1]
+ if cross_closed:
+ cross = cross[:-1]
+ nc = len(cross)
+ cross = [Vector((c[0], 0, c[1])) for c in cross]
+ ncf = nc if cross_closed else nc - 1
+ # Face count along the cross; for closed cross, it's the same as the
+ # respective vertex count
+
+ spine_closed = spine[0] == spine[-1]
+ if spine_closed:
+ spine = spine[:-1]
+ ns = len(spine)
+ spine = [Vector(s) for s in spine]
+ nsf = ns if spine_closed else ns - 1
+
+ # This will be used for fallback, where the current spine point joins
+ # two collinear spine segments. No need to recheck the case of the
+ # closed spine/last-to-first point juncture; if there's an angle there,
+ # it would kick in on the first iteration of the main loop by spine.
+ def findFirstAngleNormal():
+ for i in range(1, ns - 1):
+ spt = spine[i]
+ z = (spine[i + 1] - spt).cross(spine[i - 1] - spt)
+ if z.length > EPSILON:
+ return z
+ # All the spines are collinear. Fallback to the rotated source
+ # XZ plane.
+ # TODO: handle the situation where the first two spine points match
+ v = spine[1] - spine[0]
+ orig_y = Vector((0, 1, 0))
+ orig_z = Vector((0, 0, 1))
+ if v.cross(orig_y).length >= EPSILON:
+ # Spine at angle with global y - rotate the z accordingly
+ orig_z.rotate(orig_y.rotation_difference(v))
+ return orig_z
+
+ verts = []
+ z = None
+ for i, spt in enumerate(spine):
+ if (i > 0 and i < ns - 1) or spine_closed:
+ snext = spine[(i + 1) % ns]
+ sprev = spine[(i - 1 + ns) % ns]
+ y = snext - sprev
+ vnext = snext - spt
+ vprev = sprev - spt
+ try_z = vnext.cross(vprev)
+ # Might be zero, then all kinds of fallback
+ if try_z.length > EPSILON:
+ if z is not None and try_z.dot(z) < 0:
+ try_z.negate()
+ z = try_z
+ elif not z: # No z, and no previous z.
+ # Look ahead, see if there's at least one point where
+ # spines are not collinear.
+ z = findFirstAngleNormal()
+ elif i == 0: # And non-crossed
+ snext = spine[i + 1]
+ y = snext - spt
+ z = findFirstAngleNormal()
+ else: # last point and not crossed
+ sprev = spine[i - 1]
+ y = spt - sprev
+ # If there's more than one point in the spine, z is already set.
+ # One point in the spline is an error anyway.
+
+ x = y.cross(z)
+ m = Matrix(((x.x, y.x, z.x), (x.y, y.y, z.y), (x.z, y.z, z.z)))
+ # Columns are the unit vectors for the xz plane for the cross-section
+ m.normalize()
+ if orient:
+ mrot = orient[i] if len(orient) > 1 else orient[0]
+ if mrot:
+ m *= mrot # Not sure about this. Counterexample???
+ if scale:
+ mscale = scale[i] if len(scale) > 1 else scale[0]
+ if mscale:
+ m *= mscale
+ # First the cross-section 2-vector is scaled,
+ # then applied to the xz plane unit vectors
+ for cpt in cross:
+ verts.append((spt + m * cpt).to_tuple())
+ # Could've done this with a single 4x4 matrix... Oh well
+
+ # The method from_pydata() treats correctly quads with final vertex
+ # index being zero.
+ # So we just flip the vertices if ccw is off.
- if color_index < len(ifs_vcol):
- c.r, c.g, c.b = ifs_vcol[color_index]
- else:
- #print('\tWarning: per face color index out of range')
- pass
- else:
- if vcolor_spot: # use 1 color, when ifs_vcol is []
- for c in fcol:
- c.r, c.g, c.b = vcolor_spot
- else:
- color_index = faces_orig_index[f_idx] # color index is face index
- #print(color_index, ifs_color_index)
- if ifs_color_index:
- if color_index >= len(ifs_color_index):
- print('\tWarning: per face color index out of range')
- color_index = 0
- else:
- color_index = ifs_color_index[color_index]
- # skip eedadoodle vert
- color_index += 1
- try:
- col = ifs_vcol[color_index]
- except IndexError:
- # TODO, look
- col = (1.0, 1.0, 1.0)
- for i, c in enumerate(fcol):
- c.r, c.g, c.b = col
+ faces = []
+ if begin_cap:
+ faces.append(flip([x for x in range(nc - 1, -1, -1)], ccw))
+
+ # Order of edges in the face: forward along cross, forward along spine,
+ # backward along cross, backward along spine, flipped if now ccw.
+ # This order is assumed later in the texture coordinate assignment;
+ # please don't change without syncing.
+
+ faces += [flip((
+ s * nc + c,
+ s * nc + (c + 1) % nc,
+ (s + 1) * nc + (c + 1) % nc,
+ (s + 1) * nc + c), ccw) for s in range(ns - 1) for c in range(ncf)]
+
+ if spine_closed:
+ # The faces between the last and the first spine poins
+ b = (ns - 1) * nc
+ faces += [flip((
+ b + c,
+ b + (c + 1) % nc,
+ (c + 1) % nc,
+ c), ccw) for c in range(ncf)]
+
+ if end_cap:
+ faces.append(flip([(ns - 1) * nc + x for x in range(0, nc)], ccw))
+
+ bpymesh = bpy.data.meshes.new(name="Extrusion")
+ bpymesh.from_pydata(verts, [], faces)
+
+ # Polygons and loops here, not tessfaces. The way we deal with
+ # textures in triangular meshes doesn't apply.
+ if bpyima:
+ # The structure of the loop array goes: cap, side, cap
+ if begin_cap or end_cap: # Need dimensions
+ x_min = x_max = z_min = z_max = None
+ for c in cross:
+ (x, z) = (c.x, c.z)
+ if x_min is None or x < x_min:
+ x_min = x
+ if x_max is None or x > x_max:
+ x_max = x
+ if z_min is None or z < z_min:
+ z_min = z
+ if z_max is None or z > z_max:
+ z_max = z
+ dx = x_max - x_min
+ dz = z_max - z_min
+ cap_scale = dz if dz > dx else dx
+
+ # Takes an index in the cross array, returns scaled
+ # texture coords for cap texturing purposes
+ def scaledLoopVertex(i):
+ c = cross[i]
+ return (c.x - x_min) / cap_scale, (c.z - z_min) / cap_scale
+
+ # X3DOM uses raw cap shape, not a scaled one. So we will, too.
+
+ loops = []
+ mloops = bpymesh.loops
+ if begin_cap: # vertex indices match the indices in cross
+ # Rely on the loops in the mesh; don't repeat the face
+ # generation logic here
+ loops += [co for i in range(nc)
+ for co in scaledLoopVertex(mloops[i].vertex_index)]
+
+ # Sides
+ # Same order of vertices as in face generation
+ # We don't rely on the loops in the mesh; instead,
+ # we repeat the face generation logic.
+ loops += [co for s in range(nsf)
+ for c in range(ncf)
+ for v in flip(((c / ncf, s / nsf),
+ ((c + 1) / ncf, s / nsf),
+ ((c + 1) / ncf, (s + 1) / nsf),
+ (c / ncf, (s + 1) / nsf)), ccw) for co in v]
+
+ if end_cap:
+ # Base loop index for end cap
+ lb = ncf * nsf * 4 + (nc if begin_cap else 0)
+ # Rely on the loops here too.
+ loops += [co for i in range(nc) for co
+ in scaledLoopVertex(mloops[lb + i].vertex_index % nc)]
+ importMesh_ApplyTextureToLoops(bpymesh, bpyima, loops)
+
+ bpymesh.validate(True)
+ bpymesh.update()
+ return bpymesh
- # XXX25
- # bpymesh.vertices.delete([0, ]) # EEKADOODLE
- bpymesh.update()
- bpymesh.validate()
+# -----------------------------------------------------------------------------------
+# Line and point sets
+
- return bpymesh, ccw
+def importMesh_LineSet(geom, ancestry, bpyima):
+ # TODO: line display properties are ignored
+ # Per-vertex color is ignored
+ coord = geom.getChildBySpec('Coordinate')
+ src_points = coord.getFieldAsArray('point', 3, ancestry)
+ # Array of 3; Blender needs arrays of 4
+ bpycurve = bpy.data.curves.new("LineSet", 'CURVE')
+ bpycurve.dimensions = '3D'
+ counts = geom.getFieldAsArray('vertexCount', 0, ancestry)
+ b = 0
+ for n in counts:
+ sp = bpycurve.splines.new('POLY')
+ sp.points.add(n - 1) # points already has one element
+
+ def points():
+ for x in src_points[b:b + n]:
+ yield x[0]
+ yield x[1]
+ yield x[2]
+ yield 0
+ sp.points.foreach_set('co', [x for x in points()])
+ b += n
+ return bpycurve
-def importMesh_IndexedLineSet(geom, ancestry):
+def importMesh_IndexedLineSet(geom, ancestry, _):
# VRML not x3d
- #coord = geom.getChildByName('coord') # 'Coordinate'
+ # coord = geom.getChildByName('coord') # 'Coordinate'
coord = geom.getChildBySpec('Coordinate') # works for x3d and vrml
if coord:
points = coord.getFieldAsArray('point', 3, ancestry)
@@ -1874,7 +2383,8 @@ def importMesh_IndexedLineSet(geom, ancestry):
line.append(int(il))
lines.append(line)
- # vcolor = geom.getChildByName('color') # blender dosnt have per vertex color
+ # vcolor = geom.getChildByName('color')
+    # blender doesn't have per vertex color
bpycurve = bpy.data.curves.new('IndexedCurve', 'CURVE')
bpycurve.dimensions = '3D'
@@ -1891,18 +2401,18 @@ def importMesh_IndexedLineSet(geom, ancestry):
return bpycurve
-def importMesh_PointSet(geom, ancestry):
+def importMesh_PointSet(geom, ancestry, _):
# VRML not x3d
- #coord = geom.getChildByName('coord') # 'Coordinate'
coord = geom.getChildBySpec('Coordinate') # works for x3d and vrml
if coord:
points = coord.getFieldAsArray('point', 3, ancestry)
else:
points = []
- # vcolor = geom.getChildByName('color') # blender dosnt have per vertex color
+ # vcolor = geom.getChildByName('color')
+    # blender doesn't have per vertex color
- bpymesh = bpy.data.meshes.new("XXX")
+ bpymesh = bpy.data.meshes.new("PointSet")
bpymesh.vertices.add(len(points))
bpymesh.vertices.foreach_set("co", [a for v in points for a in v])
@@ -1910,341 +2420,715 @@ def importMesh_PointSet(geom, ancestry):
bpymesh.update()
return bpymesh
+
+# -----------------------------------------------------------------------------------
+# Primitives
+# SA: they used to use bpy.ops for primitive creation. That was
+# unbelievably slow on complex scenes. I rewrote to generate meshes
+# by hand.
+
+
GLOBALS['CIRCLE_DETAIL'] = 12
-def bpy_ops_add_object_hack(): # XXX25, evil
- scene = bpy.context.scene
- obj = scene.objects[0]
- scene.objects.unlink(obj)
- bpymesh = obj.data
- bpy.data.objects.remove(obj)
+def importMesh_Sphere(geom, ancestry, bpyima):
+ # solid is ignored.
+ # Extra field 'subdivision="n m"' attribute, specifying how many
+ # rings and segments to use (X3DOM).
+ r = geom.getFieldAsFloat('radius', 0.5, ancestry)
+ subdiv = geom.getFieldAsArray('subdivision', 0, ancestry)
+ if subdiv:
+ if len(subdiv) == 1:
+ nr = ns = subdiv[0]
+ else:
+ (nr, ns) = subdiv
+ else:
+ nr = ns = GLOBALS['CIRCLE_DETAIL']
+ # used as both ring count and segment count
+    lau = pi / nr  # Unit angle of latitude (rings) for the given tessellation
+ lou = 2 * pi / ns # Unit angle of longitude (segments)
+
+ bpymesh = bpy.data.meshes.new(name="Sphere")
+
+ bpymesh.vertices.add(ns * (nr - 1) + 2)
+ # The non-polar vertices go from x=0, negative z plane counterclockwise -
+ # to -x, to +z, to +x, back to -z
+ co = [0, r, 0, 0, -r, 0] # +y and -y poles
+ co += [r * coe for ring in range(1, nr) for seg in range(ns)
+ for coe in (-sin(lou * seg) * sin(lau * ring),
+ cos(lau * ring),
+ -cos(lou * seg) * sin(lau * ring))]
+ bpymesh.vertices.foreach_set('co', co)
+
+ tf = bpymesh.tessfaces
+ tf.add(ns * nr)
+ vb = 2 + (nr - 2) * ns # First vertex index for the bottom cap
+ fb = (nr - 1) * ns # First face index for the bottom cap
+
+ # Because of tricky structure, assign texture coordinates along with
+ # face creation. Can't easily do foreach_set, 'cause caps are triangles and
+ # sides are quads.
+
+ if bpyima:
+ tex = bpymesh.tessface_uv_textures.new().data
+ for face in tex: # No foreach_set for nonscalars
+ face.image = bpyima
+
+ # Faces go in order: top cap, sides, bottom cap.
+ # Sides go by ring then by segment.
+
+ # Caps
+ # Top cap face vertices go in order: down right up
+ # (starting from +y pole)
+ # Bottom cap goes: up left down (starting from -y pole)
+ for seg in range(ns):
+ tf[seg].vertices = (0, seg + 2, (seg + 1) % ns + 2)
+ tf[fb + seg].vertices = (1, vb + (seg + 1) % ns, vb + seg)
+ if bpyima:
+ tex[seg].uv = (((seg + 0.5) / ns, 1),
+ (seg / ns, 1 - 1 / nr),
+ ((seg + 1) / ns, 1 - 1 / nr))
+ tex[fb + seg].uv = (((seg + 0.5) / ns, 0),
+ ((seg + 1) / ns, 1 / nr),
+ (seg / ns, 1 / nr))
+
+ # Sides
+ # Side face vertices go in order: down right up left
+ for ring in range(nr - 2):
+ tvb = 2 + ring * ns
+ # First vertex index for the top edge of the ring
+ bvb = tvb + ns
+ # First vertex index for the bottom edge of the ring
+ rfb = ns * (ring + 1)
+ # First face index for the ring
+ for seg in range(ns):
+ nseg = (seg + 1) % ns
+ tf[rfb + seg].vertices_raw = (tvb + seg, bvb + seg, bvb + nseg, tvb + nseg)
+ if bpyima:
+ tex[rfb + seg].uv_raw = (seg / ns, 1 - (ring + 1) / nr,
+ seg / ns, 1 - (ring + 2) / nr,
+ (seg + 1) / ns, 1 - (ring + 2) / nr,
+ (seg + 1) / ns, 1 - (ring + 1) / nr)
+
+ bpymesh.validate(False)
+ bpymesh.update()
return bpymesh
-def importMesh_Sphere(geom, ancestry):
- diameter = geom.getFieldAsFloat('radius', 0.5, ancestry)
- # bpymesh = Mesh.Primitives.UVsphere(GLOBALS['CIRCLE_DETAIL'], GLOBALS['CIRCLE_DETAIL'], diameter)
+def importMesh_Cylinder(geom, ancestry, bpyima):
+ # solid is ignored
+ # no ccw in this element
+ # Extra parameter subdivision="n" - how many faces to use
+ radius = geom.getFieldAsFloat('radius', 1.0, ancestry)
+ height = geom.getFieldAsFloat('height', 2, ancestry)
+ bottom = geom.getFieldAsBool('bottom', True, ancestry)
+ side = geom.getFieldAsBool('side', True, ancestry)
+ top = geom.getFieldAsBool('top', True, ancestry)
- bpy.ops.mesh.primitive_uv_sphere_add(segments=GLOBALS['CIRCLE_DETAIL'],
- ring_count=GLOBALS['CIRCLE_DETAIL'],
- size=diameter,
- view_align=False,
- enter_editmode=False,
- )
+ n = geom.getFieldAsInt('subdivision', GLOBALS['CIRCLE_DETAIL'], ancestry)
- bpymesh = bpy_ops_add_object_hack()
+ nn = n * 2
+ yvalues = (height / 2, -height / 2)
+ angle = 2 * pi / n
- bpymesh.transform(MATRIX_Z_TO_Y)
+ # The seam is at x=0, z=-r, vertices go ccw -
+ # to pos x, to neg z, to neg x, back to neg z
+ verts = [(-radius * sin(angle * i), y, -radius * cos(angle * i))
+ for i in range(n) for y in yvalues]
+ faces = []
+ if side:
+ # Order of edges in side faces: up, left, down, right.
+ # Texture coordinate logic depends on it.
+ faces += [(i * 2 + 3, i * 2 + 2, i * 2, i * 2 + 1)
+ for i in range(n - 1)] + [(1, 0, nn - 2, nn - 1)]
+ if top:
+ faces += [[x for x in range(0, nn, 2)]]
+ if bottom:
+ faces += [[x for x in range(nn - 1, -1, -2)]]
+
+ bpymesh = bpy.data.meshes.new(name="Cylinder")
+ bpymesh.from_pydata(verts, [], faces)
+ # Tried constructing the mesh manually from polygons/loops/edges,
+ # the difference in performance on Blender 2.74 (Win64) is negligible.
+
+ bpymesh.validate(False)
+
+ # Polygons here, not tessfaces
+ # The structure of the loop array goes: cap, side, cap.
+ if bpyima:
+ loops = []
+ if side:
+ loops += [co for i in range(n)
+ for co in ((i + 1) / n, 0, (i + 1) / n, 1, i / n, 1, i / n, 0)]
+
+ if top:
+ loops += [0.5 + co / 2 for i in range(n)
+ for co in (-sin(angle * i), cos(angle * i))]
+
+ if bottom:
+ loops += [0.5 - co / 2 for i in range(n - 1, -1, -1)
+ for co in (sin(angle * i), cos(angle * i))]
+
+ importMesh_ApplyTextureToLoops(bpymesh, bpyima, loops)
+
+ bpymesh.update()
return bpymesh
-def importMesh_Cylinder(geom, ancestry):
- # bpymesh = bpy.data.meshes.new()
- diameter = geom.getFieldAsFloat('radius', 1.0, ancestry)
+def importMesh_Cone(geom, ancestry, bpyima):
+ # Solid ignored
+ # Extra parameter subdivision="n" - how many faces to use
+ n = geom.getFieldAsInt('subdivision', GLOBALS['CIRCLE_DETAIL'], ancestry)
+ radius = geom.getFieldAsFloat('bottomRadius', 1.0, ancestry)
height = geom.getFieldAsFloat('height', 2, ancestry)
+ bottom = geom.getFieldAsBool('bottom', True, ancestry)
+ side = geom.getFieldAsBool('side', True, ancestry)
- # bpymesh = Mesh.Primitives.Cylinder(GLOBALS['CIRCLE_DETAIL'], diameter, height)
+ d = height / 2
+ angle = 2 * pi / n
- bpy.ops.mesh.primitive_cylinder_add(vertices=GLOBALS['CIRCLE_DETAIL'],
- radius=diameter,
- depth=height,
- end_fill_type='NGON',
- view_align=False,
- enter_editmode=False,
- )
+ verts = [(0, d, 0)]
+ verts += [(-radius * sin(angle * i),
+ -d,
+ -radius * cos(angle * i)) for i in range(n)]
+ faces = []
- bpymesh = bpy_ops_add_object_hack()
+ # Side face vertices go: up down right
+ if side:
+ faces += [(1 + (i + 1) % n, 0, 1 + i) for i in range(n)]
+ if bottom:
+ faces += [[i for i in range(n, 0, -1)]]
+
+ bpymesh = bpy.data.meshes.new(name="Cone")
+ bpymesh.from_pydata(verts, [], faces)
+
+ bpymesh.validate(False)
+ if bpyima:
+ loops = []
+ if side:
+ loops += [co for i in range(n)
+ for co in ((i + 1) / n, 0, (i + 0.5) / n, 1, i / n, 0)]
+ if bottom:
+ loops += [0.5 - co / 2 for i in range(n - 1, -1, -1)
+ for co in (sin(angle * i), cos(angle * i))]
+ importMesh_ApplyTextureToLoops(bpymesh, bpyima, loops)
- bpymesh.transform(MATRIX_Z_TO_Y)
+ bpymesh.update()
+ return bpymesh
- # Warning - Rely in the order Blender adds verts
- # not nice design but wont change soon.
- bottom = geom.getFieldAsBool('bottom', True, ancestry)
- side = geom.getFieldAsBool('side', True, ancestry)
- top = geom.getFieldAsBool('top', True, ancestry)
+def importMesh_Box(geom, ancestry, bpyima):
+ # Solid is ignored
+ # No ccw in this element
+ (dx, dy, dz) = geom.getFieldAsFloatTuple('size', (2.0, 2.0, 2.0), ancestry)
+ dx /= 2
+ dy /= 2
+ dz /= 2
+
+ bpymesh = bpy.data.meshes.new(name="Box")
+ bpymesh.vertices.add(8)
+
+ # xz plane at +y, ccw
+ co = (dx, dy, dz, -dx, dy, dz, -dx, dy, -dz, dx, dy, -dz,
+ # xz plane at -y
+ dx, -dy, dz, -dx, -dy, dz, -dx, -dy, -dz, dx, -dy, -dz)
+ bpymesh.vertices.foreach_set('co', co)
+
+ bpymesh.tessfaces.add(6)
+ bpymesh.tessfaces.foreach_set('vertices_raw', (
+ 0, 1, 2, 3, # +y
+ 4, 0, 3, 7, # +x
+ 7, 3, 2, 6, # -z
+ 6, 2, 1, 5, # -x
+ 5, 1, 0, 4, # +z
+ 7, 6, 5, 4)) # -y
+
+ bpymesh.validate(False)
+ if bpyima:
+ d = bpymesh.tessface_uv_textures.new().data
+ for face in d: # No foreach_set for nonscalars
+ face.image = bpyima
+ d.foreach_set('uv_raw', (
+ 1, 0, 0, 0, 0, 1, 1, 1,
+ 0, 0, 0, 1, 1, 1, 1, 0,
+ 0, 0, 0, 1, 1, 1, 1, 0,
+ 0, 0, 0, 1, 1, 1, 1, 0,
+ 0, 0, 0, 1, 1, 1, 1, 0,
+ 1, 0, 0, 0, 0, 1, 1, 1))
- if not top: # last vert is top center of tri fan.
- # bpymesh.vertices.delete([(GLOBALS['CIRCLE_DETAIL'] + GLOBALS['CIRCLE_DETAIL']) + 1]) # XXX25
- pass
+ bpymesh.update()
+ return bpymesh
- if not bottom: # second last vert is bottom of triangle fan
- # XXX25
- # bpymesh.vertices.delete([GLOBALS['CIRCLE_DETAIL'] + GLOBALS['CIRCLE_DETAIL']])
- pass
+# -----------------------------------------------------------------------------------
+# Utilities for importShape
+
+
+# Textures are processed elsewhere.
+def appearance_CreateMaterial(vrmlname, mat, ancestry, is_vcol):
+ # Given an X3D material, creates a Blender material.
+ # texture is applied later, in appearance_Create().
+ # All values between 0.0 and 1.0, defaults from VRML docs.
+ bpymat = bpy.data.materials.new(vrmlname)
+ bpymat.ambient = mat.getFieldAsFloat('ambientIntensity', 0.2, ancestry)
+ diff_color = mat.getFieldAsFloatTuple('diffuseColor',
+ [0.8, 0.8, 0.8],
+ ancestry)
+ bpymat.diffuse_color = diff_color
+
+    # NOTE - blender doesn't support emissive color
+ # Store in mirror color and approximate with emit.
+ emit = mat.getFieldAsFloatTuple('emissiveColor', [0.0, 0.0, 0.0], ancestry)
+ bpymat.mirror_color = emit
+ bpymat.emit = (emit[0] + emit[1] + emit[2]) / 3.0
+
+ shininess = mat.getFieldAsFloat('shininess', 0.2, ancestry)
+ bpymat.specular_hardness = int(1 + (510 * shininess))
+ # 0-1 -> 1-511
+ bpymat.specular_color = mat.getFieldAsFloatTuple('specularColor',
+ [0.0, 0.0, 0.0], ancestry)
+ bpymat.alpha = 1.0 - mat.getFieldAsFloat('transparency', 0.0, ancestry)
+ if bpymat.alpha < 0.999:
+ bpymat.use_transparency = True
+ if is_vcol:
+ bpymat.use_vertex_color_paint = True
+ return bpymat
+
+
+def appearance_CreateDefaultMaterial():
+ # Just applies the X3D defaults. Used for shapes
+ # without explicit material definition
+ # (but possibly with a texture).
+
+ bpymat = bpy.data.materials.new("Material")
+ bpymat.ambient = 0.2
+ bpymat.diffuse_color = [0.8, 0.8, 0.8]
+ bpymat.mirror_color = (0, 0, 0)
+ bpymat.emit = 0
+
+ bpymat.specular_hardness = 103
+ # 0-1 -> 1-511
+ bpymat.specular_color = (0, 0, 0)
+ bpymat.alpha = 1
+ return bpymat
+
+
+def appearance_LoadImageTextureFile(ima_urls, node):
+ bpyima = None
+ for f in ima_urls:
+ dirname = os.path.dirname(node.getFilename())
+ bpyima = image_utils.load_image(f, dirname,
+ place_holder=False,
+ recursive=False,
+ convert_callback=imageConvertCompat)
+ if bpyima:
+ break
- if not side:
- # remove all quads
- # XXX25
- # bpymesh.tessfaces.delete(1, [f for f in bpymesh.tessfaces if len(f) == 4])
- pass
+ return bpyima
- return bpymesh
+def appearance_LoadImageTexture(imageTexture, ancestry, node):
+ # TODO: cache loaded textures...
+ ima_urls = imageTexture.getFieldAsString('url', None, ancestry)
-def importMesh_Cone(geom, ancestry):
- # bpymesh = bpy.data.meshes.new()
- diameter = geom.getFieldAsFloat('bottomRadius', 1.0, ancestry)
- height = geom.getFieldAsFloat('height', 2, ancestry)
+ if ima_urls is None:
+ try:
+ ima_urls = imageTexture.getFieldAsStringArray('url', ancestry)
+ # in some cases we get a list of images.
+ except:
+ ima_urls = None
+ else:
+ if '" "' in ima_urls:
+ # '"foo" "bar"' --> ['foo', 'bar']
+ ima_urls = [w.strip('"') for w in ima_urls.split('" "')]
+ else:
+ ima_urls = [ima_urls]
+ # ima_urls is a list or None
- # bpymesh = Mesh.Primitives.Cone(GLOBALS['CIRCLE_DETAIL'], diameter, height)
+ if ima_urls is None:
+ print("\twarning, image with no URL, this is odd")
+ return None
+ else:
+ bpyima = appearance_LoadImageTextureFile(ima_urls, node)
- bpy.ops.mesh.primitive_cone_add(vertices=GLOBALS['CIRCLE_DETAIL'],
- radius1=diameter,
- radius2=0,
- depth=height,
- end_fill_type='NGON',
- view_align=False,
- enter_editmode=False,
- )
+ if not bpyima:
+ print("ImportX3D warning: unable to load texture", ima_urls)
+ else:
+ # KNOWN BUG; PNGs with a transparent color are not perceived
+ # as transparent. Need alpha channel.
+
+ bpyima.use_alpha = bpyima.depth in {32, 128}
+ return bpyima
+
+
+def appearance_LoadTexture(tex_node, ancestry, node):
+ # Both USE-based caching and desc-based caching
+    # Works for both ImageTextures and PixelTextures
+
+ # USE-based caching
+ if tex_node.reference:
+ return tex_node.getRealNode().parsed
+
+ # Desc-based caching. It might misfire on multifile models, where the
+ # same desc means different things in different files.
+ # TODO: move caches to file level.
+ desc = tex_node.desc()
+ if desc and desc in texture_cache:
+ bpyima = texture_cache[desc]
+ if tex_node.canHaveReferences():
+ tex_node.parsed = bpyima
+ return bpyima
+
+ # No cached texture, load it.
+ if tex_node.getSpec() == 'ImageTexture':
+ bpyima = appearance_LoadImageTexture(tex_node, ancestry, node)
+ else: # PixelTexture
+ bpyima = appearance_LoadPixelTexture(tex_node, ancestry)
+
+ if bpyima: # Loading can still fail
+ repeat_s = tex_node.getFieldAsBool('repeatS', True, ancestry)
+ bpyima.use_clamp_x = not repeat_s
+ repeat_t = tex_node.getFieldAsBool('repeatT', True, ancestry)
+ bpyima.use_clamp_y = not repeat_t
+
+ # Update the desc-based cache
+ if desc:
+ texture_cache[desc] = bpyima
+
+ # Update the USE-based cache
+ if tex_node.canHaveReferences():
+ tex_node.parsed = bpyima
+
+ return bpyima
+
+
+def appearance_ExpandCachedMaterial(bpymat):
+ if bpymat.texture_slots[0] is not None:
+ bpyima = bpymat.texture_slots[0].texture.image
+ tex_has_alpha = bpyima.use_alpha
+ return (bpymat, bpyima, tex_has_alpha)
+
+ return (bpymat, None, False)
+
+
+def appearance_MakeDescCacheKey(material, tex_node):
+ mat_desc = material.desc() if material else "Default"
+ tex_desc = tex_node.desc() if tex_node else "Default"
+
+ if not((tex_node and tex_desc is None) or
+ (material and mat_desc is None)):
+ # desc not available (in VRML)
+ # TODO: serialize VRML nodes!!!
+ return (mat_desc, tex_desc)
+ elif not tex_node and not material:
+ # Even for VRML, we cache the null material
+ return ("Default", "Default")
+ else:
+ return None # Desc-based caching is off
- bpymesh = bpy_ops_add_object_hack()
- bpymesh.transform(MATRIX_Z_TO_Y)
+def appearance_Create(vrmlname, material, tex_node, ancestry, node, is_vcol):
+ # Creates a Blender material object from appearance
+ bpyima = None
+ tex_has_alpha = False
- # Warning - Rely in the order Blender adds verts
- # not nice design but wont change soon.
+ if material:
+ bpymat = appearance_CreateMaterial(vrmlname, material, ancestry, is_vcol)
+ else:
+ bpymat = appearance_CreateDefaultMaterial()
- bottom = geom.getFieldAsBool('bottom', True, ancestry)
- side = geom.getFieldAsBool('side', True, ancestry)
+ if tex_node: # Texture caching inside there
+ bpyima = appearance_LoadTexture(tex_node, ancestry, node)
- if not bottom: # last vert is on the bottom
- # bpymesh.vertices.delete([GLOBALS['CIRCLE_DETAIL'] + 1]) # XXX25
- pass
- if not side: # second last vert is on the pointy bit of the cone
- # bpymesh.vertices.delete([GLOBALS['CIRCLE_DETAIL']]) # XXX25
- pass
+ if is_vcol:
+ bpymat.use_vertex_color_paint = True
- return bpymesh
+ if bpyima:
+ tex_has_alpha = bpyima.use_alpha
+ texture = bpy.data.textures.new(bpyima.name, 'IMAGE')
+ texture.image = bpyima
-def importMesh_Box(geom, ancestry):
- # bpymesh = bpy.data.meshes.new()
+ mtex = bpymat.texture_slots.add()
+ mtex.texture = texture
- size = geom.getFieldAsFloatTuple('size', (2.0, 2.0, 2.0), ancestry)
+ mtex.texture_coords = 'UV'
+ mtex.use_map_diffuse = True
+ mtex.use = True
- # bpymesh = Mesh.Primitives.Cube(1.0)
- bpy.ops.mesh.primitive_cube_add(view_align=False,
- enter_editmode=False,
- )
+ if bpyima.use_alpha:
+ bpymat.use_transparency = True
+ mtex.use_map_alpha = True
+ mtex.alpha_factor = 0.0
- bpymesh = bpy_ops_add_object_hack()
+ return (bpymat, bpyima, tex_has_alpha)
- # Scale the box to the size set
- scale_mat = Matrix(((size[0], 0, 0), (0, size[1], 0), (0, 0, size[2]))) * 0.5
- bpymesh.transform(scale_mat.to_4x4())
- return bpymesh
+def importShape_LoadAppearance(vrmlname, appr, ancestry, node, is_vcol):
+ """
+ Material creation takes nontrivial time on large models.
+ So we cache them aggressively.
+ However, in Blender, texture is a part of material, while in
+ X3D it's not. Blender's notion of material corresponds to
+ X3D's notion of appearance.
+ TextureTransform is not a part of material (at least
+ not in the current implementation).
-def importShape(node, ancestry, global_matrix):
- def apply_texmtx(blendata, texmtx):
- for luv in bpydata.uv_layers.active.data:
- luv.uv = texmtx * luv.uv
+ USE on an Appearance node and USE on a Material node
+ call for different approaches.
- bpyob = node.getRealNode().blendObject
+    Tools generate repeating, identical material definitions.
+ Can't rely on USE alone. Repeating texture definitions
+ are entirely possible, too.
- if bpyob is not None:
- bpyob = node.blendData = node.blendObject = bpyob.copy()
- bpy.context.scene.objects.link(bpyob).select = True
- else:
- vrmlname = node.getDefName()
- if not vrmlname:
- vrmlname = 'Shape'
-
- # works 100% in vrml, but not x3d
- #appr = node.getChildByName('appearance') # , 'Appearance'
- #geom = node.getChildByName('geometry') # , 'IndexedFaceSet'
-
- # Works in vrml and x3d
- appr = node.getChildBySpec('Appearance')
- geom = node.getChildBySpec(['IndexedFaceSet', 'IndexedLineSet', 'PointSet', 'Sphere', 'Box', 'Cylinder', 'Cone'])
-
- # For now only import IndexedFaceSet's
- if geom:
- bpymat = None
- bpyima = None
- texmtx = None
-
- image_depth = 0 # so we can set alpha face flag later
- is_vcol = (geom.getChildBySpec('Color') is not None)
-
- if appr:
- #mat = appr.getChildByName('material') # 'Material'
- #ima = appr.getChildByName('texture') # , 'ImageTexture'
- #if ima and ima.getSpec() != 'ImageTexture':
- # print('\tWarning: texture type "%s" is not supported' % ima.getSpec())
- # ima = None
- # textx = appr.getChildByName('textureTransform')
-
- mat = appr.getChildBySpec('Material')
- ima = appr.getChildBySpec('ImageTexture')
-
- textx = appr.getChildBySpec('TextureTransform')
-
- if textx:
- texmtx = translateTexTransform(textx, ancestry)
-
- bpymat = appr.getRealNode().blendData
-
- if bpymat is None:
- # print(mat, ima)
- if mat or ima:
- if not mat:
- mat = ima # This is a bit dumb, but just means we use default values for all
-
- # all values between 0.0 and 1.0, defaults from VRML docs
- bpymat = bpy.data.materials.new(vrmlname)
- bpymat.ambient = mat.getFieldAsFloat('ambientIntensity', 0.2, ancestry)
- bpymat.diffuse_color = mat.getFieldAsFloatTuple('diffuseColor', [0.8, 0.8, 0.8], ancestry)
-
- # NOTE - blender dosnt support emmisive color
- # Store in mirror color and approximate with emit.
- emit = mat.getFieldAsFloatTuple('emissiveColor', [0.0, 0.0, 0.0], ancestry)
- bpymat.mirror_color = emit
- bpymat.emit = (emit[0] + emit[1] + emit[2]) / 3.0
-
- bpymat.specular_hardness = int(1 + (510 * mat.getFieldAsFloat('shininess', 0.2, ancestry))) # 0-1 -> 1-511
- bpymat.specular_color = mat.getFieldAsFloatTuple('specularColor', [0.0, 0.0, 0.0], ancestry)
- bpymat.alpha = 1.0 - mat.getFieldAsFloat('transparency', 0.0, ancestry)
- if bpymat.alpha < 0.999:
- bpymat.use_transparency = True
- if is_vcol:
- bpymat.use_vertex_color_paint = True
-
- if ima:
- bpyima = ima.getRealNode().blendData
-
- if bpyima is None:
- ima_urls = ima.getFieldAsString('url', None, ancestry)
-
- if ima_urls is None:
- try:
- ima_urls = ima.getFieldAsStringArray('url', ancestry) # in some cases we get a list of images.
- except:
- ima_urls = None
- else:
- if '" "' in ima_urls:
- # '"foo" "bar"' --> ['foo', 'bar']
- ima_urls = [w.strip('"') for w in ima_urls.split('" "')]
- else:
- ima_urls = [ima_urls]
- # ima_urls is a list or None
-
- if ima_urls is None:
- print("\twarning, image with no URL, this is odd")
- else:
- for f in ima_urls:
- bpyima = image_utils.load_image(f, os.path.dirname(node.getFilename()), place_holder=False,
- recursive=False, convert_callback=imageConvertCompat)
- if bpyima:
- break
-
- if bpyima:
- texture = bpy.data.textures.new(bpyima.name, 'IMAGE')
- texture.image = bpyima
-
- # Adds textures for materials (rendering)
- try:
- image_depth = bpyima.depth
- except:
- image_depth = -1
-
- mtex = bpymat.texture_slots.add()
- mtex.texture = texture
-
- mtex.texture_coords = 'UV'
- mtex.use_map_diffuse = True
-
- if image_depth in {32, 128}:
- bpymat.use_transparency = True
- mtex.use_map_alpha = True
- mtex.alpha_factor = 0.0
-
- ima_repS = ima.getFieldAsBool('repeatS', True, ancestry)
- ima_repT = ima.getFieldAsBool('repeatT', True, ancestry)
-
- # To make this work properly we'd need to scale the UV's too, better to ignore th
- # texture.repeat = max(1, ima_repS * 512), max(1, ima_repT * 512)
-
- if not ima_repS:
- bpyima.use_clamp_x = True
- if not ima_repT:
- bpyima.use_clamp_y = True
- elif ima:
- bpyima = ima.getRealNode().blendData
-
- appr.blendData = bpymat
- if ima:
- ima.blendData = bpyima
-
- bpydata = geom.getRealNode().blendData
- if bpydata is None:
- geom_spec = geom.getSpec()
- ccw = True
- if geom_spec == 'IndexedFaceSet':
- bpydata, ccw = importMesh_IndexedFaceSet(geom, bpyima, ancestry)
- elif geom_spec == 'IndexedLineSet':
- bpydata = importMesh_IndexedLineSet(geom, ancestry)
- elif geom_spec == 'PointSet':
- bpydata = importMesh_PointSet(geom, ancestry)
- elif geom_spec == 'Sphere':
- bpydata = importMesh_Sphere(geom, ancestry)
- elif geom_spec == 'Box':
- bpydata = importMesh_Box(geom, ancestry)
- elif geom_spec == 'Cylinder':
- bpydata = importMesh_Cylinder(geom, ancestry)
- elif geom_spec == 'Cone':
- bpydata = importMesh_Cone(geom, ancestry)
- else:
- print('\tWarning: unsupported type "%s"' % geom_spec)
- return
+ Vertex coloring is not a part of appearance, but Blender
+ has a material flag for it. However, if a mesh has no vertex
+ color layer, setting use_vertex_color_paint to true has no
+ effect. So it's fine to reuse the same material for meshes
+ with vertex colors and for ones without.
+ It's probably an abuse of Blender of some level.
- if bpydata:
- vrmlname = vrmlname + geom_spec
- bpydata.name = vrmlname
+ So here's the caching structure:
+    For USE on appearance, we store the material object
+ in the appearance node.
- if type(bpydata) == bpy.types.Mesh:
- is_solid = geom.getFieldAsBool('solid', True, ancestry)
- creaseAngle = geom.getFieldAsFloat('creaseAngle', None, ancestry)
+ For USE on texture, we store the image object in the tex node.
- if creaseAngle is not None:
- bpydata.auto_smooth_angle = creaseAngle
- bpydata.use_auto_smooth = True
+ For USE on material with no texture, we store the material object
+ in the material node.
- # Only ever 1 material per shape
- if bpymat:
- bpydata.materials.append(bpymat)
+ Also, we store textures by description in texture_cache.
- if bpydata.uv_layers:
- if texmtx:
- # Apply texture transform?
- apply_texmtx(blendata, texmtx)
- # Done transforming the texture
+ Also, we store materials by (material desc, texture desc)
+ in material_cache.
+ """
+ # First, check entire-appearance cache
+ if appr.reference and appr.getRealNode().parsed:
+ return appearance_ExpandCachedMaterial(appr.getRealNode().parsed)
+
+ tex_node = appr.getChildBySpec(('ImageTexture', 'PixelTexture'))
+ # Other texture nodes are: MovieTexture, MultiTexture
+ material = appr.getChildBySpec('Material')
+ # We're ignoring FillProperties, LineProperties, and shaders
+
+ # Check the USE-based material cache for textureless materials
+ if material and material.reference and not tex_node and material.getRealNode().parsed:
+ return appearance_ExpandCachedMaterial(material.getRealNode().parsed)
+
+ # Now the description-based caching
+ cache_key = appearance_MakeDescCacheKey(material, tex_node)
+
+ if cache_key and cache_key in material_cache:
+ bpymat = material_cache[cache_key]
+ # Still want to make the material available for USE-based reuse
+ if appr.canHaveReferences():
+ appr.parsed = bpymat
+ if material and material.canHaveReferences() and not tex_node:
+ material.parsed = bpymat
+ return appearance_ExpandCachedMaterial(bpymat)
+
+ # Done checking full-material caches. Texture cache may still kick in.
+ # Create the material already
+ (bpymat, bpyima, tex_has_alpha) = appearance_Create(vrmlname, material, tex_node, ancestry, node, is_vcol)
+
+ # Update the caches
+ if appr.canHaveReferences():
+ appr.parsed = bpymat
+
+ if cache_key:
+ material_cache[cache_key] = bpymat
+
+ if material and material.canHaveReferences() and not tex_node:
+ material.parsed = bpymat
+
+ return (bpymat, bpyima, tex_has_alpha)
+
+
+def appearance_LoadPixelTexture(pixelTexture, ancestry):
+ image = pixelTexture.getFieldAsArray('image', 0, ancestry)
+ (w, h, plane_count) = image[0:3]
+ has_alpha = plane_count in {2, 4}
+ pixels = image[3:]
+ if len(pixels) != w * h:
+ print("ImportX3D warning: pixel count in PixelTexture is off")
+
+ bpyima = bpy.data.images.new("PixelTexture", w, h, has_alpha, True)
+ bpyima.use_alpha = has_alpha
+
+ # Conditional above the loop, for performance
+ if plane_count == 3: # RGB
+ bpyima.pixels = [(cco & 0xff) / 255 for pixel in pixels
+ for cco in (pixel >> 16, pixel >> 8, pixel, 255)]
+ elif plane_count == 4: # RGBA
+ bpyima.pixels = [(cco & 0xff) / 255 for pixel in pixels
+ for cco
+ in (pixel >> 24, pixel >> 16, pixel >> 8, pixel)]
+ elif plane_count == 1: # Intensity - does Blender even support that?
+ bpyima.pixels = [(cco & 0xff) / 255 for pixel in pixels
+ for cco in (pixel, pixel, pixel, 255)]
+    elif plane_count == 2:  # Intensity/alpha
+ bpyima.pixels = [(cco & 0xff) / 255 for pixel in pixels
+ for cco
+ in (pixel >> 8, pixel >> 8, pixel >> 8, pixel)]
+ bpyima.update()
+ return bpyima
+
+
+# Called from importShape to insert a data object (typically a mesh)
+# into the scene
+def importShape_ProcessObject(
+ bpyscene, vrmlname, bpydata, geom, geom_spec, node,
+ bpymat, has_alpha, texmtx, ancestry,
+ global_matrix):
+
+ vrmlname += geom_spec
+ bpydata.name = vrmlname
+
+ if type(bpydata) == bpy.types.Mesh:
+ # solid, as understood by the spec, is always true in Blender
+ # solid=false, we don't support it yet.
+ creaseAngle = geom.getFieldAsFloat('creaseAngle', None, ancestry)
+ if creaseAngle is not None:
+ bpydata.auto_smooth_angle = creaseAngle
+ bpydata.use_auto_smooth = True
+
+ # Only ever 1 material per shape
+ if bpymat:
+ bpydata.materials.append(bpymat)
+
+ if bpydata.tessface_uv_textures:
+ if has_alpha: # set the faces alpha flag?
+ # transp = Mesh.FaceTranspModes.ALPHA
+ for f in bpydata.tessface_uv_textures.active.data:
+ f.blend_type = 'ALPHA'
+
+ if texmtx:
+ # Apply texture transform?
+ uv_copy = Vector()
+ for f in bpydata.tessface_uv_textures.active.data:
+ fuv = f.uv
+ for i, uv in enumerate(fuv):
+ uv_copy.x = uv[0]
+ uv_copy.y = uv[1]
+
+ fuv[i] = (uv_copy * texmtx)[0:2]
+ # Done transforming the texture
+ # TODO: check if per-polygon textures are supported here.
+ elif type(bpydata) == bpy.types.TextCurve:
+ # Text with textures??? Not sure...
+ if bpymat:
+ bpydata.materials.append(bpymat)
+
+ # Can transform data or object, better the object so we can instance
+ # the data
+ # bpymesh.transform(getFinalMatrix(node))
+ bpyob = node.blendObject = bpy.data.objects.new(vrmlname, bpydata)
+ bpyob.matrix_world = getFinalMatrix(node, None, ancestry, global_matrix)
+ bpyscene.objects.link(bpyob).select = True
- # Must be here and not in IndexedFaceSet because it needs an object for the flip func. Messy :/
- if not ccw:
- # bpydata.flipNormals()
- # XXX25
- pass
+ if DEBUG:
+ bpyob["source_line_no"] = geom.lineno
- # else could be a curve for example
- # if texmtx is defined, we need specific UVMap, hence a copy of the mesh...
- elif texmtx and blendata.uv_layers:
- bpydata = bpydata.copy()
- apply_texmtx(blendata, texmtx)
+def importText(geom, ancestry, bpyima):
+ fmt = geom.getChildBySpec('FontStyle')
+ size = fmt.getFieldAsFloat("size", 1, ancestry) if fmt else 1.
+ body = geom.getFieldAsString("string", None, ancestry)
+ body = [w.strip('"') for w in body.split('" "')]
- geom.blendData = bpydata
+ bpytext = bpy.data.curves.new(name="Text", type='FONT')
+ bpytext.offset_y = - size
+ bpytext.body = "\n".join(body)
+ bpytext.size = size
+ return bpytext
- if bpydata:
- bpyob = node.blendData = node.blendObject = bpy.data.objects.new(vrmlname, bpydata)
- bpy.context.scene.objects.link(bpyob).select = True
- if bpyob:
+# -----------------------------------------------------------------------------------
+
+
+geometry_importers = {
+ 'IndexedFaceSet': importMesh_IndexedFaceSet,
+ 'IndexedTriangleSet': importMesh_IndexedTriangleSet,
+ 'IndexedTriangleStripSet': importMesh_IndexedTriangleStripSet,
+ 'IndexedTriangleFanSet': importMesh_IndexedTriangleFanSet,
+ 'IndexedLineSet': importMesh_IndexedLineSet,
+ 'TriangleSet': importMesh_TriangleSet,
+ 'TriangleStripSet': importMesh_TriangleStripSet,
+ 'TriangleFanSet': importMesh_TriangleFanSet,
+ 'LineSet': importMesh_LineSet,
+ 'ElevationGrid': importMesh_ElevationGrid,
+ 'Extrusion': importMesh_Extrusion,
+ 'PointSet': importMesh_PointSet,
+ 'Sphere': importMesh_Sphere,
+ 'Box': importMesh_Box,
+ 'Cylinder': importMesh_Cylinder,
+ 'Cone': importMesh_Cone,
+ 'Text': importText,
+ }
+
+
+def importShape(bpyscene, node, ancestry, global_matrix):
+ # Under Shape, we can only have Appearance, MetadataXXX and a geometry node
+ def isGeometry(spec):
+ return spec != "Appearance" and not spec.startswith("Metadata")
+
+ bpyob = node.getRealNode().blendObject
+
+ if bpyob is not None:
+ bpyob = node.blendData = node.blendObject = bpyob.copy()
# Could transform data, but better the object so we can instance the data
bpyob.matrix_world = getFinalMatrix(node, None, ancestry, global_matrix)
+ bpyscene.objects.link(bpyob).select = True
+ return
+
+ vrmlname = node.getDefName()
+ if not vrmlname:
+ vrmlname = 'Shape'
+
+ appr = node.getChildBySpec('Appearance')
+ geom = node.getChildBySpecCondition(isGeometry)
+ if not geom:
+ # Oh well, no geometry node in this shape
+ return
+
+ bpymat = None
+ bpyima = None
+ texmtx = None
+ tex_has_alpha = False
+
+ is_vcol = (geom.getChildBySpec(['Color', 'ColorRGBA']) is not None)
+
+ if appr:
+ (bpymat, bpyima,
+ tex_has_alpha) = importShape_LoadAppearance(vrmlname, appr,
+ ancestry, node,
+ is_vcol)
+
+ textx = appr.getChildBySpec('TextureTransform')
+ if textx:
+ texmtx = translateTexTransform(textx, ancestry)
+
+ bpydata = None
+ geom_spec = geom.getSpec()
+
+ # ccw is handled by every geometry importer separately; some
+ # geometries are easier to flip than others
+ geom_fn = geometry_importers.get(geom_spec)
+ if geom_fn is not None:
+ bpydata = geom_fn(geom, ancestry, bpyima)
+
+ # There are no geometry importers that can legally return
+ # no object. It's either a bpy object, or an exception
+ importShape_ProcessObject(
+ bpyscene, vrmlname, bpydata, geom, geom_spec,
+ node, bpymat, tex_has_alpha, texmtx,
+ ancestry, global_matrix)
+ else:
+ print('\tImportX3D warning: unsupported type "%s"' % geom_spec)
+
+
+# -----------------------------------------------------------------------------------
+# Lighting
def importLamp_PointLight(node, ancestry):
@@ -2260,7 +3144,7 @@ def importLamp_PointLight(node, ancestry):
# is_on = node.getFieldAsBool('on', True, ancestry) # TODO
radius = node.getFieldAsFloat('radius', 100.0, ancestry)
- bpylamp = bpy.data.lamps.new("ToDo", 'POINT')
+ bpylamp = bpy.data.lamps.new(vrmlname, 'POINT')
bpylamp.energy = intensity
bpylamp.distance = radius
bpylamp.color = color
@@ -2330,7 +3214,7 @@ def importLamp_SpotLight(node, ancestry):
return bpylamp, mtx
-def importLamp(node, spec, ancestry, global_matrix):
+def importLamp(bpyscene, node, spec, ancestry, global_matrix):
if spec == 'PointLight':
bpylamp, mtx = importLamp_PointLight(node, ancestry)
elif spec == 'DirectionalLight':
@@ -2341,13 +3225,16 @@ def importLamp(node, spec, ancestry, global_matrix):
print("Error, not a lamp")
raise ValueError
- bpyob = node.blendData = node.blendObject = bpy.data.objects.new("TODO", bpylamp)
- bpy.context.scene.objects.link(bpyob).select = True
+ bpyob = node.blendData = node.blendObject = bpy.data.objects.new(bpylamp.name, bpylamp)
+ bpyscene.objects.link(bpyob).select = True
bpyob.matrix_world = getFinalMatrix(node, mtx, ancestry, global_matrix)
-def importViewpoint(node, ancestry, global_matrix):
+# -----------------------------------------------------------------------------------
+
+
+def importViewpoint(bpyscene, node, ancestry, global_matrix):
name = node.getDefName()
if not name:
name = 'Viewpoint'
@@ -2365,17 +3252,17 @@ def importViewpoint(node, ancestry, global_matrix):
mtx = Matrix.Translation(Vector(position)) * translateRotation(orientation)
bpyob = node.blendData = node.blendObject = bpy.data.objects.new(name, bpycam)
- bpy.context.scene.objects.link(bpyob).select = True
+ bpyscene.objects.link(bpyob).select = True
bpyob.matrix_world = getFinalMatrix(node, mtx, ancestry, global_matrix)
-def importTransform(node, ancestry, global_matrix):
+def importTransform(bpyscene, node, ancestry, global_matrix):
name = node.getDefName()
if not name:
name = 'Transform'
bpyob = node.blendData = node.blendObject = bpy.data.objects.new(name, None)
- bpy.context.scene.objects.link(bpyob).select = True
+ bpyscene.objects.link(bpyob).select = True
bpyob.matrix_world = getFinalMatrix(node, None, ancestry, global_matrix)
@@ -2559,21 +3446,24 @@ ROUTE champFly001.bindTime TO vpTs.set_startTime
translateTimeSensor(time_node, action, ancestry)
-def load_web3d(path,
- PREF_FLAT=False,
- PREF_CIRCLE_DIV=16,
- global_matrix=None,
- HELPER_FUNC=None,
- ):
+def load_web3d(
+ bpyscene,
+ filepath,
+ *,
+ PREF_FLAT=False,
+ PREF_CIRCLE_DIV=16,
+ global_matrix=None,
+ HELPER_FUNC=None
+ ):
# Used when adding blender primitives
GLOBALS['CIRCLE_DETAIL'] = PREF_CIRCLE_DIV
#root_node = vrml_parse('/_Cylinder.wrl')
- if path.lower().endswith('.x3d'):
- root_node, msg = x3d_parse(path)
+ if filepath.lower().endswith('.x3d'):
+ root_node, msg = x3d_parse(filepath)
else:
- root_node, msg = vrml_parse(path)
+ root_node, msg = vrml_parse(filepath)
if not root_node:
print(msg)
@@ -2601,15 +3491,15 @@ def load_web3d(path,
# by an external script. - gets first pick
pass
if spec == 'Shape':
- importShape(node, ancestry, global_matrix)
+ importShape(bpyscene, node, ancestry, global_matrix)
elif spec in {'PointLight', 'DirectionalLight', 'SpotLight'}:
- importLamp(node, spec, ancestry, global_matrix)
+ importLamp(bpyscene, node, spec, ancestry, global_matrix)
elif spec == 'Viewpoint':
- importViewpoint(node, ancestry, global_matrix)
+ importViewpoint(bpyscene, node, ancestry, global_matrix)
elif spec == 'Transform':
# Only use transform nodes when we are not importing a flat object hierarchy
if PREF_FLAT == False:
- importTransform(node, ancestry, global_matrix)
+ importTransform(bpyscene, node, ancestry, global_matrix)
'''
# These are delt with later within importRoute
elif spec=='PositionInterpolator':
@@ -2634,7 +3524,7 @@ def load_web3d(path,
node = defDict[key]
if node.blendData is None: # Add an object if we need one for animation
node.blendData = node.blendObject = bpy.data.objects.new('AnimOb', None) # , name)
- bpy.context.scene.objects.link(node.blendObject).select = True
+ bpyscene.objects.link(node.blendObject).select = True
if node.blendData.animation_data is None:
node.blendData.animation_data_create()
@@ -2672,13 +3562,36 @@ def load_web3d(path,
c.parent = parent
# update deps
- bpy.context.scene.update()
+ bpyscene.update()
del child_dict
-def load(operator, context, filepath="", global_matrix=None):
-
- load_web3d(filepath,
+def load_with_profiler(
+ context,
+ filepath,
+ *,
+ global_matrix=None
+ ):
+ import cProfile
+ import pstats
+ pro = cProfile.Profile()
+ pro.runctx("load_web3d(context.scene, filepath, PREF_FLAT=True, "
+ "PREF_CIRCLE_DIV=16, global_matrix=global_matrix)",
+ globals(), locals())
+ st = pstats.Stats(pro)
+ st.sort_stats("time")
+ st.print_stats(0.1)
+ # st.print_callers(0.1)
+
+
+def load(context,
+ filepath,
+ *,
+ global_matrix=None
+ ):
+
+ # load_with_profiler(context, filepath, global_matrix=global_matrix)
+ load_web3d(context.scene, filepath,
PREF_FLAT=True,
PREF_CIRCLE_DIV=16,
global_matrix=global_matrix,
diff --git a/light_field_tools/light_field_tools.py b/light_field_tools/light_field_tools.py
index 017b3466..60f631ed 100644
--- a/light_field_tools/light_field_tools.py
+++ b/light_field_tools/light_field_tools.py
@@ -141,7 +141,7 @@ class OBJECT_OT_create_lightfield_rig(bpy.types.Operator):
cam.data.angle = scene.lightfield.angle
# display options of the camera
- cam.data.lens_unit = 'DEGREES'
+ cam.data.lens_unit = 'FOV'
# handler parent
if scene.lightfield.create_handler:
@@ -187,7 +187,7 @@ class OBJECT_OT_create_lightfield_rig(bpy.types.Operator):
# display options of the camera
cam.data.draw_size = 0.15
- cam.data.lens_unit = 'DEGREES'
+ cam.data.lens_unit = 'FOV'
# handler parent
if scene.lightfield.create_handler:
diff --git a/mesh_bsurfaces.py b/mesh_bsurfaces.py
index 4c572800..a6a6d52f 100644
--- a/mesh_bsurfaces.py
+++ b/mesh_bsurfaces.py
@@ -21,7 +21,7 @@ bl_info = {
"name": "Bsurfaces GPL Edition",
"author": "Eclectiel",
"version": (1, 5),
- "blender": (2, 63, 0),
+ "blender": (2, 76, 0),
"location": "View3D > EditMode > ToolShelf",
"description": "Modeling and retopology tool.",
"wiki_url": "http://wiki.blender.org/index.php/Dev:Ref/Release_Notes/2.64/Bsurfaces_1.5",
@@ -3205,7 +3205,7 @@ class GPENCIL_OT_SURFSK_add_surface(bpy.types.Operator):
bpy.context.user_preferences.edit.use_global_undo = self.initial_global_undo_state
if created_faces_count == 0:
- self.report({'WARNING'}, "There aren't any strokes.")
+ self.report({'WARNING'}, "There aren't any strokes attached to the object")
return {"CANCELLED"}
else:
return {"FINISHED"}
@@ -3226,7 +3226,7 @@ class GPENCIL_OT_SURFSK_add_surface(bpy.types.Operator):
return{"CANCELLED"}
elif self.strokes_type == "NO_STROKES":
- self.report({'WARNING'}, "There aren't any strokes.")
+ self.report({'WARNING'}, "There aren't any strokes attached to the object")
return{"CANCELLED"}
elif self.strokes_type == "CURVE_WITH_NON_BEZIER_SPLINES":
@@ -3298,7 +3298,7 @@ class GPENCIL_OT_SURFSK_edit_strokes(bpy.types.Operator):
self.report({'WARNING'}, "There shouldn't be more than one secondary object selected.")
return{"CANCELLED"}
elif self.strokes_type == "NO_STROKES" or self.strokes_type == "SELECTION_ALONE":
- self.report({'WARNING'}, "There aren't any strokes.")
+ self.report({'WARNING'}, "There aren't any strokes attached to the object")
return{"CANCELLED"}
else:
return{"CANCELLED"}
@@ -3465,7 +3465,7 @@ class CURVE_OT_SURFSK_reorder_splines(bpy.types.Operator):
self.main_curve.data.splines[0].bezier_points[0].select_control_point = True
bpy.ops.object.editmode_toggle('INVOKE_REGION_WIN')
- bpy.ops.curve.separate('INVOKE_REGION_WIN')
+ bpy.ops.curve.separate('EXEC_REGION_WIN')
bpy.ops.object.editmode_toggle('INVOKE_REGION_WIN')
diff --git a/mesh_custom_normals_tools.py b/mesh_custom_normals_tools.py
new file mode 100644
index 00000000..462b7609
--- /dev/null
+++ b/mesh_custom_normals_tools.py
@@ -0,0 +1,90 @@
+# ***** BEGIN GPL LICENSE BLOCK *****
+#
+#
+# This program is free software; you can redistribute it and/or
+# modify it under the terms of the GNU General Public License
+# as published by the Free Software Foundation; either version 2
+# of the License, or (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this program; if not, write to the Free Software Foundation,
+# Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
+#
+# ***** END GPL LICENCE BLOCK *****
+
+bl_info = {
+ "name": "Custom Normals Tools",
+ "author": "Bastien Montagne (mont29)",
+ "version": (0, 0, 1),
+ "blender": (2, 75, 0),
+ "location": "3DView > Tools",
+ "description": "Various tools/helpers for custom normals",
+ "warning": "",
+ "support": 'OFFICIAL',
+ "category": "Mesh",
+}
+
+
+import bpy
+
+
+class MESH_OT_flip_custom_normals(bpy.types.Operator):
+ """Flip active mesh's normals, including custom ones (only in Object mode)"""
+ bl_idname = "mesh.flip_custom_normals"
+ bl_label = "Flip Custom Normals"
+ bl_options = {'UNDO'}
+
+ @classmethod
+ def poll(cls, context):
+ return context.object and context.object.type == 'MESH' and context.object.mode == 'OBJECT'
+
+ def execute(self, context):
+ me = context.object.data
+
+ if me.has_custom_normals:
+ me.calc_normals_split()
+ clnors = [0.0] * 3 * len(me.loops)
+ me.loops.foreach_get("normal", clnors)
+
+ bpy.ops.object.mode_set(mode='EDIT')
+ bpy.ops.mesh.select_all(action='SELECT')
+ bpy.ops.mesh.flip_normals()
+ bpy.ops.object.mode_set(mode='OBJECT')
+
+ me = context.object.data
+ if me.has_custom_normals:
+ clnors[:] = list(zip(*[(-n for n in clnors)] * 3))
+ # We also have to take in account that the winding was reverted...
+ for p in me.polygons:
+ ls = p.loop_start + 1
+ le = ls + p.loop_total - 1
+ clnors[ls:le] = reversed(clnors[ls:le])
+ me.normals_split_custom_set(clnors)
+
+ context.scene.update()
+ return {'FINISHED'}
+
+
+def flip_custom_normals_draw_func(self, context):
+ if isinstance(self, bpy.types.Panel):
+ self.layout.label("Custom Normal Tools:")
+ self.layout.operator(MESH_OT_flip_custom_normals.bl_idname)
+
+
+def register():
+ bpy.utils.register_module(__name__)
+ bpy.types.VIEW3D_PT_tools_object.append(flip_custom_normals_draw_func)
+
+
+def unregister():
+ bpy.types.VIEW3D_PT_tools_object.remove(flip_custom_normals_draw_func)
+ bpy.utils.unregister_module(__name__)
+
+
+if __name__ == "__main__":
+ register()
diff --git a/mesh_inset/__init__.py b/mesh_inset/__init__.py
index 316e4592..28fa2131 100644
--- a/mesh_inset/__init__.py
+++ b/mesh_inset/__init__.py
@@ -151,10 +151,10 @@ def do_inset(mesh, amount, height, region, as_percent):
for i in range(orig_numv, len(m.points.pos)):
bvertnew = bm.verts.new(m.points.pos[i])
bm.verts.index_update()
+ bm.verts.ensure_lookup_table()
new_faces = []
start_faces = len(bm.faces)
for i, newf in enumerate(blender_faces):
- bm.verts.ensure_lookup_table()
vs = remove_dups([bm.verts[j] for j in newf])
if len(vs) < 3:
continue
@@ -167,13 +167,13 @@ def do_inset(mesh, amount, height, region, as_percent):
# bfacenew.copy_from_face_interp(oldface)
else:
bfacenew = bm.faces.new(vs)
- # remove original faces
+ new_faces.append(bfacenew)
+ # deselect original faces
for face in selfaces:
face.select_set(False)
- bm.faces.remove(face)
- bm.faces.index_update()
- # mesh.update(calc_edges=True)
- # select all new faces
+ # remove original faces
+ bmesh.ops.delete(bm, geom=selfaces, context=5) # 5 = DEL_FACES
+ # select all new faces (should only select inner faces, but that needs more surgery on rest of code)
for face in new_faces:
face.select_set(True)
diff --git a/mocap/mocap_constraints.py b/mocap/mocap_constraints.py
index d9f41ea1..4509193c 100644
--- a/mocap/mocap_constraints.py
+++ b/mocap/mocap_constraints.py
@@ -300,15 +300,16 @@ def setConstraint(m_constraint, context):
print("please wait a moment, calculating fix")
for t in range(s, e):
context.scene.frame_set(t)
- axis = obj.matrix_world.to_3x3() * Vector((0, 0, 100))
+ axis = obj.matrix_world.to_3x3() * Vector((0, 0, 1))
offset = obj.matrix_world.to_3x3() * Vector((0, 0, m_constraint.targetDist))
ray_origin = (cons_obj.matrix * obj.matrix_world).to_translation() - offset # world position of constrained bone
ray_target = ray_origin + axis
#convert ray points to floor's object space
ray_origin = floor.matrix_world.inverted() * ray_origin
ray_target = floor.matrix_world.inverted() * ray_target
- hit, nor, ind = floor.ray_cast(ray_origin, ray_target)
- if hit != Vector((0, 0, 0)):
+ ray_direction = ray_target - ray_origin
+ ok, hit, nor, ind = floor.ray_cast(ray_origin, ray_direction)
+ if ok:
bakedPos[t] = (floor.matrix_world * hit)
bakedPos[t] += Vector((0, 0, m_constraint.targetDist))
else:
diff --git a/modules/rna_manual_reference.py b/modules/rna_manual_reference.py
index 9255aed6..7bdcf69e 100644
--- a/modules/rna_manual_reference.py
+++ b/modules/rna_manual_reference.py
@@ -1,4 +1,4 @@
-# RNA WIKI REFERENCE
+# RNA MANUAL REFERENCE
#
# This file maps RNA to online URL's for right mouse context menu documentation access
#
@@ -6,9 +6,9 @@
# pointing the manuals to the proper language,
# specified in the 'User Preferences Window' by the users
# Some Languages have their manual page, using a prefix or
-# being preceeded by their respective reference, for example
+# being preceded by their respective reference, for example
#
-# Doc:2.6 --> Doc:FR/2.6
+# manual/ --> manual/ru/
#
# The table in the script, contains all of the languages we have in the
# Blender manual website, for those other languages that still
@@ -18,66 +18,47 @@
# to the Blender UI translation table
# The Blender manual uses a list of ISO_639-1 codes to convert languages to manual prefixes
#
-# "DANISH": "DK", - Present in the manual, but not present in Blender UI translations table
-# "FARSI": "FA", - Present in the manual, but not present in Blender UI translations table
-# "KOREAN": "KO", - Present in the manual, but not present in Blender UI translations table
-# "LITHUANIAN": "LT", - Present in the manual, but not present in Blender UI translations table
-# "MACEDONIAN": "MK", - Present in the manual, but not present in Blender UI translations table
-# "MONGOLIAN": "MN", - Present in the manual, but not present in Blender UI translations table
-# "ROMANIAN": "RO", - Present in the manual, but not present in Blender UI translations table
-#
-# "ESTONIAN": "ET", - Present in the manual, as an empty page, not present in UI translations table
-#
-# "CROATIAN": "HR", - Present in Blender UI translations table, but without manual
-# "KYRGYZ": "KY", - Present in Blender UI translations table, but without manual
-# "NEPALI": "NE", - Present in Blender UI translations table, but without manual
-# "PERSIAN": "FA", - Present in Blender UI translations table, but without manual
-# "HEBREW": "HE", - Present in Blender UI translations table, but without manual
-# "HUNGARIAN": "HU", - Present in Blender UI translations table, but without manual
-# "SERBIAN_LATIN": "SR", - Present in Blender UI translations table, but without manual
-#
-# NOTES:
-#
-# CHINESE - Present in the manual as simplified chinese, for both the traditional and simplified
-# PORTUGUESE - Present in the manual for both Portuguese and Brazilian Portuguese
-# THAILANDESE - It's the same being used for Turkish in the manual
-#
-# URL prefix is the: url_manual_prefix + url_manual_mapping[id]
+# URL is the: url_manual_prefix + url_manual_mapping[id]
+
+import bpy
url_manual_prefix = "https://www.blender.org/manual/"
-# TODO
-"""
+language = ""
+if bpy.context.user_preferences.system.use_international_fonts:
+ language = bpy.context.user_preferences.system.language
+ if language == 'DEFAULT':
+ import os
+ language = os.getenv('LANG', '').split('.')[0]
+
LANG = {
- "ARABIC": "AR",
- "BULGARIAN": "BG",
- "CATALAN": "CA",
- "CZECH": "CZ",
- "GERMAN": "DE",
- "GREEK": "EL",
- "RUSSIAN": "RU",
- "SERBIAN": "SR",
- "SWEDISH": "SV",
- "TURKISH": "TH",
- "UKRAINIAN": "UK",
- "SPANISH": "ES",
- "FINNISH": "FI",
- "FRENCH": "FR",
- "INDONESIAN": "ID",
- "ITALIAN": "IT",
- "JAPANESE": "JA",
- "DUTCH": "NL",
- "POLISH": "PL",
- "PORTUGUESE": "PT",
- "BRAZILIANPORTUGUESE": "PT",
- "SIMPLIFIED_CHINESE": "ZH",
- "TRADITIONAL_CHINESE": "ZH",
-}.get(__import__("bpy").context.user_preferences.system.language)
-
-url_manual_prefix = url_manual_prefix \
- if LANG is None \
- else url_manual_prefix.replace("Doc:2.6", "Doc:" + LANG + "/" + "2.6")
-"""
+# "ar_EG": "ar",
+# "bg_BG": "bg",
+# "ca_AD": "ca",
+# "cs_CZ": "cz",
+ "de_DE": "de", # German.
+# "el_GR": "el",
+ "ru_RU": "ru", # Russian.
+# "sr_RS": "sr",
+# "sv_SE": "sv",
+# "tr_TR": "th",
+# "uk_UA": "uk",
+# "es": "es",
+# "fi_FI": "fi",
+ "fr_FR": "fr", # French.
+# "id_ID": "id",
+# "it_IT": "it",
+# "ja_JP": "ja",
+# "nl_NL": "nl",
+# "pl_PL": "pl",
+# "pt_PT": "pt",
+# "pt_BR": "pt",
+ "zh_CN": "zh.cn", # Chinese - Should be changed to "zh_cn" but there is a bug in sphinx-intl.
+ "zh_TW": "zh.cn", # Taiwanese Chinese - for until we have a zh_tw version?
+}.get(language)
+
+if LANG is not None:
+ url_manual_prefix = url_manual_prefix.replace("manual", "manual/" + LANG)
# - The first item is a wildcard - typical file system globbing
# using python module 'fnmatch.fnmatch'
@@ -98,61 +79,65 @@ url_manual_mapping = (
# *** Modifiers ***
# --- Intro ---
- ("bpy.types.Modifier.show_*", "modifiers/the_stack.html"),
- ("bpy.types.Modifier.*", "modifiers"), # catchall for various generic options
+ ("bpy.types.Modifier.show_*", "modeling/modifiers/the_stack.html"),
+ ("bpy.types.Modifier.*", "modeling/modifiers"), # catchall for various generic options
# --- Modify Modifiers ---
- ("bpy.types.MeshCacheModifier.*", "modifiers/modify/mesh_cache.html"),
- ("bpy.types.UVProjectModifier.*", "modifiers/modify/uv_project.html"),
- ("bpy.types.UVWarpModifier.*", "modifiers/modify/uv_warp.html"),
- ("bpy.types.VertexWeightMixModifier.*", "modifiers/modify/vertex_weight.html"),
- ("bpy.types.VertexWeightEditModifier.*", "modifiers/modify/vertex_weight.html"),
- ("bpy.types.VertexWeightProximityModifier.*", "modifiers/modify/vertex_weight.html"),
+ ("bpy.types.MeshCacheModifier.*", "modeling/modifiers/modify/mesh_cache.html"),
+ ("bpy.types.UVProjectModifier.*", "modeling/modifiers/modify/uv_project.html"),
+ ("bpy.types.UVWarpModifier.*", "modeling/modifiers/modify/uv_warp.html"),
+ ("bpy.types.VertexWeightMixModifier.*", "modeling/modifiers/modify/vertex_weight.html"),
+ ("bpy.types.VertexWeightEditModifier.*", "modeling/modifiers/modify/vertex_weight.html"),
+ ("bpy.types.VertexWeightProximityModifier.*", "modeling/modifiers/modify/vertex_weight.html"),
# --- Generate Modifiers ---
- ("bpy.types.ArrayModifier.*", "modifiers/generate/array.html"),
- ("bpy.types.BevelModifier.*", "modifiers/generate/bevel.html"),
- ("bpy.types.BooleanModifier.*", "modifiers/generate/booleans.html"),
- ("bpy.types.BuildModifier.*", "modifiers/generate/build.html"),
- ("bpy.types.DecimateModifier.*", "modifiers/generate/decimate.html"),
- ("bpy.types.EdgeSplitModifier.*", "modifiers/generate/edge_split.html"),
- ("bpy.types.MaskModifier.*", "modifiers/generate/mask.html"),
- ("bpy.types.MirrorModifier.*", "modifiers/generate/mirror.html"),
- ("bpy.types.MultiresModifier.*", "modifiers/generate/multiresolution.html"),
- ("bpy.types.RemeshModifier.*", "modifiers/generate/remesh.html"),
- ("bpy.types.ScrewModifier.*", "modifiers/generate/screw.html"),
- ("bpy.types.SkinModifier.*", "modifiers/generate/skin.html"),
- ("bpy.types.SolidifyModifier.*", "modifiers/generate/solidify.html"),
- ("bpy.types.SubsurfModifier.*", "modifiers/generate/subsurf.html"),
- ("bpy.types.TriangulateModifier.*","modifiers/generate/triangulate.html"),
+ ("bpy.types.ArrayModifier.*", "modeling/modifiers/generate/array.html"),
+ ("bpy.types.BevelModifier.*", "modeling/modifiers/generate/bevel.html"),
+ ("bpy.types.BooleanModifier.*", "modeling/modifiers/generate/booleans.html"),
+ ("bpy.types.BuildModifier.*", "modeling/modifiers/generate/build.html"),
+ ("bpy.types.DecimateModifier.*", "modeling/modifiers/generate/decimate.html"),
+ ("bpy.types.EdgeSplitModifier.*", "modeling/modifiers/generate/edge_split.html"),
+ ("bpy.types.MaskModifier.*", "modeling/modifiers/generate/mask.html"),
+ ("bpy.types.MirrorModifier.*", "modeling/modifiers/generate/mirror.html"),
+ ("bpy.types.MultiresModifier.*", "modeling/modifiers/generate/multiresolution.html"),
+ ("bpy.types.RemeshModifier.*", "modeling/modifiers/generate/remesh.html"),
+ ("bpy.types.ScrewModifier.*", "modeling/modifiers/generate/screw.html"),
+ ("bpy.types.SkinModifier.*", "modeling/modifiers/generate/skin.html"),
+ ("bpy.types.SolidifyModifier.*", "modeling/modifiers/generate/solidify.html"),
+ ("bpy.types.SubsurfModifier.*", "modeling/modifiers/generate/subsurf.html"),
+ ("bpy.types.TriangulateModifier.*","modeling/modifiers/generate/triangulate.html"),
# --- Deform Modifiers ---
- ("bpy.types.ArmatureModifier.*", "modifiers/deform/armature.html"),
- ("bpy.types.CastModifier.*", "modifiers/deform/cast.html"),
- ("bpy.types.CurveModifier.*", "modifiers/deform/curve.html"),
- ("bpy.types.DisplaceModifier.*", "modifiers/deform/displace.html"),
- ("bpy.types.HookModifier.*", "modifiers/deform/hooks.html"),
- ("bpy.types.LaplacianSmoothModifier.*", "modifiers/deform/laplacian_smooth.html"),
- ("bpy.types.LatticeModifier.*", "modifiers/deform/lattice.html"),
- ("bpy.types.MeshDeformModifier.*", "modifiers/deform/mesh_deform.html"),
- ("bpy.types.ShrinkwrapModifier.*", "modifiers/deform/shrinkwrap.html"),
- ("bpy.types.SimpleDeformModifier.*", "modifiers/deform/simple_deform.html"),
- ("bpy.types.SmoothModifier.*", "modifiers/deform/smooth.html"),
+ ("bpy.types.ArmatureModifier.*", "modeling/modifiers/deform/armature.html"),
+ ("bpy.types.CastModifier.*", "modeling/modifiers/deform/cast.html"),
+ ("bpy.types.CurveModifier.*", "modeling/modifiers/deform/curve.html"),
+ ("bpy.types.DisplaceModifier.*", "modeling/modifiers/deform/displace.html"),
+ ("bpy.types.HookModifier.*", "modeling/modifiers/deform/hooks.html"),
+ ("bpy.types.LaplacianSmoothModifier.*", "modeling/modifiers/deform/laplacian_smooth.html"),
+ ("bpy.types.LatticeModifier.*", "modeling/modifiers/deform/lattice.html"),
+ ("bpy.types.MeshDeformModifier.*", "modeling/modifiers/deform/mesh_deform.html"),
+ ("bpy.types.ShrinkwrapModifier.*", "modeling/modifiers/deform/shrinkwrap.html"),
+ ("bpy.types.SimpleDeformModifier.*", "modeling/modifiers/deform/simple_deform.html"),
+ ("bpy.types.SmoothModifier.*", "modeling/modifiers/deform/smooth.html"),
# ("bpy.types.SurfaceModifier.*", "Modifiers/Deform/"), # USERS NEVER SEE THIS
- ("bpy.types.WarpModifier.*", "modifiers/deform/warp.html"),
- ("bpy.types.WaveModifier.*", "modifiers/deform/wave.html"),
+ ("bpy.types.WarpModifier.*", "modeling/modifiers/deform/warp.html"),
+ ("bpy.types.WaveModifier.*", "modeling/modifiers/deform/wave.html"),
# --- Simulate Modifiers ---
- ("bpy.types.ClothModifier.*", "physics/cloth.html"),
+ ("bpy.types.ClothModifier.*", "physics/cloth"),
("bpy.types.CollisionModifier.*", "physics/collision.html"),
("bpy.types.DynamicPaintModifier.*", "physics/dynamic_paint"),
- ("bpy.types.ExplodeModifier.*", "modifiers/simulate/explode.html"),
+ ("bpy.types.ExplodeModifier.*", "modeling/modifiers/simulate/explode.html"),
("bpy.types.FluidSimulationModifier.*", "physics/fluid"),
- ("bpy.types.OceanModifier.*", "modifiers/simulate/ocean.html"),
- ("bpy.types.ParticleInstanceModifier.*", "modifiers/simulate/particle_instance.html"),
+ ("bpy.types.OceanModifier.*", "modeling/modifiers/simulate/ocean.html"),
+ ("bpy.types.ParticleInstanceModifier.*", "modeling/modifiers/simulate/particle_instance.html"),
("bpy.types.ParticleSystemModifier.*", "physics/particles"),
("bpy.types.SmokeModifier.*", "physics/smoke"),
("bpy.types.SoftBodyModifier.*", "physics/soft_body"),
# *** Constraints ***
- ("bpy.types.Constraint.*", "rigging/constraints"),
- ("bpy.types.Constraint.mute", "rigging/constraints/the_stack.html"), # others could be added here?
+ ("bpy.types.Constraint.*", "rigging/constraints"),
+ ("bpy.types.Constraint.mute", "rigging/constraints/interface/the_stack.html"), # others could be added here?
+ # --- Motion Tracking Constraints ---
+ ("bpy.types.FollowTrackConstraint.*", "rigging/constraints/motion_tracking/follow_track.html"),
+ ("bpy.types.ObjectSolverConstraint.*", "rigging/constraints/motion_tracking/object_solver.html"),
+ ("bpy.types.CameraSolverConstraint.*", "rigging/constraints/motion_tracking/camera_solver.html"),
# --- Transform Constraints ---
("bpy.types.CopyLocationConstraint.*", "rigging/constraints/transform/copy_location.html"),
("bpy.types.CopyRotationConstraint.*", "rigging/constraints/transform/copy_rotation.html"),
@@ -165,272 +150,269 @@ url_manual_mapping = (
("bpy.types.MaintainVolumeConstraint.*", "rigging/constraints/transform/maintain_volume.html"),
("bpy.types.TransformConstraint.*", "rigging/constraints/transform/transformation.html"),
# --- Tracking Constraints ---
- ("bpy.types.ClampToConstraint.*", "rigging/constraints/tracking/clamp_to.html"),
- ("bpy.types.DampedTrackConstraint.*", "rigging/constraints/tracking/damped_track.html"),
- ("bpy.types.KinematicConstraint.*", "rigging/constraints/tracking/ik_solver.html"),
- ("bpy.types.LockedTrackConstraint.*", "rigging/constraints/tracking/locked_track.html"),
- ("bpy.types.SplineIKConstraint.*", "rigging/constraints/tracking/spline_ik.html"),
- ("bpy.types.StretchToConstraint.*", "rigging/constraints/tracking/stretch_to.html"),
- ("bpy.types.TrackToConstraint.*", "rigging/constraints/tracking/track_to.html"),
+ ("bpy.types.ClampToConstraint.*", "rigging/constraints/tracking/clamp_to.html"),
+ ("bpy.types.DampedTrackConstraint.*", "rigging/constraints/tracking/damped_track.html"),
+ ("bpy.types.KinematicConstraint.*", "rigging/constraints/tracking/ik_solver.html"),
+ ("bpy.types.LockedTrackConstraint.*", "rigging/constraints/tracking/locked_track.html"),
+ ("bpy.types.SplineIKConstraint.*", "rigging/constraints/tracking/spline_ik.html"),
+ ("bpy.types.StretchToConstraint.*", "rigging/constraints/tracking/stretch_to.html"),
+ ("bpy.types.TrackToConstraint.*", "rigging/constraints/tracking/track_to.html"),
# --- Relationship Constraints ---
("bpy.types.ActionConstraint.*", "rigging/constraints/relationship/action.html"),
- ("bpy.types.CameraSolverConstraint.*", "motion_tracking"), # not exact match
("bpy.types.ChildOfConstraint.*", "rigging/constraints/relationship/action.html"),
("bpy.types.FloorConstraint.*", "rigging/constraints/relationship/child_of.html"),
- ("bpy.types.FollowPathConstraint.*", "rigging/constraints/relationship/floor.html"),
- ("bpy.types.FollowTrackConstraint.*", "rigging/constraints/relationship/follow_path.html"),
- ("bpy.types.ObjectSolverConstraint.*", "motion_tracking"), # not exact match
+ ("bpy.types.FollowPathConstraint.*", "rigging/constraints/relationship/follow_path.html"),
("bpy.types.PivotConstraint.*", "rigging/constraints/relationship/pivot.html"),
- ("bpy.types.PythonConstraint.*", "rigging/constraints/relationship/script.html"),
("bpy.types.RigidBodyJointConstraint.*", "rigging/constraints/relationship/rigid_body_joint.html"),
("bpy.types.ShrinkwrapConstraint.*", "rigging/constraints/relationship/shrinkwrap.html"),
- ("bpy.types.ImageFormatSettings.*", "render/output.html#file-type"),
- ("bpy.types.RenderSettings.filepath", "render/output.html#file-locations"),
- ("bpy.types.RenderSettings.display_mode", "render/display.html#displaying-renders"),
- ("bpy.types.RenderSettings.*", "render"), # catchall, todo - refine
+ # *** Render Settings ***
+ ("bpy.types.ImageFormatSettings.*", "data_system/files/image_formats.html"),
+ ("bpy.types.RenderSettings.filepath", "render/output/output.html#output-panel"),
+ ("bpy.types.RenderSettings.display_mode", "render/output/display.html#displaying-renders"),
+ ("bpy.types.RenderSettings.*", "render"), # catchall, todo - refine
# *** ID Subclasses ***
- ("bpy.types.Action.*", "animation/basics/actions.html"),
- #("bpy.types.Brush.*", ""), # TODO - manual has no place for this! XXX
- ("bpy.types.Curve.*", "modeling/curves"),
- ("bpy.types.GreasePencil.*", "grease_pencil"),
- ("bpy.types.Group.*", "modeling/objects/groups_and_parenting.html#grouping-objects"),
- ("bpy.types.Image.*", "render/blender_render/textures/types/image.html"),
- ("bpy.types.ShapeKey.*", "animation/techs/shape/shape_keys.html"), # not an id but include because of key
- ("bpy.types.Key.*", "animation/techs/shape/shape_keys.html"),
- #("bpy.types.Lattice.*", ""), # TODO - manual has no place for this! XXX
- ("bpy.types.Library.*", "data_system/linked_libraries.html"),
- #("bpy.types.Mask.*", ""), # TODO - manual has no place for this! XXX
+ ("bpy.types.Action.*", "animation/actions.html"),
+ #("bpy.types.Brush.*", ""), # TODO - manual has no place for this! XXX
+ ("bpy.types.Curve.*", "modeling/curves"),
+ ("bpy.types.GreasePencil.*", "interface/grease_pencil/index.html"),
+ ("bpy.types.Group.*", "editors/3dview/relationships/groups.html"),
+ ("bpy.types.Image.*", "render/blender_render/textures/types/image.html"),
+ ("bpy.types.ShapeKey.*", "animation/shape_keys.html"), # not an id but include because of key
+ ("bpy.types.Key.*", "animation/shape_keys.html"),
+ #("bpy.types.Lattice.*", ""), # TODO - manual has no place for this! XXX
+ ("bpy.types.Library.*", "data_system/linked_libraries.html"),
+ ("bpy.types.Mask.*", "editors/movie_clip_editor/masking.html"),
+
# *** Materials (blender internal) ***
- ("bpy.types.Material.diffuse*", "render/blender_render/materials/properties/diffuse_shaders.html"),
+ ("bpy.types.Material.diffuse*", "render/blender_render/materials/properties/diffuse_shaders.html"),
("bpy.types.Material.specular*", "render/blender_render/materials/properties/specular_shaders.html"),
- ("bpy.types.Material.ambient*", "render/blender_render/materials/properties/shading.html"),
+ ("bpy.types.Material.ambient*", "render/blender_render/materials/properties/shading.html"),
("bpy.types.Material.preview_render_type", "render/blender_render/materials/properties/preview.html"),
- ("bpy.types.Material.*", "render/blender_render"), # catchall, until the section is filled in
-
+ ("bpy.types.Material.*", "render/blender_render"), # catchall, until the section is filled in
# ("bpy.types.MaterialSlot.link", "render/blender_render/materials/options.html#material-naming_and_linking"), # TODO, T42839
- ("bpy.types.MaterialVolume.*", "render/blender_render/materials/special_effects/volume.html"),
- ("bpy.types.MaterialHalo.*", "render/blender_render/materials/special_effects/halo.html"),
- ("bpy.types.MaterialStrand.*", "render/blender_render/materials/properties/strands.html"),
- ("bpy.types.MaterialSubsurfaceScattering.*", "render/blender_render/materials/properties/subsurface_scattering.html"),
- ("bpy.types.MaterialRaytraceMirror.*", "render/blender_render/materials/properties/mirror.html"),
- ("bpy.types.MaterialRaytraceTransparency.*", "render/blender_render/materials/properties/transparency.html#raytraced-transparency"),
+ ("bpy.types.MaterialVolume.*", "render/blender_render/materials/special_effects/volume.html"),
+ ("bpy.types.MaterialHalo.*", "render/blender_render/materials/special_effects/halo.html"),
+ ("bpy.types.MaterialStrand.*", "render/blender_render/materials/properties/strands.html"),
+ ("bpy.types.MaterialSubsurfaceScattering.*", "render/blender_render/materials/properties/subsurface_scattering.html"),
+ ("bpy.types.MaterialRaytraceMirror.*", "render/blender_render/materials/properties/mirror.html"),
+ ("bpy.types.MaterialRaytraceTransparency.*", "render/blender_render/materials/properties/transparency.html#raytraced-transparency"),
# ... todo, many more options
- ("bpy.types.MovieClip.*", "motion_tracking/index.html#movie-clip_editor.html"),
- ("bpy.types.MovieTrackingCamera.*", "motion_tracking/index.html#camera-data_panel"),
- ("bpy.types.MovieTrackingStabilization.*", "motion_tracking/index.html#tools-available-in-reconstruction-mode"),
- ("bpy.types.MovieTrackingTrack*", "motion_tracking/index.html#tools-available-in-tracking-mode"),
- ("bpy.types.MovieTracking*", "motion_tracking"),
- ("bpy.types.SpaceClipEditor.*", "motion_tracking/index.html#movie-clip-editor"),
- ("bpy.types.ColorManaged*", "render/post_process/cm_and_exposure.html"),
- #("bpy.types.NodeTree.*", ""), # dont document
- ("bpy.types.Object.*", "modeling/objects"), # catchall, todo - refine
- ("bpy.types.ParticleSettings.*", "physics/particles"),
- ("bpy.types.Scene.*", "getting_started/basics/interface/scenes.html"),
- ("bpy.types.Screen.*", "getting_started/basics/interface/screens.html"),
- #("bpy.types.Sound.*", ""), # TODO - manual has no place for this! XXX
- #("bpy.types.Speaker.*", ""), # TODO - manual has no place for this! XXX
- ("bpy.types.Text.*", "extensions/python/text_editor.html"),
- ("bpy.types.Texture.*", "render/blender_render/textures"),
- ("bpy.types.VectorFont.*", "modeling/texts"),
- ("bpy.types.WindowManager.*", "getting_started/basics/interface/window_system"),
- ("bpy.types.World.*", "render/blender_render/world"),
- ("bpy.types.WorldLighting.*ao*", "render/blender_render/lighting/ambient_occlusion.html"),
- ("bpy.types.WorldLighting.*ambient*", "render/blender_render/lighting/ambient_occlusion.html"),
- ("bpy.types.WorldLighting.*environment*", "render/blender_render/lighting/ambient_light.html"),
+ ("bpy.types.MovieClip.*", "editors/movie_clip_editor/index.html"),
+ ("bpy.types.MovieTrackingCamera.*", "editors/movie_clip_editor/tracking/clip.html#tools-available-in-reconstruction-mode"),
+ ("bpy.types.MovieTrackingStabilization.*", "editors/movie_clip_editor/index.html#tools-available-in-reconstruction-mode"),
+ ("bpy.types.MovieTrackingTrack*", "editors/movie_clip_editor/index.html#tools-available-in-tracking-mode"),
+ ("bpy.types.MovieTracking*", "editors/movie_clip_editor"),
+ ("bpy.types.SpaceClipEditor.*", "editors/movie_clip_editor/introduction.html"),
+ ("bpy.types.ColorManaged*", "render/post_process/cm_and_exposure.html"),
+ #("bpy.types.NodeTree.*", ""), # dont document
+ ("bpy.types.Object.*", "editors/3dview/objects"), # catchall, todo - refine
+ ("bpy.types.ParticleSettings.*", "physics/particles"),
+ ("bpy.types.Scene.*", "data_system/scenes.html"),
+ ("bpy.types.Screen.*", "interface/screens.html"),
+ ("bpy.types.Sound.*", "editors/3dview/objects/types/speaker.html"),
+ ("bpy.types.Speaker.*", "editors/3dview/objects/types/speaker.html"),
+ ("bpy.types.Text.*", "editors/text_editor.html"),
+ ("bpy.types.Texture.*", "render/blender_render/textures"),
+ ("bpy.types.VectorFont.*", "modeling/texts"),
+ ("bpy.types.WindowManager.*", "interface/window_system"),
+ ("bpy.types.World.*", "render/blender_render/world"),
+ ("bpy.types.WorldLighting.*ao*", "render/blender_render/lighting/ambient_occlusion.html"),
+ ("bpy.types.WorldLighting.*ambient*", "render/blender_render/lighting/ambient_occlusion.html"),
+ ("bpy.types.WorldLighting.*environment*", "render/blender_render/lighting/ambient_light.html"),
# only other WorldLighting props are in Gather panel
- ("bpy.types.WorldLighting.*", "render/blender_render/lighting/ambient_occlusion.html#gather"),
+ ("bpy.types.WorldLighting.*", "render/blender_render/lighting/ambient_occlusion.html#gather"),
# *** Cycles ***
- ("bpy.types.CyclesRenderSettings.*", "render/cycles/settings/integrator.html"),
+ ("bpy.types.CyclesRenderSettings.*", "render/cycles/settings/integrator.html"),
("bpy.types.CyclesVisibilitySettings.*", "render/cycles/settings/light_paths.html#ray-visibility"),
- ("bpy.types.CyclesWorldSettings.*", "render/cycles/world.html"),
- ("bpy.types.SceneRenderLayer.*pass*", "render/blender_render/passes.html"),
- ("bpy.types.SceneRenderLayer.*", "render/post_process/layers.html"),
- ("bpy.types.Cycles*", "render/cycles"),
+ ("bpy.types.CyclesWorldSettings.*", "render/cycles/world.html"),
+ ("bpy.types.SceneRenderLayer.*pass*", "render/blender_render/passes.html"),
+ ("bpy.types.SceneRenderLayer.*", "render/post_process/layers.html"),
+ ("bpy.types.Cycles*", "render/cycles"),
# Currently all manual links on all sockets and values (such as Fac, Roughness, Color...) are NodeSocket* type.
# It'd be much better if the name of the socket could be used for the manual reference
- ("bpy.types.NodeSocket*", "composite_nodes/node_controls.html"), # no generic socket type page exists, but composite types are the same
+ ("bpy.types.NodeSocket*", "editors/node_editor/node_parts.html"), # no generic socket type page exists, but composite types are the same
# *** Cycles Material Nodes ***
# Outputs
- ("bpy.types.ShaderNodeOutputLamp.*", "render/cycles/lamps.html"),
- ("bpy.types.ShaderNodeOutputMaterial.*", "render/cycles/materials"),
- ("bpy.types.ShaderNodeOutputWorld.*", "render/cycles/world.html"),
+ ("bpy.types.ShaderNodeOutputLamp.*", "render/cycles/lamps.html"),
+ ("bpy.types.ShaderNodeOutputMaterial.*", "render/cycles/materials"),
+ ("bpy.types.ShaderNodeOutputWorld.*", "render/cycles/world.html"),
# Shaders
- ("bpy.types.ShaderNodeAddShader.*", "render/cycles/nodes/shaders.html#mix-and-add"),
- ("bpy.types.ShaderNodeAmbientOcclusion.*", "render/cycles/nodes/shaders.html#ambient-occlusion"),
- ("bpy.types.ShaderNodeBackground.*", "render/cycles/nodes/shaders.html#background"),
- ("bpy.types.ShaderNodeBsdfAnisotropic.*", "render/cycles/nodes/shaders.html#anisotropic"),
- ("bpy.types.ShaderNodeBsdfDiffuse.*", "render/cycles/nodes/shaders.html#diffuse"),
- ("bpy.types.ShaderNodeBsdfGlass.*", "render/cycles/nodes/shaders.html#glass"),
- ("bpy.types.ShaderNodeBsdfGlossy.*", "render/cycles/nodes/shaders.html#glossy"),
- ("bpy.types.ShaderNodeBsdfHair.*", "render/cycles/nodes/shaders.html"), # todo doc
- ("bpy.types.ShaderNodeBsdfRefraction.*", "render/cycles/nodes/shaders.html#refraction"),
- ("bpy.types.ShaderNodeBsdfToon.*", "render/cycles/nodes/shaders.html#toon"),
- ("bpy.types.ShaderNodeBsdfTranslucent.*", "render/cycles/nodes/shaders.html#translucent"),
- ("bpy.types.ShaderNodeBsdfTransparent.*", "render/cycles/nodes/shaders.html#transparent"),
- ("bpy.types.ShaderNodeBsdfVelvet.*", "render/cycles/nodes/shaders.html#velvet"),
- ("bpy.types.ShaderNodeEmission.*", "render/cycles/nodes/shaders.html#emission"),
- ("bpy.types.ShaderNodeHoldout.*", "render/cycles/nodes/shaders.html#holdout"),
- ("bpy.types.ShaderNodeMixShader.*", "render/cycles/nodes/shaders.html#mix-and-add"),
+ ("bpy.types.ShaderNodeAddShader.*", "render/cycles/nodes/shaders.html#mix-and-add"),
+ ("bpy.types.ShaderNodeAmbientOcclusion.*", "render/cycles/nodes/shaders.html#ambient-occlusion"),
+ ("bpy.types.ShaderNodeBackground.*", "render/cycles/nodes/shaders.html#background"),
+ ("bpy.types.ShaderNodeBsdfAnisotropic.*", "render/cycles/nodes/shaders.html#anisotropic"),
+ ("bpy.types.ShaderNodeBsdfDiffuse.*", "render/cycles/nodes/shaders.html#diffuse"),
+ ("bpy.types.ShaderNodeBsdfGlass.*", "render/cycles/nodes/shaders.html#glass"),
+ ("bpy.types.ShaderNodeBsdfGlossy.*", "render/cycles/nodes/shaders.html#glossy"),
+ ("bpy.types.ShaderNodeBsdfHair.*", "render/cycles/nodes/shaders.html"), # todo doc
+ ("bpy.types.ShaderNodeBsdfRefraction.*", "render/cycles/nodes/shaders.html#refraction"),
+ ("bpy.types.ShaderNodeBsdfToon.*", "render/cycles/nodes/shaders.html#toon"),
+ ("bpy.types.ShaderNodeBsdfTranslucent.*", "render/cycles/nodes/shaders.html#translucent"),
+ ("bpy.types.ShaderNodeBsdfTransparent.*", "render/cycles/nodes/shaders.html#transparent"),
+ ("bpy.types.ShaderNodeBsdfVelvet.*", "render/cycles/nodes/shaders.html#velvet"),
+ ("bpy.types.ShaderNodeEmission.*", "render/cycles/nodes/shaders.html#emission"),
+ ("bpy.types.ShaderNodeHoldout.*", "render/cycles/nodes/shaders.html#holdout"),
+ ("bpy.types.ShaderNodeMixShader.*", "render/cycles/nodes/shaders.html#mix-and-add"),
("bpy.types.ShaderNodeSubsurfaceScattering.*", "render/cycles/nodes/shaders.html#subsurface-scattering"),
- ("bpy.types.ShaderNodeVolumeAbsorption.*", "render/cycles/nodes/shaders.html"), # todo doc
- ("bpy.types.ShaderNodeVolumeScatter.*", "render/cycles/nodes/shaders.html"), # todo doc
+ ("bpy.types.ShaderNodeVolumeAbsorption.*", "render/cycles/nodes/shaders.html"), # todo doc
+ ("bpy.types.ShaderNodeVolumeScatter.*", "render/cycles/nodes/shaders.html"), # todo doc
# Textures
- ("bpy.types.ShaderNodeTexBrick.*", "render/cycles/nodes/textures.html#brick-texture"),
- ("bpy.types.ShaderNodeTexChecker.*", "render/cycles/nodes/textures.html#checker-texture"),
+ ("bpy.types.ShaderNodeTexBrick.*", "render/cycles/nodes/textures.html#brick-texture"),
+ ("bpy.types.ShaderNodeTexChecker.*", "render/cycles/nodes/textures.html#checker-texture"),
("bpy.types.ShaderNodeTexEnvironment.*", "render/cycles/nodes/textures.html#environment-texture"),
- ("bpy.types.ShaderNodeTexGradient.*", "render/cycles/nodes/textures.html#gradient-texture"),
- ("bpy.types.ShaderNodeTexImage.*", "render/cycles/nodes/textures.html#image-texture"),
- ("bpy.types.ShaderNodeTexMagic.*", "render/cycles/nodes/textures.html#magic-texture"),
- ("bpy.types.ShaderNodeTexMusgrave.*", "render/cycles/nodes/textures.html#musgrave-texture"),
- ("bpy.types.ShaderNodeTexNoise.*", "render/cycles/nodes/textures.html#noise-texture"),
- ("bpy.types.ShaderNodeTexSky.*", "render/cycles/nodes/textures.html#sky-texture"),
- ("bpy.types.ShaderNodeTexVoronoi.*", "render/cycles/nodes/textures.html#voronoi-texture"),
- ("bpy.types.ShaderNodeTexWave.*", "render/cycles/nodes/textures.html#wave-texture"),
+ ("bpy.types.ShaderNodeTexGradient.*", "render/cycles/nodes/textures.html#gradient-texture"),
+ ("bpy.types.ShaderNodeTexImage.*", "render/cycles/nodes/textures.html#image-texture"),
+ ("bpy.types.ShaderNodeTexMagic.*", "render/cycles/nodes/textures.html#magic-texture"),
+ ("bpy.types.ShaderNodeTexMusgrave.*", "render/cycles/nodes/textures.html#musgrave-texture"),
+ ("bpy.types.ShaderNodeTexNoise.*", "render/cycles/nodes/textures.html#noise-texture"),
+ ("bpy.types.ShaderNodeTexSky.*", "render/cycles/nodes/textures.html#sky-texture"),
+ ("bpy.types.ShaderNodeTexVoronoi.*", "render/cycles/nodes/textures.html#voronoi-texture"),
+ ("bpy.types.ShaderNodeTexWave.*", "render/cycles/nodes/textures.html#wave-texture"),
# Other
- ("bpy.types.ShaderNodeAttribute.*", "render/cycles/nodes/more.html#attribute"),
- ("bpy.types.ShaderNodeBlackbody.*", "render/cycles/nodes/more.html#blackbody"),
+ ("bpy.types.ShaderNodeAttribute.*", "render/cycles/nodes/more.html#attribute"),
+ ("bpy.types.ShaderNodeBlackbody.*", "render/cycles/nodes/more.html#blackbody"),
# ("bpy.types.ShaderNodeBrightContrast.*", ""),
- ("bpy.types.ShaderNodeBump.*", "render/cycles/nodes/more.html#bump"),
- ("bpy.types.ShaderNodeCameraData.*", "render/cycles/nodes/more.html"), # TODO doc
+ ("bpy.types.ShaderNodeBump.*", "render/cycles/nodes/more.html#bump"),
+ ("bpy.types.ShaderNodeCameraData.*", "render/cycles/nodes/more.html"), # TODO doc
# ("bpy.types.ShaderNodeCombineHSV.*", ""),
# ("bpy.types.ShaderNodeCombineRGB.*", ""),
- ("bpy.types.ShaderNodeFresnel.*", "render/cycles/nodes/more.html#fresnel"),
+ ("bpy.types.ShaderNodeFresnel.*", "render/cycles/nodes/more.html#fresnel"),
# ("bpy.types.ShaderNodeGamma.*", ""),
- ("bpy.types.ShaderNodeGeometry.*", "render/cycles/nodes/more.html#geometry"),
- ("bpy.types.ShaderNodeHairInfo.*", "render/cycles/nodes/more.html#hair-info"),
+ ("bpy.types.ShaderNodeGeometry.*", "render/cycles/nodes/more.html#geometry"),
+ ("bpy.types.ShaderNodeHairInfo.*", "render/cycles/nodes/more.html#hair-info"),
# ("bpy.types.ShaderNodeHueSaturation.*", ""),
# ("bpy.types.ShaderNodeInvert.*", ""),
- ("bpy.types.ShaderNodeLayerWeight.*", "render/cycles/nodes/more.html#layer-weight"),
- ("bpy.types.ShaderNodeLightFalloff.*", "render/cycles/nodes/more.html#light-falloff"),
- ("bpy.types.ShaderNodeLightPath.*", "render/cycles/nodes/more.html#light-path"),
- ("bpy.types.ShaderNodeMapping.*", "render/cycles/nodes/more.html#mapping"),
+ ("bpy.types.ShaderNodeLayerWeight.*", "render/cycles/nodes/more.html#layer-weight"),
+ ("bpy.types.ShaderNodeLightFalloff.*", "render/cycles/nodes/more.html#light-falloff"),
+ ("bpy.types.ShaderNodeLightPath.*", "render/cycles/nodes/more.html#light-path"),
+ ("bpy.types.ShaderNodeMapping.*", "render/cycles/nodes/more.html#mapping"),
# # ("bpy.types.ShaderNodeMath.*", ""),
# ("bpy.types.ShaderNodeMixRGB.*", ""),
- ("bpy.types.ShaderNodeNormalMap.*", "render/cycles/nodes/more.html#normal-map"),
- ("bpy.types.ShaderNodeObjectInfo.*", "render/cycles/nodes/more.html#object-info"),
- ("bpy.types.ShaderNodeParticleInfo.*", "render/cycles/nodes/more.html#particle-info"),
- ("bpy.types.ShaderNodeRGB.*", "render/cycles/nodes/more.html#rgb"),
+ ("bpy.types.ShaderNodeNormalMap.*", "render/cycles/nodes/more.html#normal-map"),
+ ("bpy.types.ShaderNodeObjectInfo.*", "render/cycles/nodes/more.html#object-info"),
+ ("bpy.types.ShaderNodeParticleInfo.*", "render/cycles/nodes/more.html#particle-info"),
+ ("bpy.types.ShaderNodeRGB.*", "render/cycles/nodes/more.html#rgb"),
# ("bpy.types.ShaderNodeRGBCurve.*", ""),
# ("bpy.types.ShaderNodeRGBToBW.*", ""),
# ("bpy.types.ShaderNodeSeparateHSV.*", ""),
# ("bpy.types.ShaderNodeSeparateRGB.*", ""),
- ("bpy.types.ShaderNodeTangent.*", "render/cycles/nodes/more.html#tangent"),
- ("bpy.types.ShaderNodeTexCoord.*", "render/cycles/nodes/more.html#texture-coordinates"),
- ("bpy.types.ShaderNodeValue.*", "render/cycles/nodes/more.html#value"),
+ ("bpy.types.ShaderNodeTangent.*", "render/cycles/nodes/more.html#tangent"),
+ ("bpy.types.ShaderNodeTexCoord.*", "render/cycles/nodes/more.html#texture-coordinates"),
+ ("bpy.types.ShaderNodeValue.*", "render/cycles/nodes/more.html#value"),
# ("bpy.types.ShaderNodeVectorCurve.*", ""),
- ("bpy.types.ShaderNodeVectorMath.*", "render/cycles/nodes/more.html"), # TODO doc
+ ("bpy.types.ShaderNodeVectorMath.*", "render/cycles/nodes/more.html"), # TODO doc
("bpy.types.ShaderNodeVectorTransform.*", "render/cycles/nodes/more.html#vector-transform"),
- ("bpy.types.ShaderNodeWavelength.*", "render/cycles/nodes/more.html#wavelength"),
- ("bpy.types.ShaderNodeWireframe.*", "render/cycles/nodes/more.html#wireframe"),
+ ("bpy.types.ShaderNodeWavelength.*", "render/cycles/nodes/more.html#wavelength"),
+ ("bpy.types.ShaderNodeWireframe.*", "render/cycles/nodes/more.html#wireframe"),
- ("bpy.types.ShaderNodeGroup.*", "composite_nodes/node_groups.html"),
- ("bpy.types.ShaderNode*", "render/cycles/nodes"),
+ ("bpy.types.ShaderNodeGroup.*", "editors/node_editor/node_groups.html"),
+ ("bpy.types.ShaderNode*", "render/cycles/nodes"),
- ("bpy.types.ShaderNodeScript.*", "render/cycles/osl.html"),
+ ("bpy.types.ShaderNodeScript.*", "render/cycles/osl.html"),
# *** Compositing Nodes ***
# Input
- ("bpy.types.CompositorNodeBokehImage.*", "composite_nodes/types/input/bokeh_image.html"),
- ("bpy.types.CompositorNodeImage.*", "composite_nodes/types/input/image.html"),
- ("bpy.types.CompositorNodeMask.*", "composite_nodes/types/input/mask.html"),
- ("bpy.types.CompositorNodeMovieClip.*", "composite_nodes/types/input/movie_clip.html"),
- ("bpy.types.CompositorNodeRGB.*", "composite_nodes/types/input/rgb.html"),
- ("bpy.types.CompositorNodeRLayers.*", "composite_nodes/types/input/render_layers.html"),
- ("bpy.types.CompositorNodeTexture.*", "composite_nodes/types/input/texture.html"),
- ("bpy.types.CompositorNodeTime.*", "composite_nodes/types/input/time.html"),
- ("bpy.types.CompositorNodeTrackPos.*", "composite_nodes/types/input/track_position.html"),
- ("bpy.types.CompositorNodeValue.*", "composite_nodes/types/input/value.html"),
+ ("bpy.types.CompositorNodeBokehImage.*", "compositing/types/input/bokeh_image.html"),
+ ("bpy.types.CompositorNodeImage.*", "compositing/types/input/image.html"),
+ ("bpy.types.CompositorNodeMask.*", "compositing/types/input/mask.html"),
+ ("bpy.types.CompositorNodeMovieClip.*", "compositing/types/input/movie_clip.html"),
+ ("bpy.types.CompositorNodeRGB.*", "compositing/types/input/rgb.html"),
+ ("bpy.types.CompositorNodeRLayers.*", "compositing/types/input/render_layers.html"),
+ ("bpy.types.CompositorNodeTexture.*", "compositing/types/input/texture.html"),
+ ("bpy.types.CompositorNodeTime.*", "compositing/types/input/time.html"),
+ ("bpy.types.CompositorNodeTrackPos.*", "compositing/types/input/track_position.html"),
+ ("bpy.types.CompositorNodeValue.*", "compositing/types/input/value.html"),
# Output
- ("bpy.types.CompositorNodeComposite.*", "composite_nodes/types/output/composite.html"),
- ("bpy.types.CompositorNodeLevels.*", "composite_nodes/types/output/levels.html"),
- ("bpy.types.CompositorNodeOutputFile*", "composite_nodes/types/output/file.html"),
- ("bpy.types.CompositorNodeSplitViewer.*", "composite_nodes/types/output/split_viewer.html"),
- ("bpy.types.CompositorNodeViewer.*", "composite_nodes/types/output/viewer.html"),
+ ("bpy.types.CompositorNodeComposite.*", "compositing/types/output/composite.html"),
+ ("bpy.types.CompositorNodeLevels.*", "compositing/types/output/levels.html"),
+ ("bpy.types.CompositorNodeOutputFile*", "compositing/types/output/file.html"),
+ ("bpy.types.CompositorNodeSplitViewer.*", "compositing/types/output/split_viewer.html"),
+ ("bpy.types.CompositorNodeViewer.*", "compositing/types/output/viewer.html"),
# Color
- ("bpy.types.CompositorNodeAlphaOver.*", "composite_nodes/types/color/alpha_over.html"),
- ("bpy.types.CompositorNodeBrightContrast.*", "composite_nodes/types/color/bright_contrast.html"),
- ("bpy.types.CompositorNodeColorBalance.*", "composite_nodes/types/color/bright_contrast.html"),
- ("bpy.types.CompositorNodeColorCorrection.*", "composite_nodes/types/color/color_correction.html"),
- ("bpy.types.CompositorNodeCurveRGB.*", "composite_nodes/types/color/rgb_curves.html"),
- ("bpy.types.CompositorNodeGamma.*", "composite_nodes/types/color/gamma.html"),
- ("bpy.types.CompositorNodeHueCorrect.*", "composite_nodes/types/color/hue_correct.html"),
- ("bpy.types.CompositorNodeHueSat.*", "composite_nodes/types/color/hue_saturation.html"),
- ("bpy.types.CompositorNodeInvert.*", "composite_nodes/types/color/invert.html"),
- ("bpy.types.CompositorNodeMixRGB.*", "composite_nodes/types/color/mix.html"),
- ("bpy.types.CompositorNodeTonemap.*", "composite_nodes/types/color/tone_map.html"),
- ("bpy.types.CompositorNodeZcombine.*", "composite_nodes/types/color/z-combine.html"),
+ ("bpy.types.CompositorNodeAlphaOver.*", "compositing/types/color/alpha_over.html"),
+ ("bpy.types.CompositorNodeBrightContrast.*", "compositing/types/color/bright_contrast.html"),
+ ("bpy.types.CompositorNodeColorBalance.*", "compositing/types/color/bright_contrast.html"),
+ ("bpy.types.CompositorNodeColorCorrection.*", "compositing/types/color/color_correction.html"),
+ ("bpy.types.CompositorNodeCurveRGB.*", "compositing/types/color/rgb_curves.html"),
+ ("bpy.types.CompositorNodeGamma.*", "compositing/types/color/gamma.html"),
+ ("bpy.types.CompositorNodeHueCorrect.*", "compositing/types/color/hue_correct.html"),
+ ("bpy.types.CompositorNodeHueSat.*", "compositing/types/color/hue_saturation.html"),
+ ("bpy.types.CompositorNodeInvert.*", "compositing/types/color/invert.html"),
+ ("bpy.types.CompositorNodeMixRGB.*", "compositing/types/color/mix.html"),
+ ("bpy.types.CompositorNodeTonemap.*", "compositing/types/color/tone_map.html"),
+ ("bpy.types.CompositorNodeZcombine.*", "compositing/types/color/z_combine.html"),
# Converter
- ("bpy.types.CompositorNodeSep*", "composite_nodes/types/converter/combine_separate.html"),
- ("bpy.types.CompositorNodeComb*", "composite_nodes/types/converter/combine_separate.html"),
- ("bpy.types.CompositorNodeIDMask.*", "composite_nodes/types/converter/id_mask.html"),
- ("bpy.types.CompositorNodeMath.*", "composite_nodes/types/converter/math.html"),
- ("bpy.types.CompositorNodePremulKey.*", "composite_nodes/types/converter/alpha_convert.html"),
- ("bpy.types.CompositorNodeRGBToBW.*", "composite_nodes/types/converter/rgb_to_bw.html"),
- ("bpy.types.CompositorNodeSetAlpha.*", "composite_nodes/types/converter/set_alpha.html"),
+ ("bpy.types.CompositorNodeSep*", "compositing/types/converter/combine_separate.html"),
+ ("bpy.types.CompositorNodeComb*", "compositing/types/converter/combine_separate.html"),
+ ("bpy.types.CompositorNodeIDMask.*", "compositing/types/converter/id_mask.html"),
+ ("bpy.types.CompositorNodeMath.*", "compositing/types/converter/math.html"),
+ ("bpy.types.CompositorNodePremulKey.*", "compositing/types/converter/alpha_convert.html"),
+ ("bpy.types.CompositorNodeRGBToBW.*", "compositing/types/converter/rgb_to_bw.html"),
+ ("bpy.types.CompositorNodeSetAlpha.*", "compositing/types/converter/set_alpha.html"),
# Filter
- ("bpy.types.CompositorNodeBilateralblur.*", "composite_nodes/types/filter/bilateral_blur.html"),
- ("bpy.types.CompositorNodeBlur.*", "composite_nodes/types/filter/blur_node.html"),
- ("bpy.types.CompositorNodeBokehBlur.*", "composite_nodes/types/filter/bokeh_blur.html"),
- ("bpy.types.CompositorNodeDBlur.*", "composite_nodes/types/filter/directional_blur.html"),
- ("bpy.types.CompositorNodeDefocus.*", "composite_nodes/types/filter/defocus.html"),
- ("bpy.types.CompositorNodeDespeckle.*", "composite_nodes/types/filter/despeckle.html"),
- ("bpy.types.CompositorNodeDilateErode.*", "composite_nodes/types/filter/dilate_erode.html"),
- ("bpy.types.CompositorNodeFilter.*", "composite_nodes/types/filter/filter_node.html"),
- ("bpy.types.CompositorNodeGlare.*", "composite_nodes/types/filter/glare.html"),
- ("bpy.types.CompositorNodeInpaint.*", "composite_nodes/types/filter/inpaint.html"),
- ("bpy.types.CompositorNodePixelate.*", "composite_nodes/types/filter/pixelate.html"),
- ("bpy.types.CompositorNodeSunBeams.*", "composite_nodes/types/filter/sun_beams.html"),
- ("bpy.types.CompositorNodeVecBlur.*", "composite_nodes/types/filter/vector_blur.html"),
+ ("bpy.types.CompositorNodeBilateralblur.*", "compositing/types/filter/bilateral_blur.html"),
+ ("bpy.types.CompositorNodeBlur.*", "compositing/types/filter/blur_node.html"),
+ ("bpy.types.CompositorNodeBokehBlur.*", "compositing/types/filter/bokeh_blur.html"),
+ ("bpy.types.CompositorNodeDBlur.*", "compositing/types/filter/directional_blur.html"),
+ ("bpy.types.CompositorNodeDefocus.*", "compositing/types/filter/defocus.html"),
+ ("bpy.types.CompositorNodeDespeckle.*", "compositing/types/filter/despeckle.html"),
+ ("bpy.types.CompositorNodeDilateErode.*", "compositing/types/filter/dilate_erode.html"),
+ ("bpy.types.CompositorNodeFilter.*", "compositing/types/filter/filter_node.html"),
+ ("bpy.types.CompositorNodeGlare.*", "compositing/types/filter/glare.html"),
+ ("bpy.types.CompositorNodeInpaint.*", "compositing/types/filter/inpaint.html"),
+ ("bpy.types.CompositorNodePixelate.*", "compositing/types/filter/pixelate.html"),
+ ("bpy.types.CompositorNodeSunBeams.*", "compositing/types/filter/sun_beams.html"),
+ ("bpy.types.CompositorNodeVecBlur.*", "compositing/types/filter/vector_blur.html"),
# Vector
- ("bpy.types.CompositorNodeCurveVec.*", "composite_nodes/types/vector/vector_curves.html"),
- ("bpy.types.CompositorNodeMapRange.*", "composite_nodes/types/vector/map_range.html"),
- ("bpy.types.CompositorNodeMapValue.*", "composite_nodes/types/vector/map_value.html"),
- ("bpy.types.CompositorNodeNormal.*", "composite_nodes/types/vector/normal.html"),
- ("bpy.types.CompositorNodeNormalize.*", "composite_nodes/types/vector/normalize.html"),
+ ("bpy.types.CompositorNodeCurveVec.*", "compositing/types/vector/vector_curves.html"),
+ ("bpy.types.CompositorNodeMapRange.*", "compositing/types/vector/map_range.html"),
+ ("bpy.types.CompositorNodeMapValue.*", "compositing/types/vector/map_value.html"),
+ ("bpy.types.CompositorNodeNormal.*", "compositing/types/vector/normal.html"),
+ ("bpy.types.CompositorNodeNormalize.*", "compositing/types/vector/normalize.html"),
# Matte
- ("bpy.types.CompositorNodeBoxMask.*", "composite_nodes/types/matte/box_mask.html"),
- ("bpy.types.CompositorNodeChannelMatte.*", "composite_nodes/types/matte/channel_key.html"),
- ("bpy.types.CompositorNodeChromaMatte.*", "composite_nodes/types/matte/chroma_key.html"),
- ("bpy.types.CompositorNodeColorMatte.*", "composite_nodes/types/matte/color_key.html"),
- ("bpy.types.CompositorNodeColorSpill.*", "composite_nodes/types/matte/color_spill_key.html"),
- ("bpy.types.CompositorNodeDiffMatte.*", "composite_nodes/types/matte/difference_key.html"),
- ("bpy.types.CompositorNodeDistanceMatte.*", "composite_nodes/types/matte/difference_key.html"),
- ("bpy.types.CompositorNodeDoubleEdgeMask.*", "composite_nodes/types/matte/double_edge_mask.html"),
- ("bpy.types.CompositorNodeEllipseMask.*", "composite_nodes/types/matte/ellipse_mask.html"),
- ("bpy.types.CompositorNodeKeying.*", "composite_nodes/types/matte/keying.html"),
- ("bpy.types.CompositorNodeKeyingScreen.*", "composite_nodes/types/matte/keying_screen.html"),
- ("bpy.types.CompositorNodeLumaMatte.*", "composite_nodes/types/matte/luminance_key.html"),
+ ("bpy.types.CompositorNodeBoxMask.*", "compositing/types/matte/box_mask.html"),
+ ("bpy.types.CompositorNodeChannelMatte.*", "compositing/types/matte/channel_key.html"),
+ ("bpy.types.CompositorNodeChromaMatte.*", "compositing/types/matte/chroma_key.html"),
+ ("bpy.types.CompositorNodeColorMatte.*", "compositing/types/matte/color_key.html"),
+ ("bpy.types.CompositorNodeColorSpill.*", "compositing/types/matte/color_spill_key.html"),
+ ("bpy.types.CompositorNodeDiffMatte.*", "compositing/types/matte/difference_key.html"),
+ ("bpy.types.CompositorNodeDistanceMatte.*", "compositing/types/matte/difference_key.html"),
+ ("bpy.types.CompositorNodeDoubleEdgeMask.*", "compositing/types/matte/double_edge_mask.html"),
+ ("bpy.types.CompositorNodeEllipseMask.*", "compositing/types/matte/ellipse_mask.html"),
+ ("bpy.types.CompositorNodeKeying.*", "compositing/types/matte/keying.html"),
+ ("bpy.types.CompositorNodeKeyingScreen.*", "compositing/types/matte/keying_screen.html"),
+ ("bpy.types.CompositorNodeLumaMatte.*", "compositing/types/matte/luminance_key.html"),
# Distort
- ("bpy.types.CompositorNodeCrop.*", "composite_nodes/types/distort/crop.html"),
- ("bpy.types.CompositorNodeDisplace.*", "composite_nodes/types/distort/displace.html"),
- ("bpy.types.CompositorNodeFlip.*", "composite_nodes/types/distort/flip.html"),
- ("bpy.types.CompositorNodeLensdist.*", "composite_nodes/types/distort/lens.html"),
- ("bpy.types.CompositorNodeMapUV.*", "composite_nodes/types/distort/map_uv.html"),
- ("bpy.types.CompositorNodeMovieDistortion.*", "composite_nodes/types/distort/movie_distortion.html"),
- ("bpy.types.CompositorNodePlaneTrackDeform.*", "composite_nodes/types/distort/plane_track_deform.html"),
- ("bpy.types.CompositorNodeRotate.*", "composite_nodes/types/distort/rotate.html"),
- ("bpy.types.CompositorNodeScale.*", "composite_nodes/types/distort/scale.html"),
- ("bpy.types.CompositorNodeStabilize.*", "composite_nodes/types/distort/stabilize_2d.html"),
- ("bpy.types.CompositorNodeTransform.*", "composite_nodes/types/distort/transform.html"),
- ("bpy.types.CompositorNodeTranslate.*", "composite_nodes/types/distort/translate.html"),
+ ("bpy.types.CompositorNodeCrop.*", "compositing/types/distort/crop.html"),
+ ("bpy.types.CompositorNodeDisplace.*", "compositing/types/distort/displace.html"),
+ ("bpy.types.CompositorNodeFlip.*", "compositing/types/distort/flip.html"),
+ ("bpy.types.CompositorNodeLensdist.*", "compositing/types/distort/lens.html"),
+ ("bpy.types.CompositorNodeMapUV.*", "compositing/types/distort/map_uv.html"),
+ ("bpy.types.CompositorNodeMovieDistortion.*", "compositing/types/distort/movie_distortion.html"),
+ ("bpy.types.CompositorNodePlaneTrackDeform.*", "compositing/types/distort/plane_track_deform.html"),
+ ("bpy.types.CompositorNodeRotate.*", "compositing/types/distort/rotate.html"),
+ ("bpy.types.CompositorNodeScale.*", "compositing/types/distort/scale.html"),
+ ("bpy.types.CompositorNodeStabilize.*", "compositing/types/distort/stabilize_2d.html"),
+ ("bpy.types.CompositorNodeTransform.*", "compositing/types/distort/transform.html"),
+ ("bpy.types.CompositorNodeTranslate.*", "compositing/types/distort/translate.html"),
#Other
- ("bpy.types.CompositorNodeGroup.*", "composite_nodes/node_groups.html"),
- ("bpy.types.CompositorNode*", "composite_nodes/types"), # catch anything else
+ ("bpy.types.CompositorNodeGroup.*", "editors/node_editor/node_groups.html"),
+ ("bpy.types.CompositorNode*", "compositing/types"), # catch anything else
("bpy.types.ColorRamp*", "render/blender_render/materials/properties/ramps.html"),
# *** ID Subclasses (cont.) Object Data ***
- ("bpy.types.Mesh.*", "modeling/meshes"), # catchall, todo - refine
- ("bpy.types.MetaBall.*", "modeling/metas"), # catchall, todo - refine
+ ("bpy.types.Mesh.*", "modeling/meshes"), # catchall, todo - refine
+ ("bpy.types.MetaBall.*", "modeling/metas"), # catchall, todo - refine
("bpy.types.TextCurve.*", "modeling/texts"), # catchall, todo - refine
- ("bpy.types.Armature.*", "rigging/armatures.html"), # catchall, todo - refine
- ("bpy.types.Camera.*", "render/camera"), # catchall, todo - refine
+ ("bpy.types.Armature.*", "rigging/armatures"), # catchall, todo - refine
+ ("bpy.types.Camera.*", "render/camera"), # catchall, todo - refine
("bpy.types.PointLamp.*", "render/blender_render/lighting/lamps/point"), # catchall, todo - refine
("bpy.types.AreaLamp.*", "render/blender_render/lighting/lamps/area"), # catchall, todo - refine
@@ -440,114 +422,114 @@ url_manual_mapping = (
("bpy.types.Lamp.*", "render/blender_render/lighting"), # catchall, todo - refine
# --- Animation ---
- ("bpy.types.Keyframe.*", "animation/basics/actions.html"),
- ("bpy.types.FCurve.*", "animation/editors/graph/fcurves.html"),
+ ("bpy.types.Keyframe.*", "animation/actions.html"),
+ ("bpy.types.FCurve.*", "editors/graph_editor/fcurves.html"),
# --- Rigging ---
("bpy.types.Bone.*", "rigging/armatures/bones.html"),
("bpy.types.EditBone.*", "rigging/armatures/bones.html"),
- ("bpy.types.PoseBone.*", "rigging/posing.html"),
+ ("bpy.types.PoseBone.*", "rigging/posing"),
# --- World ---
- ("bpy.types.World.*", "render/blender_render/world"),
+ ("bpy.types.World.*", "render/blender_render/world"),
- ("bpy.types.Texture.*", "render/blender_render/textures"),
+ ("bpy.types.Texture.*", "render/blender_render/textures"),
# *** Spaces ***
- ("bpy.types.SpaceView3D.*", "getting_started/basics/navigating"),
+ ("bpy.types.SpaceView3D.*", "editors/3dview"),
# === Operators ===
# Catch all only for now!
# *** Window/Screen ***
- ("bpy.ops.action.*", "animation/basics/actions.html"),
- ("bpy.ops.anim.*", "animation"),
- ("bpy.ops.armature.*", "rigging/armatures.html"),
- ("bpy.ops.boid.*", "physics/particles/physics/boids.html"),
+ ("bpy.ops.action.*", "animation/actions.html"),
+ ("bpy.ops.anim.*", "animation"),
+ ("bpy.ops.armature.*", "rigging/armatures/"),
+ ("bpy.ops.boid.*", "physics/particles/physics/boids.html"),
# ("bpy.ops.brush.*", ""), # TODO
- ("bpy.ops.buttons.*", "getting_started/basics/interface/buttons_and_controls.html"),
- ("bpy.ops.camera.*", "render/camera"),
- ("bpy.ops.clip.*", "motion_tracking/index.html#movie-clip-editor"),
- ("bpy.ops.cloth.*", "physics/cloth.html"),
- ("bpy.ops.console.*", "extensions/python/console.html"),
+ ("bpy.ops.buttons.*", "interface/buttons_and_controls.html"),
+ ("bpy.ops.camera.*", "render/camera"),
+ ("bpy.ops.clip.*", "editors/movie_clip_editor/index.html#movie-clip-editor"),
+ ("bpy.ops.cloth.*", "physics/cloth"),
+ ("bpy.ops.console.*", "editors/python_console.html"),
("bpy.ops.constraint.*", "rigging/constraints"),
- ("bpy.ops.curve.*", "modeling/curves"),
- ("bpy.ops.dpaint.*", "physics/dynamic_paint"),
- # ("bpy.ops.ed.*", ""), # TODO, this is for internal use only?
+ ("bpy.ops.curve.*", "modeling/curves"),
+ ("bpy.ops.dpaint.*", "physics/dynamic_paint"),
+ # ("bpy.ops.ed.*", ""), # TODO, this is for internal use only?
# ("bpy.ops.export_anim.*", ""), # TODO
# ("bpy.ops.export_mesh.*", ""), # TODO
- # ("bpy.ops.export_scene.*", ""), # TODO
+ # ("bpy.ops.export_scene.*", ""), # TODO
("bpy.ops.file.*", ""),
- ("bpy.ops.fluid.*", "physics/fluid"),
- ("bpy.ops.font.*", "modeling/texts"),
- ("bpy.ops.gpencil.*", "grease_pencil"),
- ("bpy.ops.graph.*", "animation/editors/graph/fcurves.html"),
- ("bpy.ops.group.*", "modeling/objects/groups_and_parenting.html#grouping-objects"),
- ("bpy.ops.image.*", "render/blender_render/textures/types/image.html"),
- # ("bpy.ops.import_anim.*", ""), # TODO
+ ("bpy.ops.fluid.*", "physics/fluid"),
+ ("bpy.ops.font.*", "modeling/texts"),
+ ("bpy.ops.gpencil.*", "interface/grease_pencil/index.html"),
+ ("bpy.ops.graph.*", "editors/graph_editor/fcurves.html"),
+ ("bpy.ops.group.*", "editors/3dview/groups.html"),
+ ("bpy.ops.image.*", "render/blender_render/textures/types/image.html"),
+ # ("bpy.ops.import_anim.*", ""), # TODO
# ("bpy.ops.import_curve.*", ""), # TODO
- # ("bpy.ops.import_mesh.*", ""), # TODO
+ # ("bpy.ops.import_mesh.*", ""), # TODO
# ("bpy.ops.import_scene.*", ""), # TODO
- # ("bpy.ops.info.*", ""), # TODO
- ("bpy.ops.lamp.*", "render/blender_render/lighting"), # --- todo ... all below ---
+ # ("bpy.ops.info.*", ""), # TODO
+ ("bpy.ops.lamp.*", "render/blender_render/lighting"), # --- todo ... all below ---
# ("bpy.ops.lattice.*", ""), # TODO
- ("bpy.ops.logic.*", "game_engine/logic"),
- ("bpy.ops.marker.*", "animation/basics/markers.html"),
- # ("bpy.ops.mask.*", ""), # TODO
- ("bpy.ops.material.new", "render/blender_render/materials/assigning_a_material.html#creating-a-new-material"),
- ("bpy.ops.material.*", "render/blender_render"),
- ("bpy.ops.mesh.vertices_smooth", "modeling/meshes/editing/deforming/smooth.html"),
+ ("bpy.ops.logic.*", "game_engine/logic"),
+ ("bpy.ops.marker.*", "animation/markers.html"),
+ ("bpy.ops.mask.*", "editors/movie_clip_editor/masking.html"),
+ ("bpy.ops.material.new", "render/blender_render/materials/assigning_a_material.html#creating-a-new-material"),
+ ("bpy.ops.material.*", "render/blender_render"),
+ ("bpy.ops.mesh.vertices_smooth", "modeling/meshes/editing/deforming/smooth.html"),
("bpy.ops.view3d.edit_mesh_extrude*", "modeling/meshes/editing/duplicating/extrude.html"),
- ("bpy.ops.mesh.subdivide", "modeling/meshes/editing/subdividing/subdivide.html"),
- ("bpy.ops.mesh.loopcut_slide", "modeling/meshes/editing/subdividing/loop_subdivide.html"),
- ("bpy.ops.mesh.bridge-edge-loops", "modeling/meshes/editing/edges.html#bridge-edge-loops"),
- ("bpy.ops.mesh.duplicate_move", "modeling/meshes/editing/duplicating/duplicate.html"),
- ("bpy.ops.mesh.spin", "modeling/meshes/editing/duplicating/spin.html"),
- ("bpy.ops.mesh.screw", "modeling/meshes/editing/duplicating/screw.html"),
- ("bpy.ops.mesh.knife*", "modeling/meshes/editing/subdividing/knife_subdivide.html"),
- ("bpy.ops.mesh.bisect", "modeling/meshes/editing/subdividing/bisect.html"),
- ("bpy.ops.mball.*", "modeling/metas"),
- ("bpy.ops.mesh.*", "modeling/meshes"),
- ("bpy.ops.nla.*", "animation/editors/nla.html"),
- # ("bpy.ops.node.*", ""), # TODO
- ("bpy.ops.object.*shape_key*", "animation/techs/shape/shape_keys.html"),
- ("bpy.ops.object.join_shapes", "animation/techs/shape/shape_keys.html"),
- ("bpy.ops.object.*", "modeling/objects"),
- ("bpy.ops.outliner.*", "editors/outliner.html"),
- # ("bpy.ops.paint.*", ""), # TODO
- ("bpy.ops.particle.*", "physics/particles"),
- ("bpy.ops.pose.*", "rigging/posing.html"),
- ("bpy.ops.poselib.*", "rigging/posing/pose_library.html"),
- # ("bpy.ops.ptcache.*", ""), # TODO
-
- ("bpy.ops.render.play-rendered-anim", "render/display.html#animation-playback"),
- ("bpy.ops.render.*", "render"), # catchall
-
- ("bpy.ops.scene.*", "getting_started/basics/interface/scenes.html"),
- ("bpy.ops.screen.*", "getting_started/basics/interface/window_system"),
- ("bpy.ops.script.*", "extensions/python.html"),
- ("bpy.ops.sculpt.*", "modeling/meshes/editing/sculpt_mode.html"),
- ("bpy.ops.sequencer.*", "editors/sequencer"),
- # ("bpy.ops.sketch.*", ""), # TODO
- # ("bpy.ops.sound.*", ""), # TODO
- ("bpy.ops.surface.*", "modeling/surfaces"),
- ("bpy.ops.text.*", "extensions/python/text_editor.html"),
- ("bpy.ops.texture.*", "render/blender_render/textures"),
- ("bpy.ops.time.*", "animation/editors/timeline.html"),
- ("bpy.ops.transform.edge_slide", "modeling/meshes/editing/edges.html#edge-slide"),
- ("bpy.ops.transform.vert_slide", "modeling/meshes/editing/vertices.html#vertex-slide"),
- ("bpy.ops.transform.shrink_fatten", "modeling/meshes/editing/deforming/shrink-fatten.html"),
- ("bpy.ops.transform.push_pull", "modeling/meshes/editing/deforming/push_pull.html"),
- ("bpy.ops.transform.*", "getting_started/basics/transformations/transform_control"),
- ("bpy.ops.ui.*", "getting_started/basics/interface"),
- ("bpy.ops.uv.*", "render/blender_render/textures/mapping/uv/layout_editing.html"),
- # ("bpy.ops.view2d.*", ""), # TODO
- ("bpy.ops.view3d.*", "getting_started/basics/navigating/3d_view.html"), # this link is a bit arbitrary
- ("bpy.ops.wm.*", "getting_started/basics/interface/window_system"),
- ("bpy.ops.world.*", "render/blender_render/world"),
+ ("bpy.ops.mesh.subdivide", "modeling/meshes/editing/subdividing/subdivide.html"),
+ ("bpy.ops.mesh.loopcut_slide", "modeling/meshes/editing/subdividing/loop_subdivide.html"),
+ ("bpy.ops.mesh.bridge-edge-loops", "modeling/meshes/editing/edges.html#bridge-edge-loops"),
+ ("bpy.ops.mesh.duplicate_move", "modeling/meshes/editing/duplicating/duplicate.html"),
+ ("bpy.ops.mesh.spin", "modeling/meshes/editing/duplicating/spin.html"),
+ ("bpy.ops.mesh.screw", "modeling/meshes/editing/duplicating/screw.html"),
+ ("bpy.ops.mesh.knife*", "modeling/meshes/editing/subdividing/knife_subdivide.html"),
+ ("bpy.ops.mesh.bisect", "modeling/meshes/editing/subdividing/bisect.html"),
+ ("bpy.ops.mball.*", "modeling/metas"),
+ ("bpy.ops.mesh.*", "modeling/meshes"),
+ ("bpy.ops.nla.*", "editors/nla.html"),
+ # ("bpy.ops.node.*", ""), # TODO
+ ("bpy.ops.object.*shape_key*", "animation/shape_keys.html"),
+ ("bpy.ops.object.join_shapes", "animation/shape_keys.html"),
+ ("bpy.ops.object.*", "editors/3dview/transform"),
+ ("bpy.ops.outliner.*", "editors/outliner.html"),
+ # ("bpy.ops.paint.*", ""), # TODO
+ ("bpy.ops.particle.*", "physics/particles"),
+ ("bpy.ops.pose.*", "rigging/posing"),
+ ("bpy.ops.poselib.*", "rigging/posing/pose_library.html"),
+ # ("bpy.ops.ptcache.*", ""), # TODO
+
+ ("bpy.ops.render.play-rendered-anim", "render/output/display.html#animation-playback"),
+ ("bpy.ops.render.*", "render"), # catchall
+
+ ("bpy.ops.scene.*", "data_system/scenes.html"),
+ ("bpy.ops.screen.*", "interface/window_system"),
+ ("bpy.ops.script.*", "advanced/scripting"),
+ ("bpy.ops.sculpt.*", "painting_sculpting/sculpting/index.html"),
+ ("bpy.ops.sequencer.*", "editors/sequencer"),
+ # ("bpy.ops.sketch.*", ""), # TODO
+ ("bpy.ops.sound.*", "editors/3dview/objects/types/speaker.html"),
+ ("bpy.ops.surface.*", "modeling/surfaces"),
+ ("bpy.ops.text.*", "editors/text_editor.html"),
+ ("bpy.ops.texture.*", "render/blender_render/textures"),
+ ("bpy.ops.time.*", "editors/timeline.html"),
+ ("bpy.ops.transform.edge_slide", "modeling/meshes/editing/edges.html#edge-slide"),
+ ("bpy.ops.transform.vert_slide", "modeling/meshes/editing/vertices.html#vertex-slide"),
+ ("bpy.ops.transform.shrink_fatten", "modeling/meshes/editing/deforming/shrink-fatten.html"),
+ ("bpy.ops.transform.push_pull", "modeling/meshes/editing/deforming/push_pull.html"),
+ ("bpy.ops.transform.*", "editors/3dview/transform/transform_control"),
+ ("bpy.ops.ui.*", "interface"),
+ ("bpy.ops.uv.*", "editors/uv_image/index.html"),
+ # ("bpy.ops.view2d.*", ""), # TODO
+ ("bpy.ops.view3d.*", "editors/3dview/"),
+ ("bpy.ops.wm.*", "interface/window_system"),
+ ("bpy.ops.world.*", "render/blender_render/world"),
# === Tool Settings ===
- ("bpy.types.MeshStatVis.*", "modeling/meshes/mesh_analysis.html"),
+ ("bpy.types.MeshStatVis.*", "modeling/meshes/mesh_analysis.html"),
)
# may have 'url_reference_mapping'... etc later
diff --git a/netrender/master.py b/netrender/master.py
index d1b8fde2..d70a36ef 100644
--- a/netrender/master.py
+++ b/netrender/master.py
@@ -455,7 +455,7 @@ class RenderHandler(http.server.BaseHTTPRequestHandler):
message = frame.serialize()
else:
# no such frame
- self.send_heat(http.client.NO_CONTENT)
+ self.send_head(http.client.NO_CONTENT)
return
else:
message = job.serialize()
diff --git a/object_animrenderbake.py b/object_animrenderbake.py
index 695af7b1..552ac80c 100644
--- a/object_animrenderbake.py
+++ b/object_animrenderbake.py
@@ -19,8 +19,8 @@
bl_info = {
"name": "Animated Render Baker",
"author": "Janne Karhu (jahka)",
- "version": (1, 0),
- "blender": (2, 65, 0),
+ "version": (2, 0),
+ "blender": (2, 75, 0),
"location": "Properties > Render > Bake Panel",
"description": "Renderbakes a series of frames",
"category": "Object",
@@ -48,9 +48,10 @@ class OBJECT_OT_animrenderbake(bpy.types.Operator):
def invoke(self, context, event):
import shutil
-
+ is_cycles = (context.scene.render.engine == 'CYCLES')
+
scene = context.scene
-
+
start = scene.animrenderbake_start
end = scene.animrenderbake_end
@@ -82,12 +83,46 @@ class OBJECT_OT_animrenderbake(bpy.types.Operator):
# find the image that's used for rendering
# TODO: support multiple images per bake
- for uvtex in context.active_object.data.uv_textures:
- if uvtex.active_render == True:
- for uvdata in uvtex.data:
- if uvdata.image is not None:
- img = uvdata.image
+ if is_cycles:
+ # XXX This tries to mimic nodeGetActiveTexture(), but we have no access to 'texture_active' state from RNA...
+ # IMHO, this should be a func in RNA nodetree struct anyway?
+ inactive = None
+ selected = None
+ for mat_slot in context.active_object.material_slots:
+ mat = mat_slot.material
+ if not mat or not mat.node_tree:
+ continue
+ trees = [mat.node_tree]
+ while trees and not img:
+ tree = trees.pop()
+ node = tree.nodes.active
+ if node.type in {'TEX_IMAGE', 'TEX_ENVIRONMENT'}:
+ img = node.image
break
+ for node in tree.nodes:
+ if node.type in {'TEX_IMAGE', 'TEX_ENVIRONMENT'} and node.image:
+ if node.select:
+ if not selected:
+ selected = node
+ else:
+ if not inactive:
+ inactive = node
+ elif node.type == 'GROUP':
+ trees.append(node.node_tree)
+ if img:
+ break
+ if not img:
+ if selected:
+ img = selected.image
+ elif inactive:
+ img = inactive.image
+ else:
+ for uvtex in context.active_object.data.uv_textures:
+ if uvtex.active_render == True:
+ for uvdata in uvtex.data:
+ if uvdata.image is not None:
+ img = uvdata.image
+ break
if img is None:
self.report({'ERROR'}, "No valid image found to bake to")
@@ -111,7 +146,10 @@ class OBJECT_OT_animrenderbake(bpy.types.Operator):
# update scene to new frame and bake to template image
scene.frame_set(cfra)
- ret = bpy.ops.object.bake_image()
+ if is_cycles:
+ ret = bpy.ops.object.bake()
+ else:
+ ret = bpy.ops.object.bake_image()
if 'CANCELLED' in ret:
return {'CANCELLED'}
@@ -154,6 +192,9 @@ def register():
default=250)
bpy.types.RENDER_PT_bake.prepend(draw)
+ cycles_panel = getattr(bpy.types, "CyclesRender_PT_bake", None)
+ if cycles_panel:
+ cycles_panel.prepend(draw)
def unregister():
@@ -164,6 +205,9 @@ def unregister():
del bpy.types.Scene.animrenderbake_end
bpy.types.RENDER_PT_bake.remove(draw)
+ cycles_panel = getattr(bpy.types, "CyclesRender_PT_bake", None)
+ if cycles_panel:
+ cycles_panel.remove(draw)
if __name__ == "__main__":
diff --git a/object_cloud_gen.py b/object_cloud_gen.py
index b2aa7f4a..7d56f985 100644
--- a/object_cloud_gen.py
+++ b/object_cloud_gen.py
@@ -22,8 +22,8 @@ bl_info = {
"name": "Cloud Generator",
"author": "Nick Keeline(nrk)",
"version": (1, 0),
- "blender": (2, 71, 0),
- "location": "Tool Shelf > Create Tab",
+ "blender": (2, 75, 0),
+ "location": "Blender Render: Tool Shelf > Create Tab",
"description": "Creates Volumetric Clouds",
"wiki_url": "http://wiki.blender.org/index.php/Extensions:2.6/Py/"
"Scripts/Object/Cloud_Gen",
@@ -306,39 +306,43 @@ class VIEW3D_PT_tools_cloud(Panel):
bl_options = {'DEFAULT_CLOSED'}
def draw(self, context):
- active_obj = context.active_object
- layout = self.layout
- col = layout.column(align=True)
+ if context.scene.render.engine == "BLENDER_RENDER":
+ active_obj = context.active_object
+ layout = self.layout
+ col = layout.column(align=True)
- WhatToDo = getActionToDo(active_obj)
+ WhatToDo = getActionToDo(active_obj)
- if WhatToDo == 'DEGENERATE':
- col.operator("cloud.generate_cloud", text="DeGenerate")
-
- elif WhatToDo == 'CLOUD_CONVERT_TO_MESH':
- col.operator("cloud.generate_cloud", text="Convert to Mesh")
-
- elif WhatToDo == 'NO_SELECTION_DO_NOTHING':
- col.label(text="Select one or more")
- col.label(text="objects to generate")
- col.label(text="a cloud")
+ if WhatToDo == 'DEGENERATE':
+ col.operator("cloud.generate_cloud", text="DeGenerate")
- elif WhatToDo == 'CLOUD_DO_NOTHING':
- col.label(text="Must select")
- col.label(text="bound box")
+ elif WhatToDo == 'CLOUD_CONVERT_TO_MESH':
+ col.operator("cloud.generate_cloud", text="Convert to Mesh")
- elif WhatToDo == 'GENERATE':
- col.operator("cloud.generate_cloud", text="Generate Cloud")
+ elif WhatToDo == 'NO_SELECTION_DO_NOTHING':
+ col.label(text="Select one or more")
+ col.label(text="objects to generate")
+ col.label(text="a cloud")
- col.prop(context.scene, "cloud_type")
- col.prop(context.scene, "cloudparticles")
- col.prop(context.scene, "cloudsmoothing")
- else:
- col.label(text="Select one or more")
- col.label(text="objects to generate")
- col.label(text="a cloud")
+ elif WhatToDo == 'CLOUD_DO_NOTHING':
+ col.label(text="Must select")
+ col.label(text="bound box")
+ elif WhatToDo == 'GENERATE':
+ col.operator("cloud.generate_cloud", text="Generate Cloud")
+ col.prop(context.scene, "cloud_type")
+ col.prop(context.scene, "cloudparticles")
+ col.prop(context.scene, "cloudsmoothing")
+ else:
+ col.label(text="Select one or more")
+ col.label(text="objects to generate")
+ col.label(text="a cloud")
+
+ if context.scene.render.engine == "CYCLES":
+ layout = self.layout
+ layout.label(text="Blender Render Only")
+
class GenerateCloud(Operator):
"""Create a Cloud,Undo Cloud, or convert to Mesh Cloud depending on selection"""
bl_idname = "cloud.generate_cloud"
@@ -353,8 +357,14 @@ class GenerateCloud(Operator):
else:
return (context.active_object.type == 'MESH')
+
def execute(self, context):
# Make variable that is the current .blend file main data blocks
+ space_data = bpy.context.space_data
+
+ if True in space_data.layers_local_view:
+ self.report({'INFO'}, 'Global Perspective mode only, unable to continue.')
+ return {'FINISHED'}
blend_data = context.blend_data
# Make variable that is the active object selected by user
diff --git a/object_grease_scatter.py b/object_grease_scatter.py
index 1535698b..105b40ea 100644
--- a/object_grease_scatter.py
+++ b/object_grease_scatter.py
@@ -91,8 +91,8 @@ def _main(self,
DEBUG = False
def fix_point(p):
- hit, no, ind = closest_point_on_mesh(obj_mat_inv * p)
- if ind != -1:
+ ok, hit, no, ind = closest_point_on_mesh(obj_mat_inv * p)
+ if ok:
if DEBUG:
return [p, no, None]
else:
@@ -151,13 +151,12 @@ def _main(self,
m_alt_2 = Matrix.Rotation(radians(-22.5), 3, n)
for _m in mats:
for m in (_m, m_alt_1 * _m, m_alt_2 * _m):
- hit, nor, ind = ray(pofs, pofs + (m * n_seek))
- if ind != -1:
- dist = (pofs - hit).length
- if dist < best_dist:
- best_dist = dist
- best_nor = nor
- #best_hit = hit
+ pdir = m * n_seek
+ ok, hit, nor, ind = ray(pofs, pdir, best_dist)
+ if ok:
+ best_dist = (pofs - hit).length
+ best_nor = nor
+ # best_hit = hit
if best_nor:
pt[1].length = best_dist
@@ -211,9 +210,9 @@ def _main(self,
ntmp.y += uniform(-l, l) * RAND_LOC
ntmp.z += uniform(-l, l) * RAND_LOC
- hit, hit_no, ind = ray(vantage, vantage + ntmp)
+ ok, hit, hit_no, ind = ray(vantage, ntmp, ntmp.length)
# print(hit, hit_no)
- if ind != -1:
+ if ok:
if hit_no.angle(Z_UP) < WALL_LIMIT:
hits.append(hit)
nors.append(hit_no)
diff --git a/object_print3d_utils/export.py b/object_print3d_utils/export.py
index d0eeca3f..49a33e77 100644
--- a/object_print3d_utils/export.py
+++ b/object_print3d_utils/export.py
@@ -122,6 +122,7 @@ def write_mesh(context, info, report_cb):
filepath=filepath,
ascii=False,
use_mesh_modifiers=True,
+ use_selection=True,
global_scale=global_scale,
)
elif export_format == 'PLY':
diff --git a/object_print3d_utils/mesh_helpers.py b/object_print3d_utils/mesh_helpers.py
index 450e2388..5c72f4e4 100644
--- a/object_print3d_utils/mesh_helpers.py
+++ b/object_print3d_utils/mesh_helpers.py
@@ -106,63 +106,13 @@ def bmesh_check_self_intersect_object(obj):
if not obj.data.polygons:
return array.array('i', ())
- # Heres what we do!
- #
- # * Take original Mesh.
- # * Copy it and triangulate it (keeping list of original edge index values)
- # * Move the BMesh into a temp Mesh.
- # * Make a temp Object in the scene and assign the temp Mesh.
- # * For every original edge - ray-cast on the object to find which intersect.
- # * Report all edge intersections.
-
- # Triangulate
bm = bmesh_copy_from_object(obj, transform=False, triangulate=False)
- face_map_index_org = {f: i for i, f in enumerate(bm.faces)}
- ret = bmesh.ops.triangulate(bm, faces=bm.faces)
- face_map = ret["face_map"]
- # map new index to original index
- face_map_index = {i: face_map_index_org[face_map.get(f, f)] for i, f in enumerate(bm.faces)}
- del face_map_index_org
- del ret
-
- # Create a real mesh (lame!)
- scene = bpy.context.scene
- me_tmp = bpy.data.meshes.new(name="~temp~")
- bm.to_mesh(me_tmp)
- bm.free()
- obj_tmp = bpy.data.objects.new(name=me_tmp.name, object_data=me_tmp)
- scene.objects.link(obj_tmp)
- scene.update()
- ray_cast = obj_tmp.ray_cast
-
- faces_error = set()
-
- EPS_NORMAL = 0.000001
- EPS_CENTER = 0.01 # should always be bigger
-
- for ed in me_tmp.edges:
- v1i, v2i = ed.vertices
- v1 = me_tmp.vertices[v1i]
- v2 = me_tmp.vertices[v2i]
-
- # setup the edge with an offset
- co_1 = v1.co.copy()
- co_2 = v2.co.copy()
- co_mid = (co_1 + co_2) * 0.5
- no_mid = (v1.normal + v2.normal).normalized() * EPS_NORMAL
- co_1 = co_1.lerp(co_mid, EPS_CENTER) + no_mid
- co_2 = co_2.lerp(co_mid, EPS_CENTER) + no_mid
- co, no, index = ray_cast(co_1, co_2)
- if index != -1:
- faces_error.add(face_map_index[index])
-
- scene.objects.unlink(obj_tmp)
- bpy.data.objects.remove(obj_tmp)
- bpy.data.meshes.remove(me_tmp)
-
- scene.update()
+ import mathutils
+ tree = mathutils.bvhtree.BVHTree.FromBMesh(bm, epsilon=0.00001)
+ overlap = tree.overlap(tree)
+ faces_error = {i for i_pair in overlap for i in i_pair}
return array.array('i', faces_error)
@@ -230,10 +180,11 @@ def bmesh_check_thick_object(obj, thickness):
# Cast the ray backwards
p_a = p - no_sta
p_b = p - no_end
+ p_dir = p_b - p_a
- co, no, index = ray_cast(p_a, p_b)
+ ok, co, no, index = ray_cast(p_a, p_dir, p_dir.length)
- if index != -1:
+ if ok:
# Add the face we hit
for f_iter in (f, bm_faces_new[index]):
# if the face wasn't triangulated, just use existing
diff --git a/object_print3d_utils/ui.py b/object_print3d_utils/ui.py
index dfe2ac49..2bcf4d55 100644
--- a/object_print3d_utils/ui.py
+++ b/object_print3d_utils/ui.py
@@ -106,7 +106,7 @@ class Print3DToolBar:
rowsub.operator("mesh.print3d_clean_distorted", text="Distorted")
rowsub.prop(print_3d, "angle_distort", text="")
col = layout.column()
- col.operator("mesh.print3d_clean_non_manifold", text="Non-Manifold")
+ col.operator("mesh.print3d_clean_non_manifold", text="Make Manifold")
# XXX TODO
# col.operator("mesh.print3d_clean_thin", text="Wall Thickness")
diff --git a/presets/operator/curve.torus_knot_plus/13x8_wicker_globe.py b/presets/operator/curve.torus_knot_plus/13x8_wicker_globe.py
new file mode 100644
index 00000000..853a7144
--- /dev/null
+++ b/presets/operator/curve.torus_knot_plus/13x8_wicker_globe.py
@@ -0,0 +1,38 @@
+import bpy
+op = bpy.context.active_operator
+
+op.location = (0.0, 0.0, 0.0)
+op.view_align = False
+op.rotation = (0.0, 0.0, 0.0)
+op.options_plus = False
+op.absolute_location = False
+op.use_colors = False
+op.colorSet = '1'
+op.random_colors = False
+op.saturation = 0.75
+op.geo_surface = True
+op.geo_bDepth = 0.029999999329447746
+op.geo_bRes = 3
+op.geo_extrude = 0.0
+op.geo_offset = 0.0
+op.torus_p = 13
+op.torus_q = 8
+op.flip_p = False
+op.flip_q = False
+op.multiple_links = True
+op.torus_u = 1
+op.torus_v = 1
+op.torus_rP = 0.0
+op.torus_sP = 0.0
+op.mode = 'MAJOR_MINOR'
+op.torus_R = 0.0
+op.torus_r = 1.0
+op.torus_iR = 0.75
+op.torus_eR = 1.25
+op.torus_s = 1.0
+op.torus_h = 1.0
+op.torus_res = 767
+op.segment_res = 12
+op.outputType = 'BEZIER'
+op.handleType = 'AUTOMATIC'
+op.adaptive_resolution = True
diff --git a/presets/operator/curve.torus_knot_plus/7x6.py b/presets/operator/curve.torus_knot_plus/7x6.py
new file mode 100644
index 00000000..3398214c
--- /dev/null
+++ b/presets/operator/curve.torus_knot_plus/7x6.py
@@ -0,0 +1,38 @@
+import bpy
+op = bpy.context.active_operator
+
+op.location = (0.0, 0.0, 0.0)
+op.view_align = False
+op.rotation = (0.0, 0.0, 0.0)
+op.options_plus = False
+op.absolute_location = False
+op.use_colors = False
+op.colorSet = '1'
+op.random_colors = False
+op.saturation = 0.75
+op.geo_surface = True
+op.geo_bDepth = 0.019999999552965164
+op.geo_bRes = 2
+op.geo_extrude = 0.0
+op.geo_offset = 0.0
+op.torus_p = 7
+op.torus_q = 6
+op.flip_p = False
+op.flip_q = False
+op.multiple_links = True
+op.torus_u = 1
+op.torus_v = 1
+op.torus_rP = 0.0
+op.torus_sP = 0.0
+op.mode = 'MAJOR_MINOR'
+op.torus_R = 1.0
+op.torus_r = 0.25
+op.torus_iR = 0.75
+op.torus_eR = 1.25
+op.torus_s = 1.0
+op.torus_h = 1.0
+op.torus_res = 360
+op.segment_res = 12
+op.outputType = 'BEZIER'
+op.handleType = 'AUTOMATIC'
+op.adaptive_resolution = True
diff --git a/presets/operator/curve.torus_knot_plus/9x9_color.py b/presets/operator/curve.torus_knot_plus/9x9_color.py
new file mode 100644
index 00000000..a0622a92
--- /dev/null
+++ b/presets/operator/curve.torus_knot_plus/9x9_color.py
@@ -0,0 +1,38 @@
+import bpy
+op = bpy.context.active_operator
+
+op.location = (0.0, 0.0, 0.0)
+op.view_align = False
+op.rotation = (0.0, 0.0, 0.0)
+op.options_plus = False
+op.absolute_location = False
+op.use_colors = True
+op.colorSet = '1'
+op.random_colors = False
+op.saturation = 0.75
+op.geo_surface = True
+op.geo_bDepth = 0.019999999552965164
+op.geo_bRes = 2
+op.geo_extrude = 0.0
+op.geo_offset = 0.0
+op.torus_p = 9
+op.torus_q = 9
+op.flip_p = False
+op.flip_q = False
+op.multiple_links = True
+op.torus_u = 1
+op.torus_v = 1
+op.torus_rP = 0.0
+op.torus_sP = 0.0
+op.mode = 'MAJOR_MINOR'
+op.torus_R = 1.0
+op.torus_r = 0.25
+op.torus_iR = 0.75
+op.torus_eR = 1.25
+op.torus_s = 1.0
+op.torus_h = 1.0
+op.torus_res = 51
+op.segment_res = 12
+op.outputType = 'BEZIER'
+op.handleType = 'AUTOMATIC'
+op.adaptive_resolution = True
diff --git a/presets/operator/curve.torus_knot_plus/braided_coil.py b/presets/operator/curve.torus_knot_plus/braided_coil.py
new file mode 100644
index 00000000..b881f394
--- /dev/null
+++ b/presets/operator/curve.torus_knot_plus/braided_coil.py
@@ -0,0 +1,38 @@
+import bpy
+op = bpy.context.active_operator
+
+op.location = (0.0, 0.0, 0.0)
+op.view_align = False
+op.rotation = (0.0, 0.0, 0.0)
+op.options_plus = True
+op.absolute_location = False
+op.use_colors = False
+op.colorSet = '1'
+op.random_colors = False
+op.saturation = 0.75
+op.geo_surface = True
+op.geo_bDepth = 0.019999999552965164
+op.geo_bRes = 2
+op.geo_extrude = 0.0
+op.geo_offset = 0.0
+op.torus_p = 7
+op.torus_q = 6
+op.flip_p = False
+op.flip_q = False
+op.multiple_links = True
+op.torus_u = 143
+op.torus_v = 246
+op.torus_rP = 0.0
+op.torus_sP = 0.0
+op.mode = 'MAJOR_MINOR'
+op.torus_R = 1.0
+op.torus_r = 0.3100000023841858
+op.torus_iR = 0.75
+op.torus_eR = 1.25
+op.torus_s = 1.0
+op.torus_h = 1.0
+op.torus_res = 143
+op.segment_res = 12
+op.outputType = 'BEZIER'
+op.handleType = 'AUTOMATIC'
+op.adaptive_resolution = False
diff --git a/presets/operator/curve.torus_knot_plus/flower_mesh_(2d).py b/presets/operator/curve.torus_knot_plus/flower_mesh_(2d).py
new file mode 100644
index 00000000..863be9e9
--- /dev/null
+++ b/presets/operator/curve.torus_knot_plus/flower_mesh_(2d).py
@@ -0,0 +1,38 @@
+import bpy
+op = bpy.context.active_operator
+
+op.location = (0.0, 0.0, 0.0)
+op.view_align = False
+op.rotation = (0.0, 0.0, 0.0)
+op.options_plus = True
+op.absolute_location = False
+op.use_colors = False
+op.colorSet = '1'
+op.random_colors = False
+op.saturation = 0.75
+op.geo_surface = True
+op.geo_bDepth = 0.019999999552965164
+op.geo_bRes = 2
+op.geo_extrude = 0.0
+op.geo_offset = 0.0
+op.torus_p = 1
+op.torus_q = 1
+op.flip_p = False
+op.flip_q = False
+op.multiple_links = True
+op.torus_u = 248
+op.torus_v = 344
+op.torus_rP = 0.0
+op.torus_sP = 0.0
+op.mode = 'MAJOR_MINOR'
+op.torus_R = 0.0
+op.torus_r = 2.0
+op.torus_iR = 0.75
+op.torus_eR = 1.25
+op.torus_s = 1.0
+op.torus_h = 0.0
+op.torus_res = 198
+op.segment_res = 33
+op.outputType = 'BEZIER'
+op.handleType = 'AUTOMATIC'
+op.adaptive_resolution = False
diff --git a/presets/operator/curve.torus_knot_plus/slinky_knot.py b/presets/operator/curve.torus_knot_plus/slinky_knot.py
new file mode 100644
index 00000000..c1e32914
--- /dev/null
+++ b/presets/operator/curve.torus_knot_plus/slinky_knot.py
@@ -0,0 +1,38 @@
+import bpy
+op = bpy.context.active_operator
+
+op.rotation = (0.0, 0.0, 0.0)
+op.location = (0.0, 0.0, 0.0)
+op.view_align = False
+op.options_plus = True
+op.absolute_location = False
+op.use_colors = True
+op.colorSet = '2'
+op.random_colors = False
+op.saturation = 0.75
+op.geo_surface = True
+op.geo_bDepth = 0.017999999225139618
+op.geo_bRes = 3
+op.geo_extrude = 0.0
+op.geo_offset = 0.0
+op.torus_p = 10
+op.torus_q = 100
+op.flip_p = False
+op.flip_q = False
+op.multiple_links = True
+op.torus_u = 1
+op.torus_v = 1
+op.torus_rP = 0.0
+op.torus_sP = 0.0
+op.mode = 'MAJOR_MINOR'
+op.torus_R = 1.0
+op.torus_r = 0.4000000059604645
+op.torus_iR = 0.75
+op.torus_eR = 1.25
+op.torus_s = 1.0
+op.torus_h = 1.0
+op.torus_res = 208
+op.segment_res = 12
+op.outputType = 'BEZIER'
+op.handleType = 'AUTOMATIC'
+op.adaptive_resolution = True
diff --git a/presets/operator/curve.torus_knot_plus/snowflake_(2d).py b/presets/operator/curve.torus_knot_plus/snowflake_(2d).py
new file mode 100644
index 00000000..ad0c0a93
--- /dev/null
+++ b/presets/operator/curve.torus_knot_plus/snowflake_(2d).py
@@ -0,0 +1,38 @@
+import bpy
+op = bpy.context.active_operator
+
+op.location = (0.0, 0.0, 0.0)
+op.view_align = False
+op.rotation = (0.0, 0.0, 0.0)
+op.options_plus = True
+op.absolute_location = False
+op.use_colors = False
+op.colorSet = '1'
+op.random_colors = False
+op.saturation = 0.75
+op.geo_surface = True
+op.geo_bDepth = 0.019999999552965164
+op.geo_bRes = 2
+op.geo_extrude = 0.0
+op.geo_offset = 0.0
+op.torus_p = 10
+op.torus_q = 13
+op.flip_p = False
+op.flip_q = False
+op.multiple_links = True
+op.torus_u = 49
+op.torus_v = 84
+op.torus_rP = 0.0
+op.torus_sP = 0.0
+op.mode = 'MAJOR_MINOR'
+op.torus_R = 0.0
+op.torus_r = 2.0
+op.torus_iR = 0.75
+op.torus_eR = 1.25
+op.torus_s = 1.0
+op.torus_h = 0.0
+op.torus_res = 175
+op.segment_res = 33
+op.outputType = 'BEZIER'
+op.handleType = 'AUTOMATIC'
+op.adaptive_resolution = False
diff --git a/presets/operator/curve.torus_knot_plus/sun_cross_(2d).py b/presets/operator/curve.torus_knot_plus/sun_cross_(2d).py
new file mode 100644
index 00000000..f3a49b0d
--- /dev/null
+++ b/presets/operator/curve.torus_knot_plus/sun_cross_(2d).py
@@ -0,0 +1,38 @@
+import bpy
+op = bpy.context.active_operator
+
+op.location = (0.0, 0.0, 0.0)
+op.view_align = False
+op.rotation = (0.0, 0.0, 0.0)
+op.options_plus = True
+op.absolute_location = False
+op.use_colors = False
+op.colorSet = '1'
+op.random_colors = False
+op.saturation = 0.75
+op.geo_surface = True
+op.geo_bDepth = 0.019999999552965164
+op.geo_bRes = 2
+op.geo_extrude = 0.0
+op.geo_offset = 0.0
+op.torus_p = 11
+op.torus_q = 8
+op.flip_p = False
+op.flip_q = False
+op.multiple_links = True
+op.torus_u = 268
+op.torus_v = 438
+op.torus_rP = 0.0
+op.torus_sP = 0.0
+op.mode = 'MAJOR_MINOR'
+op.torus_R = 0.0
+op.torus_r = 2.0
+op.torus_iR = 0.75
+op.torus_eR = 1.25
+op.torus_s = 1.0
+op.torus_h = 0.0
+op.torus_res = 193
+op.segment_res = 33
+op.outputType = 'BEZIER'
+op.handleType = 'AUTOMATIC'
+op.adaptive_resolution = False
diff --git a/presets/operator/curve.torus_knot_plus/tripple_dna.py b/presets/operator/curve.torus_knot_plus/tripple_dna.py
new file mode 100644
index 00000000..ec3ce35b
--- /dev/null
+++ b/presets/operator/curve.torus_knot_plus/tripple_dna.py
@@ -0,0 +1,38 @@
+import bpy
+op = bpy.context.active_operator
+
+op.rotation = (0.0, 0.0, 0.0)
+op.location = (0.0, 0.0, 0.0)
+op.view_align = False
+op.options_plus = True
+op.absolute_location = False
+op.use_colors = True
+op.colorSet = '1'
+op.random_colors = False
+op.saturation = 0.75
+op.geo_surface = True
+op.geo_bDepth = 0.03999999910593033
+op.geo_bRes = 2
+op.geo_extrude = 0.0
+op.geo_offset = 0.0
+op.torus_p = 12
+op.torus_q = 14
+op.flip_p = False
+op.flip_q = False
+op.multiple_links = True
+op.torus_u = 1
+op.torus_v = 33
+op.torus_rP = 0.0
+op.torus_sP = 0.0
+op.mode = 'MAJOR_MINOR'
+op.torus_R = 1.7200000286102295
+op.torus_r = 0.25
+op.torus_iR = 0.75
+op.torus_eR = 1.25
+op.torus_s = 1.0
+op.torus_h = 1.0
+op.torus_res = 526
+op.segment_res = 40
+op.outputType = 'BEZIER'
+op.handleType = 'AUTOMATIC'
+op.adaptive_resolution = True
diff --git a/presets/operator/curve.torus_knot_plus/wicker_basket.py b/presets/operator/curve.torus_knot_plus/wicker_basket.py
new file mode 100644
index 00000000..b4367956
--- /dev/null
+++ b/presets/operator/curve.torus_knot_plus/wicker_basket.py
@@ -0,0 +1,38 @@
+import bpy
+op = bpy.context.active_operator
+
+op.location = (0.0, 0.0, 0.0)
+op.view_align = False
+op.rotation = (0.0, 0.0, 0.0)
+op.options_plus = True
+op.absolute_location = False
+op.use_colors = False
+op.colorSet = '1'
+op.random_colors = False
+op.saturation = 0.75
+op.geo_surface = True
+op.geo_bDepth = 0.019999999552965164
+op.geo_bRes = 2
+op.geo_extrude = 0.0
+op.geo_offset = 0.0
+op.torus_p = 4
+op.torus_q = 1
+op.flip_p = False
+op.flip_q = False
+op.multiple_links = True
+op.torus_u = 377
+op.torus_v = 233
+op.torus_rP = 0.0
+op.torus_sP = 0.0
+op.mode = 'MAJOR_MINOR'
+op.torus_R = 1.0
+op.torus_r = 0.10999999940395355
+op.torus_iR = 0.75
+op.torus_eR = 1.25
+op.torus_s = 1.0
+op.torus_h = 10.0
+op.torus_res = 457
+op.segment_res = 33
+op.outputType = 'BEZIER'
+op.handleType = 'AUTOMATIC'
+op.adaptive_resolution = False
diff --git a/render_povray/__init__.py b/render_povray/__init__.py
index bd68f5e9..f3f217cc 100644
--- a/render_povray/__init__.py
+++ b/render_povray/__init__.py
@@ -20,9 +20,10 @@
bl_info = {
"name": "POV-Ray 3.7",
- "author": "Campbell Barton, Silvio Falcinelli, Maurice Raybaud, Constantin Rahn, Bastien Montagne",
+ "author": "Campbell Barton, Silvio Falcinelli, Maurice Raybaud, "
+ "Constantin Rahn, Bastien Montagne, Leonid Desyatkov",
"version": (0, 0, 9),
- "blender": (2, 57, 0),
+ "blender": (2, 75, 0),
"location": "Render > Engine > POV-Ray 3.7",
"description": "Basic POV-Ray 3.7 integration for blender",
"warning": "this script is RC",
@@ -39,6 +40,7 @@ if "bpy" in locals():
else:
import bpy
+ import addon_utils # To use some other addons
from bpy.types import (
AddonPreferences,
PropertyGroup,
@@ -52,6 +54,7 @@ else:
FloatVectorProperty,
EnumProperty,
PointerProperty,
+ CollectionProperty,
)
from . import (
ui,
@@ -59,6 +62,7 @@ else:
update_files,
)
+
def string_strip_hyphen(name):
return name.replace("-", "")
@@ -69,34 +73,40 @@ class RenderPovSettingsScene(PropertyGroup):
# File Options
tempfiles_enable = BoolProperty(
name="Enable Tempfiles",
- description="Enable the OS-Tempfiles. Otherwise set the path where to save the files",
+ description="Enable the OS-Tempfiles. Otherwise set the path where"
+ " to save the files",
default=True)
pov_editor = BoolProperty(
name="POV-Ray editor",
- description="Don't Close POV-Ray editor after rendering (Overriden by /EXIT command)",
+ description="Don't Close POV-Ray editor after rendering (Overriden"
+ " by /EXIT command)",
default=False)
deletefiles_enable = BoolProperty(
name="Delete files",
- description="Delete files after rendering. Doesn't work with the image",
+ description="Delete files after rendering. "
+ "Doesn't work with the image",
default=True)
scene_name = StringProperty(
name="Scene Name",
- description="Name of POV-Ray scene to create. Empty name will use the name of "
- "the blend file",
+ description="Name of POV-Ray scene to create. Empty name will use "
+ "the name of the blend file",
maxlen=1024)
scene_path = StringProperty(
name="Export scene path",
- # description="Path to directory where the exported scene (POV and INI) is created", # Bug in POV-Ray RC3
+ # Bug in POV-Ray RC3
+ # description="Path to directory where the exported scene "
+ # "(POV and INI) is created",
description="Path to directory where the files are created",
maxlen=1024, subtype="DIR_PATH")
renderimage_path = StringProperty(
name="Rendered image path",
- description="Full path to directory where the rendered image is saved",
+ description="Full path to directory where the rendered image is "
+ "saved",
maxlen=1024, subtype="DIR_PATH")
list_lf_enable = BoolProperty(
name="LF in lists",
- description="Enable line breaks in lists (vectors and indices). Disabled: "
- "lists are exported in one line",
+ description="Enable line breaks in lists (vectors and indices). "
+ "Disabled: lists are exported in one line",
default=True)
# Not a real pov option, just to know if we should write
@@ -104,10 +114,12 @@ class RenderPovSettingsScene(PropertyGroup):
name="Enable Radiosity",
description="Enable POV-Rays radiosity calculation",
default=False)
+
radio_display_advanced = BoolProperty(
name="Advanced Options",
description="Show advanced options",
default=False)
+
media_enable = BoolProperty(
name="Enable Media",
description="Enable POV-Rays atmospheric media",
@@ -121,7 +133,9 @@ class RenderPovSettingsScene(PropertyGroup):
media_color = FloatVectorProperty(
name="Media Color", description="The atmospheric media color",
precision=4, step=0.01, min=0, soft_max=1,
- default=(0.001, 0.001, 0.001), options={'ANIMATABLE'}, subtype='COLOR')
+ default=(0.001, 0.001, 0.001),
+ options={'ANIMATABLE'},
+ subtype='COLOR')
baking_enable = BoolProperty(
name="Enable Baking",
@@ -147,8 +161,9 @@ class RenderPovSettingsScene(PropertyGroup):
# Real pov options
command_line_switches = StringProperty(
name="Command Line Switches",
- description="Command line switches consist of a + (plus) or - (minus) sign, followed "
- "by one or more alphabetic characters and possibly a numeric value",
+ description="Command line switches consist of a + (plus) or - "
+ "(minus) sign, followed by one or more alphabetic "
+ "characters and possibly a numeric value",
maxlen=500)
antialias_enable = BoolProperty(
@@ -157,13 +172,15 @@ class RenderPovSettingsScene(PropertyGroup):
antialias_method = EnumProperty(
name="Method",
- description="AA-sampling method. Type 1 is an adaptive, non-recursive, super-sampling "
- "method. Type 2 is an adaptive and recursive super-sampling method. "
- "Type 3 is a stochastic halton based super-sampling method",
+ description="AA-sampling method. Type 1 is an adaptive, "
+ "non-recursive, super-sampling method. Type 2 is an "
+ "adaptive and recursive super-sampling method. Type 3 "
+ "is a stochastic halton based super-sampling method",
items=(("0", "non-recursive AA", "Type 1 Sampling in POV-Ray"),
("1", "recursive AA", "Type 2 Sampling in POV-Ray"),
("2", "stochastic AA", "Type 3 Sampling in UberPOV")),
default="1")
+
antialias_confidence = FloatProperty(
name="Antialias Confidence",
description="how surely the computed color "
@@ -180,8 +197,9 @@ class RenderPovSettingsScene(PropertyGroup):
jitter_enable = BoolProperty(
name="Jitter",
- description="Enable Jittering. Adds noise into the sampling process (it should be "
- "avoided to use jitter in animation)",
+ description="Enable Jittering. Adds noise into the sampling "
+ "process (it should be avoided to use jitter in "
+ "animation)",
default=False)
jitter_amount = FloatProperty(
@@ -190,34 +208,56 @@ class RenderPovSettingsScene(PropertyGroup):
antialias_gamma = FloatProperty(
name="Antialias Gamma",
- description="POV-Ray compares gamma-adjusted values for super sampling. Antialias "
- "Gamma sets the Gamma before comparison",
+ description="POV-Ray compares gamma-adjusted values for super "
+ "sampling. Antialias Gamma sets the Gamma before "
+ "comparison",
min=0.0, max=5.0, soft_min=0.01, soft_max=2.5, default=2.5)
max_trace_level = IntProperty(
name="Max Trace Level",
- description="Number of reflections/refractions allowed on ray path",
+ description="Number of reflections/refractions allowed on ray "
+ "path",
min=1, max=256, default=5)
+ ########################### PHOTONS #######################################
+ photon_enable = BoolProperty(
+ name="Photons",
+ description="Enable global photons",
+ default=False)
+
+ photon_enable_count = BoolProperty(
+ name="Spacing / Count",
+ description="Enable count photons",
+ default=False)
+
+ photon_count = IntProperty(
+ name="Count",
+ description="Photons count",
+ min=1, max=100000000, default=20000)
+
photon_spacing = FloatProperty(
name="Spacing",
- description="Average distance between photons on surfaces. half this get four times "
- "as many surface photons",
- min=0.001, max=1.000, soft_min=0.001, soft_max=1.000, default=0.005, precision=3)
+ description="Average distance between photons on surfaces. half "
+ "this get four times as many surface photons",
+ min=0.001, max=1.000, default=0.005,
+ soft_min=0.001, soft_max=1.000, precision=3)
photon_max_trace_level = IntProperty(
name="Max Trace Level",
- description="Number of reflections/refractions allowed on ray path",
+ description="Number of reflections/refractions allowed on ray "
+ "path",
min=1, max=256, default=5)
photon_adc_bailout = FloatProperty(
name="ADC Bailout",
description="The adc_bailout for photons. Use adc_bailout = "
"0.01 / brightest_ambient_object for good results",
- min=0.0, max=1000.0, soft_min=0.0, soft_max=1.0, default=0.1, precision=3)
+ min=0.0, max=1000.0, default=0.1,
+ soft_min=0.0, soft_max=1.0, precision=3)
photon_gather_min = IntProperty(
- name="Gather Min", description="Minimum number of photons gathered for each point",
+ name="Gather Min", description="Minimum number of photons gathered"
+ "for each point",
min=1, max=256, default=20)
photon_gather_max = IntProperty(
@@ -228,7 +268,7 @@ class RenderPovSettingsScene(PropertyGroup):
name="ADC Bailout",
description="The adc_bailout for radiosity rays. Use "
"adc_bailout = 0.01 / brightest_ambient_object for good results",
- min=0.0, max=1000.0, soft_min=0.0, soft_max=1.0, default=0.01, precision=3)
+ min=0.0, max=1000.0, soft_min=0.0, soft_max=1.0, default=0.0039, precision=4)
radio_always_sample = BoolProperty(
name="Always Sample",
@@ -291,7 +331,7 @@ class RenderPovSettingsScene(PropertyGroup):
name="Recursion Limit",
description="how many recursion levels are used to calculate "
"the diffuse inter-reflection",
- min=1, max=20, default=3)
+ min=1, max=20, default=1)
radio_pretrace_start = FloatProperty(
name="Pretrace Start",
@@ -303,7 +343,7 @@ class RenderPovSettingsScene(PropertyGroup):
name="Pretrace End",
description="Fraction of the screen width which sets the size of the blocks "
"in the mosaic preview last pass",
- min=0.001, max=1.00, soft_min=0.01, soft_max=1.00, default=0.04, precision=3)
+ min=0.000925, max=1.00, soft_min=0.01, soft_max=1.00, default=0.04, precision=3)
###############################################################################
@@ -352,7 +392,8 @@ class RenderPovSettingsMaterial(PropertyGroup):
min=0.0, max=10.0, soft_min=0.000, soft_max=1.0, default=0)
interior_fade_color = FloatVectorProperty(
- name="Fade Color", description="Color of filtered attenuation for transparent materials",
+ name="Fade Color", description="Color of filtered attenuation for transparent "
+ "materials",
precision=4, step=0.01, min=0.0, soft_max=1.0,
default=(0, 0, 0), options={'ANIMATABLE'}, subtype='COLOR')
@@ -439,29 +480,44 @@ class RenderPovSettingsTexture(PropertyGroup):
tex_pattern_type = EnumProperty(
name="Texture_Type",
description="Choose between Blender or POV-Ray parameters to specify texture",
- items= (('agate', 'Agate', '','PLUGIN', 0), ('aoi', 'Aoi', '', 'PLUGIN', 1),
- ('average', 'Average', '', 'PLUGIN', 2), ('boxed', 'Boxed', '', 'PLUGIN', 3),
- ('bozo', 'Bozo', '', 'PLUGIN', 4), ('bumps', 'Bumps', '', 'PLUGIN', 5),
- ('cells', 'Cells', '', 'PLUGIN', 6), ('crackle', 'Crackle', '', 'PLUGIN', 7),
- ('cubic', 'Cubic', '', 'PLUGIN', 8), ('cylindrical', 'Cylindrical', '', 'PLUGIN', 9),
+ items= (('agate', 'Agate', '','PLUGIN', 0),
+ ('aoi', 'Aoi', '', 'PLUGIN', 1),
+ ('average', 'Average', '', 'PLUGIN', 2),
+ ('boxed', 'Boxed', '', 'PLUGIN', 3),
+ ('bozo', 'Bozo', '', 'PLUGIN', 4),
+ ('bumps', 'Bumps', '', 'PLUGIN', 5),
+ ('cells', 'Cells', '', 'PLUGIN', 6),
+ ('crackle', 'Crackle', '', 'PLUGIN', 7),
+ ('cubic', 'Cubic', '', 'PLUGIN', 8),
+ ('cylindrical', 'Cylindrical', '', 'PLUGIN', 9),
('density_file', 'Density', '(.df3)', 'PLUGIN', 10),
('dents', 'Dents', '', 'PLUGIN', 11),
('fractal', 'Fractal', '', 'PLUGIN', 12),
('function', 'Function', '', 'PLUGIN', 13),
- ('gradient', 'Gradient', '', 'PLUGIN', 14), ('granite', 'Granite', '', 'PLUGIN', 15),
+ ('gradient', 'Gradient', '', 'PLUGIN', 14),
+ ('granite', 'Granite', '', 'PLUGIN', 15),
('image_pattern', 'Image pattern', '', 'PLUGIN', 16),
('leopard', 'Leopard', '', 'PLUGIN', 17),
- ('marble', 'Marble', '', 'PLUGIN', 18), ('onion', 'Onion', '', 'PLUGIN', 19),
+ ('marble', 'Marble', '', 'PLUGIN', 18),
+ ('onion', 'Onion', '', 'PLUGIN', 19),
('pigment_pattern', 'pigment pattern', '', 'PLUGIN', 20),
- ('planar', 'Planar', '', 'PLUGIN', 21), ('quilted', 'Quilted', '', 'PLUGIN', 22),
- ('radial', 'Radial', '', 'PLUGIN', 23), ('ripples', 'Ripples', '', 'PLUGIN', 24),
+ ('planar', 'Planar', '', 'PLUGIN', 21),
+ ('quilted', 'Quilted', '', 'PLUGIN', 22),
+ ('radial', 'Radial', '', 'PLUGIN', 23),
+ ('ripples', 'Ripples', '', 'PLUGIN', 24),
('slope', 'Slope', '', 'PLUGIN', 25),
- ('spherical', 'Spherical', '', 'PLUGIN', 26), ('spiral1', 'Spiral1', '', 'PLUGIN', 27),
- ('spiral2', 'Spiral2', '', 'PLUGIN', 28), ('spotted', 'Spotted', '', 'PLUGIN', 29),
- ('waves', 'Waves', '', 'PLUGIN', 30), ('wood', 'Wood', '', 'PLUGIN', 31),
- ('wrinkles', 'Wrinkles', '', 'PLUGIN', 32), ('brick', "Brick", "", 'PLUGIN', 33),
- ('checker', "Checker", "", 'PLUGIN', 34), ('hexagon', "Hexagon", "", 'PLUGIN', 35),
- ('object', "Mesh", "", 'PLUGIN', 36), ('emulator', "Internal Emulator", "", 'PLUG', 37)),
+ ('spherical', 'Spherical', '', 'PLUGIN', 26),
+ ('spiral1', 'Spiral1', '', 'PLUGIN', 27),
+ ('spiral2', 'Spiral2', '', 'PLUGIN', 28),
+ ('spotted', 'Spotted', '', 'PLUGIN', 29),
+ ('waves', 'Waves', '', 'PLUGIN', 30),
+ ('wood', 'Wood', '', 'PLUGIN', 31),
+ ('wrinkles', 'Wrinkles', '', 'PLUGIN', 32),
+ ('brick', "Brick", "", 'PLUGIN', 33),
+ ('checker', "Checker", "", 'PLUGIN', 34),
+ ('hexagon', "Hexagon", "", 'PLUGIN', 35),
+ ('object', "Mesh", "", 'PLUGIN', 36),
+ ('emulator', "Internal Emulator", "", 'PLUG', 37)),
default='emulator',
)
@@ -479,8 +535,9 @@ class RenderPovSettingsTexture(PropertyGroup):
warp_types = EnumProperty(
name="Warp Types",
description="Select the type of warp",
- items=(('PLANAR', "Planar", ""), ('CUBIC', "Cubic", ""), ('SPHERICAL', "Spherical", ""),
- ('TOROIDAL', "Toroidal", ""), ('CYLINDRICAL', "Cylindrical", ""),('NONE', "None", "No indentation")),
+ items=(('PLANAR', "Planar", ""), ('CUBIC', "Cubic", ""),
+ ('SPHERICAL', "Spherical", ""), ('TOROIDAL', "Toroidal", ""),
+ ('CYLINDRICAL', "Cylindrical", ""), ('NONE', "None", "No indentation")),
default='NONE')
warp_orientation = EnumProperty(
@@ -492,8 +549,8 @@ class RenderPovSettingsTexture(PropertyGroup):
wave_type = EnumProperty(
name="Waves type",
description="Select the type of waves",
- items=(('ramp', "Ramp", ""), ('sine', "Sine", ""), ('scallop', "Scallop", ""), ('cubic', "Cubic", ""),
- ('poly', "Poly", ""), ('triangle', 'Triangle', "")),
+ items=(('ramp', "Ramp", ""), ('sine', "Sine", ""), ('scallop', "Scallop", ""),
+ ('cubic', "Cubic", ""), ('poly', "Poly", ""), ('triangle', 'Triangle', "")),
default='ramp')
gen_noise = IntProperty(
@@ -544,12 +601,14 @@ class RenderPovSettingsTexture(PropertyGroup):
modifier_phase = FloatProperty(
name="Phase",
- description="The phase value causes the map entries to be shifted so that the map starts and ends at a different place",
+ description="The phase value causes the map entries to be shifted so that the map "
+ "starts and ends at a different place",
min=0.0, max=2.0, default=0.0)
modifier_frequency = FloatProperty(
name="Frequency",
- description="The frequency keyword adjusts the number of times that a color map repeats over one cycle of a pattern",
+ description="The frequency keyword adjusts the number of times that a color map "
+ "repeats over one cycle of a pattern",
min=0.0, max=25.0, default=2.0)
modifier_turbulence = FloatProperty(
@@ -703,8 +762,8 @@ class RenderPovSettingsTexture(PropertyGroup):
description="",
min=0, max=4, default=0)
- #########FUNCTIONS#########################################################################################################################
- #########FUNCTIONS#########################################################################################################################
+ #########FUNCTIONS#############################################################################
+ #########FUNCTIONS#############################################################################
func_list = EnumProperty(
name="Functions",
@@ -715,21 +774,27 @@ class RenderPovSettingsTexture(PropertyGroup):
("f_bicorn","Bicorn",""), ("f_bifolia","Bifolia",""),
("f_blob","Blob",""), ("f_blob2","Blob2",""),
("f_boy_surface","Boy surface",""), ("f_comma","Comma",""),
- ("f_cross_ellipsoids","Cross ellipsoids",""), ("f_crossed_trough","Crossed trough",""),
- ("f_cubic_saddle","Cubic saddle",""), ("f_cushion","Cushion",""),
- ("f_devils_curve","Devils curve",""), ("f_devils_curve_2d","Devils curve 2d",""),
+ ("f_cross_ellipsoids","Cross ellipsoids",""),
+ ("f_crossed_trough","Crossed trough",""), ("f_cubic_saddle","Cubic saddle",""),
+ ("f_cushion","Cushion",""), ("f_devils_curve","Devils curve",""),
+ ("f_devils_curve_2d","Devils curve 2d",""),
("f_dupin_cyclid","Dupin cyclid",""), ("f_ellipsoid","Ellipsoid",""),
("f_enneper","Enneper",""), ("f_flange_cover","Flange cover",""),
- ("f_folium_surface","Folium surface",""), ("f_folium_surface_2d","Folium surface 2d",""),
- ("f_glob","Glob",""), ("f_heart","Heart",""),
- ("f_helical_torus","Helical torus",""), ("f_helix1","Helix1",""),
- ("f_helix2","Helix2",""), ("f_hex_x","Hex x",""),
+ ("f_folium_surface","Folium surface",""),
+ ("f_folium_surface_2d","Folium surface 2d",""), ("f_glob","Glob",""),
+ ("f_heart","Heart",""), ("f_helical_torus","Helical torus",""),
+ ("f_helix1","Helix1",""), ("f_helix2","Helix2",""), ("f_hex_x","Hex x",""),
("f_hex_y","Hex y",""), ("f_hetero_mf","Hetero mf",""),
- ("f_hunt_surface","Hunt surface",""), ("f_hyperbolic_torus","Hyperbolic torus",""),
- ("f_isect_ellipsoids","Isect ellipsoids",""), ("f_kampyle_of_eudoxus","Kampyle of eudoxus",""),
- ("f_kampyle_of_eudoxus_2d","Kampyle of eudoxus 2d",""), ("f_klein_bottle","Klein bottle",""),
- ("f_kummer_surface_v1","Kummer surface v1",""), ("f_kummer_surface_v2","Kummer surface v2",""),
- ("f_lemniscate_of_gerono","Lemniscate of gerono",""), ("f_lemniscate_of_gerono_2d","Lemniscate of gerono 2d",""),
+ ("f_hunt_surface","Hunt surface",""),
+ ("f_hyperbolic_torus","Hyperbolic torus",""),
+ ("f_isect_ellipsoids","Isect ellipsoids",""),
+ ("f_kampyle_of_eudoxus","Kampyle of eudoxus",""),
+ ("f_kampyle_of_eudoxus_2d","Kampyle of eudoxus 2d",""),
+ ("f_klein_bottle","Klein bottle",""),
+ ("f_kummer_surface_v1","Kummer surface v1",""),
+ ("f_kummer_surface_v2","Kummer surface v2",""),
+ ("f_lemniscate_of_gerono","Lemniscate of gerono",""),
+ ("f_lemniscate_of_gerono_2d","Lemniscate of gerono 2d",""),
("f_mesh1","Mesh1",""), ("f_mitre","Mitre",""),
("f_nodal_cubic","Nodal cubic",""), ("f_noise3d","Noise3d",""),
("f_noise_generator","Noise generator",""), ("f_odd","Odd",""),
@@ -738,7 +803,8 @@ class RenderPovSettingsTexture(PropertyGroup):
("f_pillow","Pillow",""), ("f_piriform","Piriform",""),
("f_piriform_2d","Piriform 2d",""), ("f_poly4","Poly4",""),
("f_polytubes","Polytubes",""), ("f_quantum","Quantum",""),
- ("f_quartic_paraboloid","Quartic paraboloid",""), ("f_quartic_saddle","Quartic saddle",""),
+ ("f_quartic_paraboloid","Quartic paraboloid",""),
+ ("f_quartic_saddle","Quartic saddle",""),
("f_quartic_cylinder","Quartic cylinder",""), ("f_r","R",""),
("f_ridge","Ridge",""), ("f_ridged_mf","Ridged mf",""),
("f_rounded_box","Rounded box",""), ("f_sphere","Sphere",""),
@@ -748,7 +814,8 @@ class RenderPovSettingsTexture(PropertyGroup):
("f_superellipsoid","Superellipsoid",""), ("f_th","Th",""),
("f_torus","Torus",""), ("f_torus2","Torus2",""),
("f_torus_gumdrop","Torus gumdrop",""), ("f_umbrella","Umbrella",""),
- ("f_witch_of_agnesi","Witch of agnesi",""), ("f_witch_of_agnesi_2d","Witch of agnesi 2d","")),
+ ("f_witch_of_agnesi","Witch of agnesi",""),
+ ("f_witch_of_agnesi_2d","Witch of agnesi 2d","")),
default='NONE')
@@ -879,10 +946,13 @@ class RenderPovSettingsTexture(PropertyGroup):
tex_scale_z = FloatProperty(
name="Scale Z",
description="",
- min=0.0, max=10000.0, default=1.0)
+ min=0.0, max=10000.0, default=1.0)
+
+
###############################################################################
# Object POV properties.
###############################################################################
+
class RenderPovSettingsObject(PropertyGroup):
# Importance sampling
importance_value = FloatProperty(
@@ -916,7 +986,399 @@ class RenderPovSettingsObject(PropertyGroup):
"it points at. Any POV shape expected e.g: isosurface {}",
default="")
+ #############POV-Ray specific object properties.############################
+ object_as = StringProperty(maxlen=1024)
+
+ imported_loc = FloatVectorProperty(
+ name="Imported Pov location",
+ precision=6,
+ default=(0.0, 0.0, 0.0))
+
+ unlock_parameters = BoolProperty(name="Lock",default = False)
+
+ curveshape = EnumProperty(
+ name="Povray Shape Type",
+ items=(("birail", "Birail", ""),
+ ("cairo", "Cairo", ""),
+ ("lathe", "Lathe", ""),
+ ("loft", "Loft", ""),
+ ("prism", "Prism", ""),
+ ("sphere_sweep", "Sphere Sweep", "")),
+ default="sphere_sweep")
+
+ mesh_write_as = EnumProperty(
+ name="Mesh Write As",
+ items=(("blobgrid", "Blob Grid", ""),
+ ("grid", "Grid", ""),
+ ("mesh", "Mesh", "")),
+ default="mesh")
+ # shape_as_light = StringProperty(name="Light",maxlen=1024)
+
+ # object_ior = FloatProperty(
+ # name="IOR", description="IOR",
+ # min=1.0, max=10.0,default=1.0)
+ # fake_caustics_power = FloatProperty(
+ # name="Power", description="Fake caustics power",
+ # min=0.0, max=10.0,default=0.0)
+ # target = BoolProperty(name="Target",description="",default=False)
+ # target_value = FloatProperty(
+ # name="Value", description="",
+ # min=0.0, max=1.0,default=1.0)
+ # refraction = BoolProperty(name="Refraction",description="",default=False)
+ # dispersion = BoolProperty(name="Dispersion",description="",default=False)
+ # dispersion_value = FloatProperty(
+ # name="Dispersion", description="Good values are 1.01 to 1.1. ",
+ # min=1.0, max=1.2,default=1.01)
+ # dispersion_samples = IntProperty(name="Samples",min=2, max=100,default=7)
+ # reflection = BoolProperty(name="Reflection",description="",default=False)
+ # pass_through = BoolProperty(name="Pass through",description="",default=False)
+ no_shadow = BoolProperty(name="No Shadow",default=False)
+
+ no_image = BoolProperty(name="No Image",default=False)
+
+ no_reflection = BoolProperty(name="No Reflection",default=False)
+
+ no_radiosity = BoolProperty(name="No Radiosity",default=False)
+
+ inverse = BoolProperty(name="Inverse",default=False)
+
+ sturm = BoolProperty(name="Sturm",default=False)
+
+ double_illuminate = BoolProperty(name="Double Illuminate",default=False)
+
+ hierarchy = BoolProperty(name="Hierarchy",default=False)
+
+ hollow = BoolProperty(name="Hollow",default=False)
+
+ boundorclip = EnumProperty(
+ name="Boundorclip",
+ items=(("none", "None", ""),
+ ("bounded_by", "Bounded_by", ""),
+ ("clipped_by", "Clipped_by", "")),
+ default="none")
+ boundorclipob = StringProperty(maxlen=1024)
+
+ addboundorclip = BoolProperty(description="",default=False)
+
+ blob_threshold = FloatProperty(name="Threshold",min=0.00, max=10.0, default=0.6)
+
+ cylinder_radius = FloatProperty(name="Cylinder R",min=0.00, max=10.0, default=0.04)
+
+
+
+ blob_strength = FloatProperty(name="Strength",min=-10.00, max=10.0, default=1.00)
+
+ res_u = IntProperty(name="U",min=100, max=1000, default=500)
+
+ res_v = IntProperty(name="V",min=100, max=1000, default=500)
+
+ contained_by = EnumProperty(
+ name="Contained by",
+ items=(("box", "Box", ""),
+ ("sphere", "Sphere", "")),
+ default="box")
+
+ container_scale = FloatProperty(name="Container Scale",min=0.0, max=10.0, default=1.00)
+
+ threshold = FloatProperty(name="Threshold",min=0.0, max=10.0, default=0.00)
+
+ accuracy = FloatProperty(name="Accuracy",min=0.0001, max=0.1, default=0.001)
+
+ max_gradient = FloatProperty(name="Max Gradient",min=0.0, max=100.0, default=5.0)
+
+ all_intersections = BoolProperty(name="All Intersections",default=False)
+
+ max_trace = IntProperty(name="Max Trace",min=1, max=100,default=1)
+
+
+
+ def prop_update_sphere(self, context):
+ bpy.ops.pov.sphere_update()
+ sphere_radius = FloatProperty(name="Sphere radius",min=0.00, max=10.0, default=0.5, update=prop_update_sphere)
+
+
+
+ def prop_update_cone(self, context):
+ bpy.ops.pov.cone_update()
+
+ cone_base_radius = FloatProperty(
+ name = "Base radius", description = "The first radius of the cone",
+ default = 1.0, min = 0.01, max = 100.0, update=prop_update_cone)
+ cone_cap_radius = FloatProperty(
+ name = "Cap radius", description = "The second radius of the cone",
+ default = 0.3, min = 0.0, max = 100.0, update=prop_update_cone)
+
+ cone_segments = IntProperty(
+ name = "Segments", description = "Radial segmentation of proxy mesh",
+ default = 16, min = 3, max = 265, update=prop_update_cone)
+
+ cone_height = FloatProperty(
+ name = "Height", description = "Height of the cone",
+ default = 2.0, min = 0.01, max = 100.0, update=prop_update_cone)
+ cone_base_z = FloatProperty()
+ cone_cap_z = FloatProperty()
+
+###########Parametric
+ def prop_update_parametric(self, context):
+ bpy.ops.pov.parametric_update()
+
+ u_min = FloatProperty(name = "U Min",
+ description = "",
+ default = 0.0, update=prop_update_parametric)
+ v_min = FloatProperty(name = "V Min",
+ description = "",
+ default = 0.0, update=prop_update_parametric)
+ u_max = FloatProperty(name = "U Max",
+ description = "",
+ default = 6.28, update=prop_update_parametric)
+ v_max = FloatProperty(name = "V Max",
+ description = "",
+ default = 12.57, update=prop_update_parametric)
+ x_eq = StringProperty(
+ maxlen=1024, default = "cos(v)*(1+cos(u))*sin(v/8)", update=prop_update_parametric)
+ y_eq = StringProperty(
+ maxlen=1024, default = "sin(u)*sin(v/8)+cos(v/8)*1.5", update=prop_update_parametric)
+ z_eq = StringProperty(
+ maxlen=1024, default = "sin(v)*(1+cos(u))*sin(v/8)", update=prop_update_parametric)
+
+###########Torus
+
+ def prop_update_torus(self, context):
+ bpy.ops.pov.torus_update()
+
+ torus_major_segments = IntProperty(
+ name = "Segments", description = "Radial segmentation of proxy mesh",
+ default = 48, min = 3, max = 720, update=prop_update_torus)
+ torus_minor_segments = IntProperty(
+ name = "Segments", description = "Cross-section segmentation of proxy mesh",
+ default = 12, min = 3, max = 720, update=prop_update_torus)
+ torus_major_radius = FloatProperty(
+ name="Major radius",
+ description="Major radius",
+ min=0.00, max=100.00, default=1.0, update=prop_update_torus)
+ torus_minor_radius = FloatProperty(
+ name="Minor radius",
+ description="Minor radius",
+ min=0.00, max=100.00, default=0.25, update=prop_update_torus)
+
+
+###########Rainbow
+ arc_angle = FloatProperty(name = "Arc angle",
+ description = "The angle of the raynbow arc in degrees",
+ default = 360, min = 0.01, max = 360.0)
+ falloff_angle = FloatProperty(name = "Falloff angle",
+ description = "The angle after which rainbow dissolves into background",
+ default = 360, min = 0.0, max = 360)
+
+###########HeightFields
+
+ quality = IntProperty(name = "Quality",
+ description = "",
+ default = 100, min = 1, max = 100)
+
+ hf_filename = StringProperty(maxlen = 1024)
+
+ hf_gamma = FloatProperty(
+ name="Gamma",
+ description="Gamma",
+ min=0.0001, max=20.0, default=1.0)
+
+ hf_premultiplied = BoolProperty(
+ name="Premultiplied",
+ description="Premultiplied",
+ default=True)
+
+ hf_smooth = BoolProperty(
+ name="Smooth",
+ description="Smooth",
+ default=False)
+
+ hf_water = FloatProperty(
+ name="Water Level",
+ description="Wather Level",
+ min=0.00, max=1.00, default=0.0)
+
+ hf_hierarchy = BoolProperty(
+ name="Hierarchy",
+ description="Height field hierarchy",
+ default=True)
+
+##############Superellipsoid
+ def prop_update_superellipsoid(self, context):
+ bpy.ops.pov.superellipsoid_update()
+
+ se_param1 = FloatProperty(
+ name="Parameter 1",
+ description="",
+ min=0.00, max=10.0, default=0.04)
+
+ se_param2 = FloatProperty(
+ name="Parameter 2",
+ description="",
+ min=0.00, max=10.0, default=0.04)
+
+ se_u = IntProperty(name = "U-segments",
+ description = "radial segmentation",
+ default = 20, min = 4, max = 265,
+ update=prop_update_superellipsoid)
+ se_v = IntProperty(name = "V-segments",
+ description = "lateral segmentation",
+ default = 20, min = 4, max = 265,
+ update=prop_update_superellipsoid)
+ se_n1 = FloatProperty(name = "Ring manipulator",
+ description = "Manipulates the shape of the Ring",
+ default = 1.0, min = 0.01, max = 100.0,
+ update=prop_update_superellipsoid)
+ se_n2 = FloatProperty(name = "Cross manipulator",
+ description = "Manipulates the shape of the cross-section",
+ default = 1.0, min = 0.01, max = 100.0,
+ update=prop_update_superellipsoid)
+ se_edit = EnumProperty(items=[("NOTHING", "Nothing", ""),
+ ("NGONS", "N-Gons", ""),
+ ("TRIANGLES", "Triangles", "")],
+ name="Fill up and down",
+ description="",
+ default='TRIANGLES',
+ update=prop_update_superellipsoid)
+#############Used for loft and Superellipsoid, etc.
+ curveshape = EnumProperty(
+ name="Povray Shape Type",
+ items=(("birail", "Birail", ""),
+ ("cairo", "Cairo", ""),
+ ("lathe", "Lathe", ""),
+ ("loft", "Loft", ""),
+ ("prism", "Prism", ""),
+ ("sphere_sweep", "Sphere Sweep", ""),
+ ("sor", "Surface of Revolution", "")),
+ default="sphere_sweep")
+
+#############Supertorus
+ def prop_update_supertorus(self, context):
+ bpy.ops.pov.supertorus_update()
+
+ st_major_radius = FloatProperty(
+ name="Major radius",
+ description="Major radius",
+ min=0.00, max=100.00, default=1.0,
+ update=prop_update_supertorus)
+
+ st_minor_radius = FloatProperty(
+ name="Minor radius",
+ description="Minor radius",
+ min=0.00, max=100.00, default=0.25,
+ update=prop_update_supertorus)
+
+ st_ring = FloatProperty(
+ name="Ring",
+ description="Ring manipulator",
+ min=0.0001, max=100.00, default=1.00,
+ update=prop_update_supertorus)
+
+ st_cross = FloatProperty(
+ name="Cross",
+ description="Cross manipulator",
+ min=0.0001, max=100.00, default=1.00,
+ update=prop_update_supertorus)
+
+ st_accuracy = FloatProperty(
+ name="Accuracy",
+ description="Supertorus accuracy",
+ min=0.00001, max=1.00, default=0.001)
+
+ st_max_gradient = FloatProperty(
+ name="Gradient",
+ description="Max gradient",
+ min=0.0001, max=100.00, default=10.00,
+ update=prop_update_supertorus)
+
+ st_R = FloatProperty(name = "big radius",
+ description = "The radius inside the tube",
+ default = 1.0, min = 0.01, max = 100.0,
+ update=prop_update_supertorus)
+ st_r = FloatProperty(name = "small radius",
+ description = "The radius of the tube",
+ default = 0.3, min = 0.01, max = 100.0,
+ update=prop_update_supertorus)
+ st_u = IntProperty(name = "U-segments",
+ description = "radial segmentation",
+ default = 16, min = 3, max = 265,
+ update=prop_update_supertorus)
+ st_v = IntProperty(name = "V-segments",
+ description = "lateral segmentation",
+ default = 8, min = 3, max = 265,
+ update=prop_update_supertorus)
+ st_n1 = FloatProperty(name = "Ring manipulator",
+ description = "Manipulates the shape of the Ring",
+ default = 1.0, min = 0.01, max = 100.0,
+ update=prop_update_supertorus)
+ st_n2 = FloatProperty(name = "Cross manipulator",
+ description = "Manipulates the shape of the cross-section",
+ default = 1.0, min = 0.01, max = 100.0,
+ update=prop_update_supertorus)
+ st_ie = BoolProperty(name = "Use Int.+Ext. radii",
+ description = "Use internal and external radii",
+ default = False,
+ update=prop_update_supertorus)
+ st_edit = BoolProperty(name="",
+ description="",
+ default=False,
+ options={'HIDDEN'},
+ update=prop_update_supertorus)
+
+########################Loft
+ loft_n = IntProperty(name = "Segments",
+ description = "Vertical segments",
+ default = 16, min = 3, max = 720)
+ loft_rings_bottom = IntProperty(name = "Bottom",
+ description = "Bottom rings",
+ default = 5, min = 2, max = 100)
+ loft_rings_side = IntProperty(name = "Side",
+ description = "Side rings",
+ default = 10, min = 2, max = 100)
+ loft_thick = FloatProperty(name = "Thickness",
+ description = "Manipulates the shape of the Ring",
+ default = 0.3, min = 0.01, max = 1.0)
+ loft_r = FloatProperty(name = "Radius",
+ description = "Radius",
+ default = 1, min = 0.01, max = 10)
+ loft_height = FloatProperty(name = "Height",
+ description = "Manipulates the shape of the Ring",
+ default = 2, min = 0.01, max = 10.0)
+
+###################Prism
+ prism_n = IntProperty(name = "Sides",
+ description = "Number of sides",
+ default = 5, min = 3, max = 720)
+ prism_r = FloatProperty(name = "Radius",
+ description = "Radius",
+ default = 1.0)
+
+##################Isosurface
+ iso_function_text = StringProperty(name="Function Text",maxlen=1024)#,update=iso_props_update_callback)
+
+##################PolygonToCircle
+ polytocircle_resolution = IntProperty(name = "Resolution",
+ description = "",
+ default = 3, min = 0, max = 256)
+ polytocircle_ngon = IntProperty(name = "NGon",
+ description = "",
+ min = 3, max = 64,default = 5)
+ polytocircle_ngonR = FloatProperty(name = "NGon Radius",
+ description = "",
+ default = 0.3)
+ polytocircle_circleR = FloatProperty(name = "Circle Radius",
+ description = "",
+ default = 1.0)
+
+#################Avogadro
+ # filename_ext = ".png"
+
+ # filter_glob = StringProperty(
+ # default="*.exr;*.gif;*.hdr;*.iff;*.jpeg;*.jpg;*.pgm;*.png;*.pot;*.ppm;*.sys;*.tga;*.tiff;*.EXR;*.GIF;*.HDR;*.IFF;*.JPEG;*.JPG;*.PGM;*.PNG;*.POT;*.PPM;*.SYS;*.TGA;*.TIFF",
+ # options={'HIDDEN'},
+ # )
+
###############################################################################
# Camera POV properties.
###############################################################################
@@ -965,6 +1427,7 @@ class RenderPovSettingsCamera(PropertyGroup):
default="")
+
###############################################################################
# Text POV properties.
###############################################################################
@@ -983,8 +1446,10 @@ class PovrayPreferences(AddonPreferences):
branch_feature_set_povray = EnumProperty(
name="Feature Set",
- description="Choose between official (POV-Ray) or (UberPOV) development branch features to write in the pov file",
- items= (('povray', 'Official POV-Ray', '','PLUGIN', 0), ('uberpov', 'Unofficial UberPOV', '', 'PLUGIN', 1)),
+ description="Choose between official (POV-Ray) or (UberPOV) "
+ "development branch features to write in the pov file",
+ items= (('povray', 'Official POV-Ray', '','PLUGIN', 0),
+ ('uberpov', 'Unofficial UberPOV', '', 'PLUGIN', 1)),
default='povray'
)
@@ -998,11 +1463,15 @@ class PovrayPreferences(AddonPreferences):
layout.prop(self, "branch_feature_set_povray")
layout.prop(self, "filepath_povray")
-
-
+
def register():
bpy.utils.register_module(__name__)
+ bpy.types.INFO_MT_add.prepend(ui.menu_func_add)
+ bpy.types.INFO_MT_file_import.append(ui.menu_func_import)
+ #used for parametric objects:
+ addon_utils.enable("add_mesh_extra_objects", default_set=False, persistent=True)
+
#bpy.types.TEXTURE_PT_context_texture.prepend(TEXTURE_PT_povray_type)
bpy.types.Scene.pov = PointerProperty(type=RenderPovSettingsScene)
bpy.types.Material.pov = PointerProperty(type=RenderPovSettingsMaterial)
@@ -1010,10 +1479,10 @@ def register():
bpy.types.Object.pov = PointerProperty(type=RenderPovSettingsObject)
bpy.types.Camera.pov = PointerProperty(type=RenderPovSettingsCamera)
bpy.types.Text.pov = PointerProperty(type=RenderPovSettingsText)
+
def unregister():
- bpy.utils.unregister_module(__name__)
#bpy.types.TEXTURE_PT_context_texture.remove(TEXTURE_PT_povray_type)
del bpy.types.Scene.pov
del bpy.types.Material.pov
@@ -1021,6 +1490,11 @@ def unregister():
del bpy.types.Object.pov
del bpy.types.Camera.pov
del bpy.types.Text.pov
+
+ addon_utils.disable("add_mesh_extra_objects", default_set=False)
+ bpy.types.INFO_MT_file_import.remove(ui.menu_func_import)
+ bpy.types.INFO_MT_add.remove(ui.menu_func_add)
+ bpy.utils.unregister_module(__name__)
if __name__ == "__main__":
diff --git a/render_povray/primitives.py b/render_povray/primitives.py
new file mode 100644
index 00000000..c668ed26
--- /dev/null
+++ b/render_povray/primitives.py
@@ -0,0 +1,1494 @@
+############ To get POV-Ray specific objects In and Out of Blender ###########
+
+import bpy
+#from . import render
+#from render import POVRAY_RENDER
+from bpy_extras.io_utils import ImportHelper
+from bpy_extras import object_utils
+from math import atan, pi, degrees, sqrt, cos, sin
+
+
+from bpy.props import (
+ StringProperty,
+ BoolProperty,
+ IntProperty,
+ FloatProperty,
+ FloatVectorProperty,
+ EnumProperty,
+ PointerProperty,
+ CollectionProperty,
+ )
+
+
def pov_define_mesh(mesh, verts, edges, faces, name, hide_geometry=True):
    """Fill *mesh* with the given geometry, creating a new mesh when None.

    Used by the POV primitive builders: the resulting mesh is only a
    viewport proxy, so its elements are usually hidden afterwards.
    Returns the (possibly newly created) mesh datablock.
    """
    if mesh is None:
        mesh = bpy.data.meshes.new(name)
    mesh.from_pydata(verts, edges, faces)
    mesh.update()
    # Pass True instead to get debug messages about invalid geometry.
    mesh.validate(False)
    if hide_geometry:
        # Hide every vertex, edge and polygon of the proxy mesh.
        for element_collection in (mesh.vertices, mesh.edges, mesh.polygons):
            element_collection.foreach_set("hide", [True] * len(element_collection))
    return mesh
+
+
class POVRAY_OT_lathe_add(bpy.types.Operator):
    """Add a POV lathe: a Bezier profile curve revolved via a Screw modifier."""
    bl_idname = "pov.addlathe"
    bl_label = "Lathe"
    bl_options = {'REGISTER','UNDO'}
    bl_description = "adds lathe"


    def execute(self, context):
        # Create the profile curve on layer 1 only.
        layers=[False]*20
        layers[0]=True
        bpy.ops.curve.primitive_bezier_curve_add(location=(0, 0, 0),
            rotation=(0, 0, 0), layers=layers)
        ob=context.scene.objects.active
        ob.name = ob.data.name = "PovLathe"
        ob.pov.object_as='LATHE'
        bpy.ops.object.mode_set(mode='EDIT')
        self.report({'WARNING'}, "This native POV-Ray primitive "
                                 "won't have any vertex to show in edit mode")
        # Stand the profile upright so it revolves around the Y axis below.
        bpy.ops.transform.rotate(value=-pi/2, axis=(0, 0, 1))
        bpy.ops.object.mode_set(mode='OBJECT')
        ob.pov.curveshape = "lathe"
        # The Screw modifier only previews the lathe in the viewport;
        # POV-Ray renders the real primitive, so it is disabled for render.
        bpy.ops.object.modifier_add(type='SCREW')
        bpy.context.object.modifiers["Screw"].axis = 'Y'
        bpy.context.object.modifiers["Screw"].show_render = False
        return {'FINISHED'}
+
+
+
def pov_superellipsoid_define(context, op, ob):
    """Build or rebuild the proxy mesh of a POV-Ray superellipsoid.

    Called with op set and ob None from the add operator (create a new
    object from the operator's properties), or with op None and ob set
    from the update operator (regenerate ob's mesh from its ob.pov
    settings).
    """
    if op:
        # Creating: read parameters from the operator.
        mesh = None

        u = op.se_u
        v = op.se_v
        n1 = op.se_n1
        n2 = op.se_n2
        edit = op.se_edit
        se_param1 = n2 # op.se_param1
        se_param2 = n1 # op.se_param2

    else:
        # Updating: read parameters back from the existing object.
        assert(ob)
        mesh = ob.data

        u = ob.pov.se_u
        v = ob.pov.se_v
        n1 = ob.pov.se_n1
        n2 = ob.pov.se_n2
        edit = ob.pov.se_edit
        se_param1 = ob.pov.se_param1
        se_param2 = ob.pov.se_param2

    verts = []
    r=1

    # Angular steps: v segments around (longitude), u rings pole to pole.
    stepSegment=360/v*pi/180
    stepRing=pi/u
    angSegment=0
    angRing=-pi/2

    step=0
    for ring in range(0,u-1):
        angRing += stepRing
        for segment in range(0,v):
            step += 1
            angSegment += stepSegment
            # Superellipsoid parametric point; abs()**exponent keeps the
            # fractional powers real, the sign is restored explicitly below.
            x = r*(abs(cos(angRing))**n1)*(abs(cos(angSegment))**n2)
            if (cos(angRing) < 0 and cos(angSegment) > 0) or \
               (cos(angRing) > 0 and cos(angSegment) < 0):
                x = -x
            y = r*(abs(cos(angRing))**n1)*(abs(sin(angSegment))**n2)
            if (cos(angRing) < 0 and sin(angSegment) > 0) or \
               (cos(angRing) > 0 and sin(angSegment) < 0):
                y = -y
            z = r*(abs(sin(angRing))**n1)
            if sin(angRing) < 0:
                z = -z
            x = round(x,4)
            y = round(y,4)
            z = round(z,4)
            verts.append((x,y,z))
    if edit == 'TRIANGLES':
        # Two extra pole vertices used by the triangle-fan caps below.
        verts.append((0,0,1))
        verts.append((0,0,-1))

    faces = []

    # Quad strips between consecutive rings.
    for i in range(0,u-2):
        m=i*v
        for p in range(0,v):
            if p < v-1:
                face=(m+p,1+m+p,v+1+m+p,v+m+p)
            if p == v-1:
                # Last quad of the ring wraps around to the ring start.
                face=(m+p,m,v+m,v+m+p)
            faces.append(face)
    if edit == 'TRIANGLES':
        # Close both poles with triangle fans to the two extra vertices.
        indexUp=len(verts)-2
        indexDown=len(verts)-1
        indexStartDown=len(verts)-2-v
        for i in range(0,v):
            if i < v-1:
                face=(indexDown,i,i+1)
                faces.append(face)
            if i == v-1:
                face=(indexDown,i,0)
                faces.append(face)
        for i in range(0,v):
            if i < v-1:
                face=(indexUp,i+indexStartDown,i+indexStartDown+1)
                faces.append(face)
            if i == v-1:
                face=(indexUp,i+indexStartDown,indexStartDown)
                faces.append(face)
    if edit == 'NGONS':
        # Close both poles with a single n-gon each.
        face=[]
        for i in range(0,v):
            face.append(i)
        faces.append(face)
        face=[]
        indexUp=len(verts)-1
        for i in range(0,v):
            face.append(indexUp-i)
        faces.append(face)
    mesh = pov_define_mesh(mesh, verts, [], faces, "SuperEllipsoid")

    if not ob:
        # New object: link the mesh, store its settings on ob.pov so the
        # update operator can rebuild later, then hide the proxy geometry.
        ob_base = object_utils.object_data_add(context, mesh, operator=None)
        ob = ob_base.object
        #engine = context.scene.render.engine what for?
        ob = context.object
        ob.name = ob.data.name = "PovSuperellipsoid"
        ob.pov.object_as = 'SUPERELLIPSOID'
        ob.pov.se_param1 = n2
        ob.pov.se_param2 = n1

        ob.pov.se_u = u
        ob.pov.se_v = v
        ob.pov.se_n1 = n1
        ob.pov.se_n2 = n2
        ob.pov.se_edit = edit

        bpy.ops.object.mode_set(mode="EDIT")
        bpy.ops.mesh.hide(unselected=False)
        bpy.ops.object.mode_set(mode="OBJECT")
+
class POVRAY_OT_superellipsoid_add(bpy.types.Operator):
    """Add a superellipsoid proxy mesh tagged for the POV-Ray exporter."""
    bl_idname = "pov.addsuperellipsoid"
    bl_label = "Add SuperEllipsoid"
    bl_description = "Create a SuperEllipsoid"
    bl_options = {'REGISTER', 'UNDO'}
    COMPAT_ENGINES = {'POVRAY_RENDER'}

    # XXX Keep it in sync with __init__'s RenderPovSettingsConePrimitive
    # If someone knows how to define operators' props from a func, I'd be delighted to learn it!
    se_param1 = FloatProperty(
        name="Parameter 1",
        description="",
        min=0.00, max=10.0, default=0.04)

    se_param2 = FloatProperty(
        name="Parameter 2",
        description="",
        min=0.00, max=10.0, default=0.04)

    se_u = IntProperty(name = "U-segments",
                       description = "radial segmentation",
                       default = 20, min = 4, max = 265)
    se_v = IntProperty(name = "V-segments",
                       description = "lateral segmentation",
                       default = 20, min = 4, max = 265)
    se_n1 = FloatProperty(name = "Ring manipulator",
                          description = "Manipulates the shape of the Ring",
                          default = 1.0, min = 0.01, max = 100.0)
    se_n2 = FloatProperty(name = "Cross manipulator",
                          description = "Manipulates the shape of the cross-section",
                          default = 1.0, min = 0.01, max = 100.0)
    se_edit = EnumProperty(items=[("NOTHING", "Nothing", ""),
                                  ("NGONS", "N-Gons", ""),
                                  ("TRIANGLES", "Triangles", "")],
                           name="Fill up and down",
                           description="",
                           default='TRIANGLES')

    @classmethod
    def poll(cls, context):
        # Only available while the POV-Ray render engine is active.
        engine = context.scene.render.engine
        return (engine in cls.COMPAT_ENGINES)

    def execute(self,context):
        # Delegate mesh construction; self carries the operator properties.
        pov_superellipsoid_define(context, self, None)

        self.report({'WARNING'}, "This native POV-Ray primitive won't have any vertex to show in edit mode")

        return {'FINISHED'}
+
class POVRAY_OT_superellipsoid_update(bpy.types.Operator):
    """Regenerate the superellipsoid proxy mesh from the object's ob.pov settings."""
    bl_idname = "pov.superellipsoid_update"
    bl_label = "Update"
    bl_description = "Update Superellipsoid"
    bl_options = {'REGISTER', 'UNDO'}
    COMPAT_ENGINES = {'POVRAY_RENDER'}

    @classmethod
    def poll(cls, context):
        # Needs an active mesh object and the POV-Ray engine.
        engine = context.scene.render.engine
        ob = context.object
        return (ob and ob.data and ob.type == 'MESH' and engine in cls.COMPAT_ENGINES)

    def execute(self, context):
        # Wipe the old (hidden) proxy geometry, then rebuild it in place.
        bpy.ops.object.mode_set(mode="EDIT")
        bpy.ops.mesh.reveal()
        bpy.ops.mesh.select_all(action='SELECT')
        bpy.ops.mesh.delete(type='VERT')
        bpy.ops.object.mode_set(mode="OBJECT")

        pov_superellipsoid_define(context, None, context.object)

        return {'FINISHED'}
+
def createFaces(vertIdx1, vertIdx2, closed=False, flipped=False):
    """Bridge two rings of vertex indices into a list of faces.

    Quads join equal-length rings; when vertIdx1 holds a single index a
    triangle fan from that apex is produced.  closed adds a seam face
    joining the last segment back to the first; flipped reverses winding.
    Returns None for empty or incompatible input.
    """
    ring_a, ring_b = vertIdx1, vertIdx2
    if not ring_a or not ring_b:
        return None
    if len(ring_a) < 2 and len(ring_b) < 2:
        return None
    fan = False
    if len(ring_a) != len(ring_b):
        if len(ring_a) == 1 and len(ring_b) > 1:
            fan = True
        else:
            return None
    count = len(ring_b)
    faces = []
    if closed:
        # Seam face wrapping from the last segment back to the first.
        if flipped:
            seam = [ring_a[0], ring_b[0], ring_b[count - 1]]
            if not fan:
                seam.append(ring_a[count - 1])
        else:
            seam = [ring_b[0], ring_a[0]]
            if not fan:
                seam.append(ring_a[count - 1])
            seam.append(ring_b[count - 1])
        faces.append(seam)
    for k in range(count - 1):
        if fan:
            apex = ring_a[0]
            face = ([ring_b[k], apex, ring_b[k + 1]] if flipped
                    else [apex, ring_b[k], ring_b[k + 1]])
        else:
            face = ([ring_b[k], ring_a[k], ring_a[k + 1], ring_b[k + 1]] if flipped
                    else [ring_a[k], ring_b[k], ring_b[k + 1], ring_a[k + 1]])
        faces.append(face)
    return faces
+
def power(a, b):
    """Signed power: raise abs(a) to b and carry over the sign of a.

    Keeps fractional exponents real for negative bases, which the
    supertoroid parametrisation relies on.
    """
    if a >= 0:
        return a ** b
    return -((-a) ** b)
+
def supertoroid(R, r, u, v, n1, n2):
    """Return (verts, faces) of a supertoroid.

    R/r are the major/minor radii, u/v the ring/cross-section counts and
    n1/n2 the shape exponents applied through power().
    """
    du = 2 * pi / u
    dv = 2 * pi / v
    verts = []
    faces = []
    for i in range(u):
        ring_s = power(sin(i * du), n1)
        ring_c = power(cos(i * du), n1)
        for j in range(v):
            radial = R + r * power(cos(j * dv), n2)
            height = r * power(sin(j * dv), n2)
            verts.append((ring_c * radial, ring_s * radial, height))
        if i > 0:
            # Bridge this cross-section ring to the previous one.
            faces.extend(createFaces(range((i - 1) * v, i * v),
                                     range(i * v, (i + 1) * v), closed=True))
    # Close the torus: last ring back to the first.
    faces.extend(createFaces(range((u - 1) * v, u * v), range(v), closed=True))
    return verts, faces
+
def pov_supertorus_define(context, op, ob):
    """Build or rebuild the proxy mesh of a POV supertorus.

    op set (add operator, ob None): create a new object from op's
    properties.  ob set (update operator, op None): regenerate ob's mesh
    from its ob.pov settings.
    """
    if op:
        mesh = None
        st_R = op.st_R
        st_r = op.st_r
        st_u = op.st_u
        st_v = op.st_v
        st_n1 = op.st_n1
        st_n2 = op.st_n2
        st_ie = op.st_ie
        st_edit = op.st_edit

    else:
        assert(ob)
        mesh = ob.data
        st_R = ob.pov.st_major_radius
        st_r = ob.pov.st_minor_radius
        st_u = ob.pov.st_u
        st_v = ob.pov.st_v
        st_n1 = ob.pov.st_ring
        st_n2 = ob.pov.st_cross
        st_ie = ob.pov.st_ie
        st_edit = ob.pov.st_edit

    if st_ie:
        # Interpret R/r as internal/external radii: convert to major/minor.
        rad1 = (st_R+st_r)/2
        rad2 = (st_R-st_r)/2
        if rad2 > rad1:
            [rad1,rad2] = [rad2,rad1]
    else:
        rad1 = st_R
        rad2 = st_r
        # Major radius must not be smaller than the minor one.
        if rad2 > rad1:
            rad1 = rad2
    verts,faces = supertoroid(rad1,
                              rad2,
                              st_u,
                              st_v,
                              st_n1,
                              st_n2)
    mesh = pov_define_mesh(mesh, verts, [], faces, "PovSuperTorus", True)
    if not ob:
        # New object: link the mesh and store the settings on ob.pov so
        # the update operator can rebuild later.
        ob_base = object_utils.object_data_add(context, mesh, operator=None)

        ob = ob_base.object
        ob.pov.object_as = 'SUPERTORUS'
        ob.pov.st_major_radius = st_R
        ob.pov.st_minor_radius = st_r
        ob.pov.st_u = st_u
        ob.pov.st_v = st_v
        ob.pov.st_ring = st_n1
        ob.pov.st_cross = st_n2
        ob.pov.st_ie = st_ie
        ob.pov.st_edit = st_edit
+
class POVRAY_OT_supertorus_add(bpy.types.Operator):
    """Add a supertorus proxy mesh tagged for the POV-Ray exporter."""
    bl_idname = "pov.addsupertorus"
    bl_label = "Add Supertorus"
    bl_description = "Create a SuperTorus"
    bl_options = {'REGISTER', 'UNDO'}
    COMPAT_ENGINES = {'POVRAY_RENDER'}

    st_R = FloatProperty(name = "big radius",
                         description = "The radius inside the tube",
                         default = 1.0, min = 0.01, max = 100.0)
    st_r = FloatProperty(name = "small radius",
                         description = "The radius of the tube",
                         default = 0.3, min = 0.01, max = 100.0)
    st_u = IntProperty(name = "U-segments",
                       description = "radial segmentation",
                       default = 16, min = 3, max = 265)
    st_v = IntProperty(name = "V-segments",
                       description = "lateral segmentation",
                       default = 8, min = 3, max = 265)
    st_n1 = FloatProperty(name = "Ring manipulator",
                          description = "Manipulates the shape of the Ring",
                          default = 1.0, min = 0.01, max = 100.0)
    st_n2 = FloatProperty(name = "Cross manipulator",
                          description = "Manipulates the shape of the cross-section",
                          default = 1.0, min = 0.01, max = 100.0)
    st_ie = BoolProperty(name = "Use Int.+Ext. radii",
                         description = "Use internal and external radii",
                         default = False)
    st_edit = BoolProperty(name="",
                           description="",
                           default=False,
                           options={'HIDDEN'})

    @classmethod
    def poll(cls, context):
        # Only available while the POV-Ray render engine is active.
        engine = context.scene.render.engine
        return (engine in cls.COMPAT_ENGINES)

    def execute(self, context):
        # Delegate mesh construction; self carries the operator properties.
        pov_supertorus_define(context, self, None)

        self.report({'WARNING'}, "This native POV-Ray primitive won't have any vertex to show in edit mode")
        return {'FINISHED'}
+
class POVRAY_OT_supertorus_update(bpy.types.Operator):
    """Regenerate the supertorus proxy mesh from the object's ob.pov settings."""
    bl_idname = "pov.supertorus_update"
    bl_label = "Update"
    bl_description = "Update SuperTorus"
    bl_options = {'REGISTER', 'UNDO'}
    COMPAT_ENGINES = {'POVRAY_RENDER'}

    @classmethod
    def poll(cls, context):
        # Needs an active mesh object and the POV-Ray engine.
        engine = context.scene.render.engine
        ob = context.object
        return (ob and ob.data and ob.type == 'MESH' and engine in cls.COMPAT_ENGINES)

    def execute(self, context):
        # Wipe the old (hidden) proxy geometry, then rebuild it in place.
        bpy.ops.object.mode_set(mode="EDIT")
        bpy.ops.mesh.reveal()
        bpy.ops.mesh.select_all(action='SELECT')
        bpy.ops.mesh.delete(type='VERT')
        bpy.ops.object.mode_set(mode="OBJECT")

        pov_supertorus_define(context, None, context.object)

        return {'FINISHED'}
+#########################################################################################################
class POVRAY_OT_loft_add(bpy.types.Operator):
    """Create a 'loft' curve object for POV-Ray's meshmaker.

    Builds stacked closed NURBS rings describing a thick, open vessel:
    outer bottom, outer wall, inner wall and inner bottom.  Fixes: stray
    debug print removed; the ring-building code, duplicated four times,
    is factored into private helpers.
    """
    bl_idname = "pov.addloft"
    bl_label = "Add Loft Data"
    bl_description = "Create a Curve data for Meshmaker"
    bl_options = {'REGISTER', 'UNDO'}
    COMPAT_ENGINES = {'POVRAY_RENDER'}

    loft_n = IntProperty(name = "Segments",
                         description = "Vertical segments",
                         default = 16, min = 3, max = 720)
    loft_rings_bottom = IntProperty(name = "Bottom",
                                    description = "Bottom rings",
                                    default = 5, min = 2, max = 100)
    loft_rings_side = IntProperty(name = "Side",
                                  description = "Side rings",
                                  default = 10, min = 2, max = 100)
    loft_thick = FloatProperty(name = "Thickness",
                               description = "Manipulates the shape of the Ring",
                               default = 0.3, min = 0.01, max = 1.0)
    loft_r = FloatProperty(name = "Radius",
                           description = "Radius",
                           default = 1, min = 0.01, max = 10)
    loft_height = FloatProperty(name = "Height",
                                description = "Manipulates the shape of the Ring",
                                default = 2, min = 0.01, max = 10.0)

    @staticmethod
    def _ring_coords(radius, z, n):
        # n points evenly spaced on a circle of *radius* at height *z*.
        return [(radius * cos(2 * pi * p / n), radius * sin(2 * pi * p / n), z)
                for p in range(n)]

    @staticmethod
    def _add_ring(loft_data, coords):
        # Append one closed NURBS spline through all points of *coords*.
        nurbs = loft_data.splines.new('NURBS')
        nurbs.points.add(len(coords) - 1)
        for idx, (px, py, pz) in enumerate(coords):
            nurbs.points[idx].co = (px, py, pz, 1)
        nurbs.use_cyclic_u = True

    def execute(self, context):
        props = self.properties
        loft_data = bpy.data.curves.new('Loft', type='CURVE')
        loft_data.dimensions = '3D'
        loft_data.resolution_u = 2
        loft_data.show_normal_face = False
        n = props.loft_n
        thick = props.loft_thick
        side = props.loft_rings_side
        bottom = props.loft_rings_bottom
        h = props.loft_height
        r = props.loft_r

        # Outer bottom: rings growing from (almost) zero radius to r at z=-h/2.
        r0 = 0.00001
        dist_b = r / bottom
        z = -h / 2
        for i in range(bottom + 1):
            self._add_ring(loft_data, self._ring_coords(r0, z, n))
            r0 += dist_b
        # Outer wall: rings of radius r climbing to z=+h/2.
        for i in range(side):
            z += h / side
            self._add_ring(loft_data, self._ring_coords(r, z, n))
        # Inner wall: radius reduced by the wall thickness, going back down.
        r -= thick
        for i in range(side):
            self._add_ring(loft_data, self._ring_coords(r, z, n))
            z -= h / side
        # Inner bottom: rings shrinking back to the centre, one thickness up.
        z = (-h / 2) + thick
        dist_b = (r - 0.00001) / bottom
        for i in range(bottom + 1):
            self._add_ring(loft_data, self._ring_coords(r, z, n))
            r -= dist_b

        ob = bpy.data.objects.new('Loft_shape', loft_data)
        scn = bpy.context.scene
        scn.objects.link(ob)
        scn.objects.active = ob
        ob.select = True
        ob.pov.curveshape = "loft"
        return {'FINISHED'}
+
class POVRAY_OT_plane_add(bpy.types.Operator):
    """Add a very large plane as proxy for POV-Ray's infinite plane."""
    bl_idname = "pov.addplane"
    bl_label = "Plane"
    bl_description = "Add Plane"
    bl_options = {'REGISTER', 'UNDO'}

    def execute(self,context):
        # Create on layer 1 only; the huge radius stands in for infinity.
        layers = 20*[False]
        layers[0] = True
        bpy.ops.mesh.primitive_plane_add(radius = 100000,layers=layers)
        ob = context.object
        ob.name = ob.data.name = 'PovInfinitePlane'
        bpy.ops.object.mode_set(mode="EDIT")
        self.report({'WARNING'}, "This native POV-Ray primitive "
                                 "won't have any vertex to show in edit mode")
        # Hide the proxy geometry; POV-Ray renders the real primitive.
        bpy.ops.mesh.hide(unselected=False)
        bpy.ops.object.mode_set(mode="OBJECT")
        bpy.ops.object.shade_smooth()
        ob.pov.object_as = "PLANE"
        return {'FINISHED'}
+
class POVRAY_OT_box_add(bpy.types.Operator):
    """Add a cube proxy mesh for the POV box primitive."""
    bl_idname = "pov.addbox"
    bl_label = "Box"
    bl_description = "Add Box"
    bl_options = {'REGISTER', 'UNDO'}

    def execute(self,context):
        # Create on layer 1 only.
        layers = 20*[False]
        layers[0] = True
        bpy.ops.mesh.primitive_cube_add(layers=layers)
        ob = context.object
        ob.name = ob.data.name = 'PovBox'
        bpy.ops.object.mode_set(mode="EDIT")
        self.report({'WARNING'}, "This native POV-Ray primitive "
                                 "won't have any vertex to show in edit mode")
        # Hide the proxy geometry; POV-Ray renders the real primitive.
        bpy.ops.mesh.hide(unselected=False)
        bpy.ops.object.mode_set(mode="OBJECT")
        ob.pov.object_as = "BOX"
        return {'FINISHED'}
+
class POVRAY_OT_cylinder_add(bpy.types.Operator):
    """Add a cylinder proxy mesh for the POV cylinder primitive."""
    bl_idname = "pov.addcylinder"
    bl_label = "Cylinder"
    bl_description = "Add Cylinder"
    bl_options = {'REGISTER', 'UNDO'}

    def execute(self,context):
        # Create on layer 1 only.
        layers = 20*[False]
        layers[0] = True
        bpy.ops.mesh.primitive_cylinder_add(layers = layers)
        ob = context.object
        ob.name = ob.data.name = 'PovCylinder'
        bpy.ops.object.mode_set(mode="EDIT")
        self.report({'WARNING'}, "This native POV-Ray primitive "
                                 "won't have any vertex to show in edit mode")
        # Hide the proxy geometry; POV-Ray renders the real primitive.
        bpy.ops.mesh.hide(unselected=False)
        bpy.ops.object.mode_set(mode="OBJECT")
        ob.pov.object_as = "CYLINDER"

        return {'FINISHED'}
+################################SPHERE##########################################
def pov_sphere_define(context, op, ob, loc):
    """Build or rebuild the icosphere proxy of a POV sphere at *loc*.

    op set (add operator, ob None): create a new sphere of radius op.R.
    ob set (update operator, op None): replace ob's proxy mesh, keeping
    the object's rotation and scale.

    Fix: the original dereferenced ob (rotation_euler, scale,
    pov.sphere_radius) unconditionally, so calling it from the add
    operator with ob=None crashed; the rebuild path is now guarded.
    """
    if op:
        R = op.R
    else:
        assert(ob)
        R = ob.pov.sphere_radius

    if ob:
        # Keep the existing transform while swapping the proxy geometry.
        obrot = ob.rotation_euler
        obscale = ob.scale

        bpy.ops.object.mode_set(mode="EDIT")
        bpy.ops.mesh.reveal()
        bpy.ops.mesh.select_all(action='SELECT')
        bpy.ops.mesh.delete(type='VERT')
        bpy.ops.mesh.primitive_ico_sphere_add(subdivisions=4, size=R,
                                              location=loc, rotation=obrot)
        # Scale is not part of the add operator, so re-apply it explicitly.
        bpy.ops.transform.resize(value=obscale)
        bpy.ops.mesh.hide(unselected=False)
        bpy.ops.object.mode_set(mode="OBJECT")
    else:
        # Fresh object: create, tag as POV sphere, store the radius for
        # later updates, then hide the proxy geometry.
        bpy.ops.mesh.primitive_ico_sphere_add(subdivisions=4, size=R, location=loc)
        ob = context.object
        ob.name = ob.data.name = "PovSphere"
        ob.pov.object_as = "SPHERE"
        ob.pov.sphere_radius = R
        bpy.ops.object.mode_set(mode="EDIT")
        bpy.ops.mesh.hide(unselected=False)
        bpy.ops.object.mode_set(mode="OBJECT")
class POVRAY_OT_sphere_add(bpy.types.Operator):
    """Add a POV sphere primitive with an icosphere proxy mesh.

    Fix: LOC was left unbound (NameError) when there was no active
    object; it now falls back to the 3D cursor in that case.  The dead
    commented-out execute() was removed.
    """
    bl_idname = "pov.addsphere"
    bl_label = "Sphere"
    bl_description = "Add Sphere Shape"
    bl_options = {'REGISTER', 'UNDO'}

    # XXX Keep it in sync with __init__'s torus Primitive
    R = FloatProperty(name="Sphere radius",min=0.00, max=10.0, default=0.5)

    imported_loc = FloatVectorProperty(
        name="Imported Pov location",
        precision=6,
        default=(0.0, 0.0, 0.0))

    def execute(self,context):
        ob = context.object
        # Use the recorded POV import location when the active object has
        # one, otherwise place the sphere at the 3D cursor.
        if ob and ob.pov.imported_loc:
            LOC = ob.pov.imported_loc
        else:
            LOC = bpy.context.scene.cursor_location
        pov_sphere_define(context, self, None, LOC)
        self.report({'WARNING'}, "This native POV-Ray primitive "
                                 "won't have any vertex to show in edit mode")
        return {'FINISHED'}
class POVRAY_OT_sphere_update(bpy.types.Operator):
    """Regenerate the sphere proxy mesh from the object's ob.pov settings."""
    bl_idname = "pov.sphere_update"
    bl_label = "Update"
    bl_description = "Update Sphere"
    bl_options = {'REGISTER', 'UNDO'}
    COMPAT_ENGINES = {'POVRAY_RENDER'}

    @classmethod
    def poll(cls, context):
        # Needs an active mesh object and the POV-Ray engine.
        engine = context.scene.render.engine
        ob = context.object
        return (ob and ob.data and ob.type == 'MESH' and engine in cls.COMPAT_ENGINES)

    def execute(self, context):
        # Rebuild in place at the object's current location.
        pov_sphere_define(context, None, context.object,context.object.location)

        return {'FINISHED'}
+
+
+####################################CONE#######################################
def pov_cone_define(context, op, ob):
    """Build or rebuild the proxy mesh of a POV (truncated) cone.

    op set (add operator, ob None): read radii/height from the operator
    and create a new object.  ob set (update operator, op None):
    regenerate ob's mesh from its ob.pov cone settings.
    """
    verts = []
    faces = []
    if op:
        mesh = None
        base = op.base
        cap = op.cap
        seg = op.seg
        height = op.height
    else:
        assert(ob)
        mesh = ob.data
        base = ob.pov.cone_base_radius
        cap = ob.pov.cone_cap_radius
        seg = ob.pov.cone_segments
        height = ob.pov.cone_height

    # Two interleaved rings: even indices = base ring at z=zb,
    # odd indices = cap ring at z=zc.
    zc = height / 2
    zb = -zc
    angle = 2 * pi / seg
    t = 0
    for i in range(seg):
        xb = base * cos(t)
        yb = base * sin(t)
        xc = cap * cos(t)
        yc = cap * sin(t)
        verts.append((xb, yb, zb))
        verts.append((xc, yc, zc))
        t += angle
    # Side quads; the last one wraps around to the first vertex pair.
    for i in range(seg):
        f = i * 2
        if i == seg - 1:
            faces.append([0, 1, f + 1, f])
        else:
            faces.append([f + 2, f + 3, f + 1, f])
    # Close base and cap with n-gons unless their radius is zero (apex).
    if base != 0:
        base_face = []
        for i in range(seg - 1, -1, -1):
            p = i * 2
            base_face.append(p)
        faces.append(base_face)
    if cap != 0:
        cap_face = []
        for i in range(seg):
            p = i * 2 + 1
            cap_face.append(p)
        faces.append(cap_face)

    mesh = pov_define_mesh(mesh, verts, [], faces, "PovCone", True)
    if not ob:
        # New object: link the mesh and store the settings for later updates.
        ob_base = object_utils.object_data_add(context, mesh, operator=None)
        ob = ob_base.object
        ob.pov.object_as = "CONE"
        ob.pov.cone_base_radius = base
        ob.pov.cone_cap_radius = cap
        ob.pov.cone_height = height
        ob.pov.cone_base_z = zb
        ob.pov.cone_cap_z = zc
+
+
class POVRAY_OT_cone_add(bpy.types.Operator):
    """Add a (truncated) cone proxy mesh for the POV cone primitive."""
    bl_idname = "pov.cone_add"
    bl_label = "Cone"
    bl_description = "Add Cone"
    bl_options = {'REGISTER', 'UNDO'}
    COMPAT_ENGINES = {'POVRAY_RENDER'}

    # XXX Keep it in sync with __init__'s RenderPovSettingsConePrimitive
    # If someone knows how to define operators' props from a func, I'd be delighted to learn it!
    base = FloatProperty(
        name = "Base radius", description = "The first radius of the cone",
        default = 1.0, min = 0.01, max = 100.0)
    cap = FloatProperty(
        name = "Cap radius", description = "The second radius of the cone",
        default = 0.3, min = 0.0, max = 100.0)
    seg = IntProperty(
        name = "Segments", description = "Radial segmentation of the proxy mesh",
        default = 16, min = 3, max = 265)
    height = FloatProperty(
        name = "Height", description = "Height of the cone",
        default = 2.0, min = 0.01, max = 100.0)

    @classmethod
    def poll(cls, context):
        # Only available while the POV-Ray render engine is active.
        engine = context.scene.render.engine
        return (engine in cls.COMPAT_ENGINES)

    def execute(self, context):
        # Delegate mesh construction; self carries the operator properties.
        pov_cone_define(context, self, None)

        self.report({'WARNING'}, "This native POV-Ray primitive won't have any vertex to show in edit mode")
        return {'FINISHED'}
+
+
class POVRAY_OT_cone_update(bpy.types.Operator):
    """Regenerate the cone proxy mesh from the object's ob.pov settings."""
    bl_idname = "pov.cone_update"
    bl_label = "Update"
    bl_description = "Update Cone"
    bl_options = {'REGISTER', 'UNDO'}
    COMPAT_ENGINES = {'POVRAY_RENDER'}

    @classmethod
    def poll(cls, context):
        # Needs an active mesh object and the POV-Ray engine.
        engine = context.scene.render.engine
        ob = context.object
        return (ob and ob.data and ob.type == 'MESH' and engine in cls.COMPAT_ENGINES)

    def execute(self, context):
        # Wipe the old (hidden) proxy geometry, then rebuild it in place.
        bpy.ops.object.mode_set(mode="EDIT")
        bpy.ops.mesh.reveal()
        bpy.ops.mesh.select_all(action='SELECT')
        bpy.ops.mesh.delete(type='VERT')
        bpy.ops.object.mode_set(mode="OBJECT")

        pov_cone_define(context, None, context.object)

        return {'FINISHED'}
+#########################################################################################################
+
class POVRAY_OT_isosurface_box_add(bpy.types.Operator):
    """Add an isosurface whose container is a box (cube proxy mesh)."""
    bl_idname = "pov.addisosurfacebox"
    bl_label = "Isosurface Box"
    bl_description = "Add Isosurface contained by Box"
    bl_options = {'REGISTER', 'UNDO'}


    def execute(self,context):
        # Cube proxy on layer 1; geometry is hidden, POV-Ray renders the
        # actual isosurface.
        layers = 20*[False]
        layers[0] = True
        bpy.ops.mesh.primitive_cube_add(layers = layers)
        ob = context.object
        bpy.ops.object.mode_set(mode="EDIT")
        self.report({'WARNING'}, "This native POV-Ray primitive "
                                 "won't have any vertex to show in edit mode")
        bpy.ops.mesh.hide(unselected=False)
        bpy.ops.object.mode_set(mode="OBJECT")
        ob.pov.object_as = "ISOSURFACE"
        ob.pov.contained_by = 'box'
        ob.name = 'Isosurface'
        return {'FINISHED'}
+
class POVRAY_OT_isosurface_sphere_add(bpy.types.Operator):
    """Add an isosurface whose container is a sphere (icosphere proxy mesh)."""
    bl_idname = "pov.addisosurfacesphere"
    bl_label = "Isosurface Sphere"
    bl_description = "Add Isosurface contained by Sphere"
    bl_options = {'REGISTER', 'UNDO'}


    def execute(self,context):
        # Icosphere proxy on layer 1; geometry is hidden, POV-Ray renders
        # the actual isosurface.
        layers = 20*[False]
        layers[0] = True
        bpy.ops.mesh.primitive_ico_sphere_add(subdivisions=4,layers=layers)
        ob = context.object
        bpy.ops.object.mode_set(mode="EDIT")
        self.report({'WARNING'}, "This native POV-Ray primitive "
                                 "won't have any vertex to show in edit mode")
        bpy.ops.mesh.hide(unselected=False)
        bpy.ops.object.mode_set(mode="OBJECT")
        bpy.ops.object.shade_smooth()
        ob.pov.object_as = "ISOSURFACE"
        ob.pov.contained_by = 'sphere'
        ob.name = 'Isosurface'
        return {'FINISHED'}
+
class POVRAY_OT_sphere_sweep_add(bpy.types.Operator):
    """Add a NURBS curve that the exporter writes out as a POV sphere_sweep."""
    bl_idname = "pov.addspheresweep"
    bl_label = "Sphere Sweep"
    bl_description = "Create Sphere Sweep along curve"
    bl_options = {'REGISTER', 'UNDO'}

    def execute(self,context):
        # NURBS curve on layer 1; the bevel settings give a tube preview.
        layers = 20*[False]
        layers[0] = True
        bpy.ops.curve.primitive_nurbs_curve_add(layers = layers)
        ob = context.object
        ob.name = ob.data.name = "PovSphereSweep"
        ob.pov.curveshape = "sphere_sweep"
        ob.data.bevel_depth = 0.02
        ob.data.bevel_resolution = 4
        ob.data.fill_mode = 'FULL'
        #ob.data.splines[0].order_u = 4

        return {'FINISHED'}
+
class POVRAY_OT_blob_add(bpy.types.Operator):
    """Add a metaball used as the proxy for a POV blob sphere."""
    bl_idname = "pov.addblobsphere"
    bl_label = "Blob Sphere"
    bl_description = "Add Blob Sphere"
    bl_options = {'REGISTER', 'UNDO'}

    def execute(self, context):
        # Restrict the new metaball to the first scene layer.
        first_layer_only = [index == 0 for index in range(20)]
        bpy.ops.object.metaball_add(type='BALL', layers=first_layer_only)
        blob = context.object
        blob.name = "Blob"
        return {'FINISHED'}
+
+
class POVRAY_OT_rainbow_add(bpy.types.Operator):
    """Add a spot lamp proxy that the exporter writes out as a POV rainbow."""
    bl_idname = "pov.addrainbow"
    bl_label = "Rainbow"
    bl_description = "Add Rainbow"
    bl_options = {'REGISTER', 'UNDO'}

    def execute(self,context):
        cam = context.scene.camera
        bpy.ops.object.lamp_add(type='SPOT', radius=1)
        ob = context.object
        ob.data.show_cone = False
        ob.data.spot_blend = 0.5
        # Clip/distance values derived from the camera distance; energy 0
        # so the proxy lamp does not actually light the scene.
        ob.data.shadow_buffer_clip_end = 0
        ob.data.shadow_buffer_clip_start = 4*cam.location.length
        ob.data.distance = cam.location.length
        ob.data.energy = 0
        ob.name = ob.data.name = "PovRainbow"
        ob.pov.object_as = "RAINBOW"

        #obj = context.object
        # Keep the rainbow aimed at the camera, placed opposite it.
        bpy.ops.object.constraint_add(type='DAMPED_TRACK')



        ob.constraints["Damped Track"].target = cam
        ob.constraints["Damped Track"].track_axis = 'TRACK_NEGATIVE_Z'
        ob.location = -cam.location

        #refocus on the actual rainbow
        bpy.context.scene.objects.active = ob
        ob.select=True

        return {'FINISHED'}
+
class POVRAY_OT_height_field_add(bpy.types.Operator, ImportHelper):
    """Import an image as a POV height_field with a grid proxy mesh.

    Fixes: `os` was used (os.path.splitext) without being imported
    anywhere in this module, raising NameError at runtime — it is now
    imported locally; typo "Wather Level" corrected.
    """
    bl_idname = "pov.addheightfield"
    bl_label = "Height Field"
    bl_description = "Add Height Field "
    bl_options = {'REGISTER', 'UNDO'}

    # XXX Keep it in sync with __init__'s hf Primitive
    # filename_ext = ".png"

    # filter_glob = StringProperty(
    #         default="*.exr;*.gif;*.hdr;*.iff;*.jpeg;*.jpg;*.pgm;*.png;*.pot;*.ppm;*.sys;*.tga;*.tiff;*.EXR;*.GIF;*.HDR;*.IFF;*.JPEG;*.JPG;*.PGM;*.PNG;*.POT;*.PPM;*.SYS;*.TGA;*.TIFF",
    #         options={'HIDDEN'},
    #         )
    quality = IntProperty(name = "Quality",
                          description = "",
                          default = 100, min = 1, max = 100)
    hf_filename = StringProperty(maxlen = 1024)

    hf_gamma = FloatProperty(
        name="Gamma",
        description="Gamma",
        min=0.0001, max=20.0, default=1.0)

    hf_premultiplied = BoolProperty(
        name="Premultiplied",
        description="Premultiplied",
        default=True)

    hf_smooth = BoolProperty(
        name="Smooth",
        description="Smooth",
        default=False)

    hf_water = FloatProperty(
        name="Water Level",
        description="Water Level",
        min=0.00, max=1.00, default=0.0)

    hf_hierarchy = BoolProperty(
        name="Hierarchy",
        description="Height field hierarchy",
        default=True)

    def execute(self,context):
        # 'os' is not imported at this module's top level; needed for
        # the splitext() call below.
        import os

        props = self.properties
        impath = bpy.path.abspath(self.filepath)
        img = bpy.data.images.load(impath)
        im_name = img.name
        im_name, file_extension = os.path.splitext(im_name)
        hf_tex = bpy.data.textures.new('%s_hf_image'%im_name, type = 'IMAGE')
        hf_tex.image = img
        mat = bpy.data.materials.new('Tex_%s_hf'%im_name)
        hf_slot = mat.texture_slots.create(-1)
        hf_slot.texture = hf_tex
        layers = 20*[False]
        layers[0] = True
        # Grid resolution scales with the quality setting (100 = full size).
        quality = props.quality
        res = 100/quality
        w,h = hf_tex.image.size[:]
        w = int(w/res)
        h = int(h/res)
        bpy.ops.mesh.primitive_grid_add(x_subdivisions=w, y_subdivisions=h,radius = 0.5,layers=layers)
        ob = context.object
        ob.name = ob.data.name = '%s'%im_name
        ob.data.materials.append(mat)
        # Rough preview displacement only; POV-Ray renders the real field.
        bpy.ops.object.mode_set(mode="EDIT")
        bpy.ops.mesh.noise(factor=1)
        bpy.ops.object.mode_set(mode="OBJECT")

        #needs a loop to select by index?
        #bpy.ops.object.material_slot_remove()
        #material just left there for now


        mat.texture_slots.clear(-1)
        bpy.ops.object.mode_set(mode="EDIT")
        bpy.ops.mesh.hide(unselected=False)
        bpy.ops.object.mode_set(mode="OBJECT")
        ob.pov.object_as = 'HEIGHT_FIELD'
        ob.pov.hf_filename = impath
        return {'FINISHED'}
+
+
+############################TORUS############################################
+def pov_torus_define(context, op, ob):
+ if op:
+ mas = op.mas
+ mis = op.mis
+ mar = op.mar
+ mir = op.mir
+ else:
+ assert(ob)
+ mas = ob.pov.torus_major_segments
+ mis = ob.pov.torus_minor_segments
+ mar = ob.pov.torus_major_radius
+ mir = ob.pov.torus_minor_radius
+
+ #keep object rotation and location for the add object operator
+ obrot = ob.rotation_euler
+ obloc = ob.location
+
+ bpy.ops.object.mode_set(mode="EDIT")
+ bpy.ops.mesh.reveal()
+ bpy.ops.mesh.select_all(action='SELECT')
+ bpy.ops.mesh.delete(type='VERT')
+ bpy.ops.mesh.primitive_torus_add(rotation = obrot, location = obloc, major_segments=mas, minor_segments=mis,major_radius=mar, minor_radius=mir)
+
+
+ bpy.ops.mesh.hide(unselected=False)
+ bpy.ops.object.mode_set(mode="OBJECT")
+
+
+ if not ob:
+ bpy.ops.mesh.primitive_torus_add(major_segments=mas, minor_segments=mis,major_radius=mar, minor_radius=mir)
+ ob = context.object
+ ob.name = ob.data.name = "PovTorus"
+ ob.pov.object_as = "TORUS"
+ ob.pov.torus_major_segments = mas
+ ob.pov.torus_minor_segments = mis
+ ob.pov.torus_major_radius = mar
+ ob.pov.torus_minor_radius = mir
+ bpy.ops.object.mode_set(mode="EDIT")
+ bpy.ops.mesh.hide(unselected=False)
+ bpy.ops.object.mode_set(mode="OBJECT")
+
+class POVRAY_OT_torus_add(bpy.types.Operator):
+ bl_idname = "pov.addtorus"
+ bl_label = "Torus"
+ bl_description = "Add Torus"
+ bl_options = {'REGISTER', 'UNDO'}
+
+ # XXX Keep it in sync with __init__'s torus Primitive
+ mas = IntProperty(name = "Major Segments",
+ description = "",
+ default = 48, min = 3, max = 720)
+ mis = IntProperty(name = "Minor Segments",
+ description = "",
+ default = 12, min = 3, max = 720)
+ mar = FloatProperty(name = "Major Radius",
+ description = "",
+ default = 1.0)
+ mir = FloatProperty(name = "Minor Radius",
+ description = "",
+ default = 0.25)
+ def execute(self,context):
+ props = self.properties
+ mar = props.mar
+ mir = props.mir
+ mas = props.mas
+ mis = props.mis
+ pov_torus_define(context, self, None)
+ self.report({'WARNING'}, "This native POV-Ray primitive "
+ "won't have any vertex to show in edit mode")
+ return {'FINISHED'}
+
+
+class POVRAY_OT_torus_update(bpy.types.Operator):
+ bl_idname = "pov.torus_update"
+ bl_label = "Update"
+ bl_description = "Update Torus"
+ bl_options = {'REGISTER', 'UNDO'}
+ COMPAT_ENGINES = {'POVRAY_RENDER'}
+
+ @classmethod
+ def poll(cls, context):
+ engine = context.scene.render.engine
+ ob = context.object
+ return (ob and ob.data and ob.type == 'MESH' and engine in cls.COMPAT_ENGINES)
+
+ def execute(self, context):
+
+ pov_torus_define(context, None, context.object)
+
+ return {'FINISHED'}
+
+###################################################################################
+
+
+class POVRAY_OT_prism_add(bpy.types.Operator):
+ bl_idname = "pov.addprism"
+ bl_label = "Prism"
+ bl_description = "Create Prism"
+ bl_options = {'REGISTER', 'UNDO'}
+
+ prism_n = IntProperty(name = "Sides",
+ description = "Number of sides",
+ default = 5, min = 3, max = 720)
+ prism_r = FloatProperty(name = "Radius",
+ description = "Radius",
+ default = 1.0)
+ def execute(self,context):
+
+ props = self.properties
+ loftData = bpy.data.curves.new('Prism', type='CURVE')
+ loftData.dimensions = '2D'
+ loftData.resolution_u = 2
+ loftData.show_normal_face = False
+ loftData.extrude = 2
+ n=props.prism_n
+ r=props.prism_r
+ coords = []
+ z = 0
+ angle = 0
+ for p in range(n):
+ x = r*cos(angle)
+ y = r*sin(angle)
+ coords.append((x,y,z))
+ angle+=pi*2/n
+ poly = loftData.splines.new('POLY')
+ poly.points.add(len(coords)-1)
+ for i, coord in enumerate(coords):
+ x,y,z = coord
+ poly.points[i].co = (x, y, z, 1)
+ poly.use_cyclic_u = True
+
+ ob = bpy.data.objects.new('Prism_shape', loftData)
+ scn = bpy.context.scene
+ scn.objects.link(ob)
+ scn.objects.active = ob
+ ob.select = True
+ ob.pov.curveshape = "prism"
+ ob.name = ob.data.name = "Prism"
+ return {'FINISHED'}
+
+##############################PARAMETRIC######################################
+def pov_parametric_define(context, op, ob):
+ if op:
+ u_min = op.u_min
+ u_max = op.u_max
+ v_min = op.v_min
+ v_max = op.v_max
+ x_eq = op.x_eq
+ y_eq = op.y_eq
+ z_eq = op.z_eq
+
+ else:
+ assert(ob)
+ u_min = ob.pov.u_min
+ u_max = ob.pov.u_max
+ v_min = ob.pov.v_min
+ v_max = ob.pov.v_max
+ x_eq = ob.pov.x_eq
+ y_eq = ob.pov.y_eq
+ z_eq = ob.pov.z_eq
+
+ #keep object rotation and location for the updated object
+ obloc = ob.location
+ obrot = ob.rotation_euler # In radians
+ #Parametric addon has no loc rot, some extra work is needed
+ #in case cursor has moved
+ curloc = bpy.context.scene.cursor_location
+
+
+ bpy.ops.object.mode_set(mode="EDIT")
+ bpy.ops.mesh.reveal()
+ bpy.ops.mesh.select_all(action='SELECT')
+ bpy.ops.mesh.delete(type='VERT')
+ bpy.ops.mesh.primitive_xyz_function_surface(x_eq=x_eq, y_eq=y_eq, z_eq=z_eq, range_u_min=u_min, range_u_max=u_max, range_v_min=v_min, range_v_max=v_max)
+ bpy.ops.mesh.select_all(action='SELECT')
+ #extra work:
+ bpy.ops.transform.translate(value=(obloc-curloc), proportional_size=1)
+ bpy.ops.transform.rotate(axis=obrot, proportional_size=1)
+
+ bpy.ops.mesh.hide(unselected=False)
+ bpy.ops.object.mode_set(mode="OBJECT")
+
+
+ if not ob:
+ bpy.ops.mesh.primitive_xyz_function_surface(x_eq=x_eq, y_eq=y_eq, z_eq=z_eq, range_u_min=u_min, range_u_max=u_max, range_v_min=v_min, range_v_max=v_max)
+ ob = context.object
+ ob.name = ob.data.name = "PovParametric"
+ ob.pov.object_as = "PARAMETRIC"
+
+ ob.pov.u_min = u_min
+ ob.pov.u_max = u_max
+ ob.pov.v_min = v_min
+ ob.pov.v_max = v_max
+ ob.pov.x_eq = x_eq
+ ob.pov.y_eq = y_eq
+ ob.pov.z_eq = z_eq
+
+ bpy.ops.object.mode_set(mode="EDIT")
+ bpy.ops.mesh.hide(unselected=False)
+ bpy.ops.object.mode_set(mode="OBJECT")
+class POVRAY_OT_parametric_add(bpy.types.Operator):
+ bl_idname = "pov.addparametric"
+ bl_label = "Parametric"
+ bl_description = "Add Paramertic"
+ bl_options = {'REGISTER', 'UNDO'}
+
+ # XXX Keep it in sync with __init__'s Parametric primitive
+ u_min = FloatProperty(name = "U Min",
+ description = "",
+ default = 0.0)
+ v_min = FloatProperty(name = "V Min",
+ description = "",
+ default = 0.0)
+ u_max = FloatProperty(name = "U Max",
+ description = "",
+ default = 6.28)
+ v_max = FloatProperty(name = "V Max",
+ description = "",
+ default = 12.57)
+ x_eq = StringProperty(
+ maxlen=1024, default = "cos(v)*(1+cos(u))*sin(v/8)")
+ y_eq = StringProperty(
+ maxlen=1024, default = "sin(u)*sin(v/8)+cos(v/8)*1.5")
+ z_eq = StringProperty(
+ maxlen=1024, default = "sin(v)*(1+cos(u))*sin(v/8)")
+
+ def execute(self,context):
+ props = self.properties
+ u_min = props.u_min
+ v_min = props.v_min
+ u_max = props.u_max
+ v_max = props.v_max
+ x_eq = props.x_eq
+ y_eq = props.y_eq
+ z_eq = props.z_eq
+
+ pov_parametric_define(context, self, None)
+ self.report({'WARNING'}, "This native POV-Ray primitive "
+ "won't have any vertex to show in edit mode")
+ return {'FINISHED'}
+
+class POVRAY_OT_parametric_update(bpy.types.Operator):
+ bl_idname = "pov.parametric_update"
+ bl_label = "Update"
+ bl_description = "Update parametric object"
+ bl_options = {'REGISTER', 'UNDO'}
+ COMPAT_ENGINES = {'POVRAY_RENDER'}
+
+ @classmethod
+ def poll(cls, context):
+ engine = context.scene.render.engine
+ ob = context.object
+ return (ob and ob.data and ob.type == 'MESH' and engine in cls.COMPAT_ENGINES)
+
+ def execute(self, context):
+
+ pov_parametric_define(context, None, context.object)
+
+ return {'FINISHED'}
+#######################################################################
+class POVRAY_OT_shape_polygon_to_circle_add(bpy.types.Operator):
+ bl_idname = "pov.addpolygontocircle"
+ bl_label = "Polygon To Circle Blending"
+ bl_description = "Add Polygon To Circle Blending Surface"
+ bl_options = {'REGISTER', 'UNDO'}
+ COMPAT_ENGINES = {'POVRAY_RENDER'}
+
+ # XXX Keep it in sync with __init__'s polytocircle properties
+ polytocircle_resolution = IntProperty(name = "Resolution",
+ description = "",
+ default = 3, min = 0, max = 256)
+ polytocircle_ngon = IntProperty(name = "NGon",
+ description = "",
+ min = 3, max = 64,default = 5)
+ polytocircle_ngonR = FloatProperty(name = "NGon Radius",
+ description = "",
+ default = 0.3)
+ polytocircle_circleR = FloatProperty(name = "Circle Radius",
+ description = "",
+ default = 1.0)
+ def execute(self,context):
+ props = self.properties
+ ngon = props.polytocircle_ngon
+ ngonR = props.polytocircle_ngonR
+ circleR = props.polytocircle_circleR
+ resolution = props.polytocircle_resolution
+ layers = 20*[False]
+ layers[0] = True
+ bpy.ops.mesh.primitive_circle_add(vertices=ngon, radius=ngonR, fill_type='NGON',enter_editmode=True, layers=layers)
+ bpy.ops.transform.translate(value=(0, 0, 1))
+ bpy.ops.mesh.subdivide(number_cuts=resolution)
+ numCircleVerts = ngon + (ngon*resolution)
+ bpy.ops.mesh.select_all(action='DESELECT')
+ bpy.ops.mesh.primitive_circle_add(vertices=numCircleVerts, radius=circleR, fill_type='NGON',enter_editmode=True, layers=layers)
+ bpy.ops.transform.translate(value=(0, 0, -1))
+ bpy.ops.mesh.select_all(action='SELECT')
+ bpy.ops.mesh.bridge_edge_loops()
+ if ngon < 5:
+ bpy.ops.mesh.select_all(action='DESELECT')
+ bpy.ops.mesh.primitive_circle_add(vertices=ngon, radius=ngonR, fill_type='TRIFAN',enter_editmode=True, layers=layers)
+ bpy.ops.transform.translate(value=(0, 0, 1))
+ bpy.ops.mesh.select_all(action='SELECT')
+ bpy.ops.mesh.remove_doubles()
+ bpy.ops.object.mode_set(mode='OBJECT')
+ ob = context.object
+ ob.name = "Polygon_To_Circle"
+ ob.pov.object_as = 'POLYCIRCLE'
+ ob.pov.ngon = ngon
+ ob.pov.ngonR = ngonR
+ ob.pov.circleR = circleR
+ bpy.ops.object.mode_set(mode="EDIT")
+ bpy.ops.mesh.hide(unselected=False)
+ bpy.ops.object.mode_set(mode="OBJECT")
+ return {'FINISHED'}
+
+#############################IMPORT
+class ImportAvogadroPOV(bpy.types.Operator, ImportHelper):
+ """Load Povray File as output by Avogadro"""
+ bl_idname = "import_scene.avogadro"
+ bl_label = "Import POV Avogadro"
+ bl_options = {'PRESET', 'UNDO'}
+ COMPAT_ENGINES = {'POVRAY_RENDER'}
+
+ filename_ext = ".pov"
+ filter_glob = StringProperty(
+ default="*.pov",
+ options={'HIDDEN'},
+ )
+
+ def execute(self, context):
+ coords=[]
+ colors = []
+ matNames = []
+ xall = yall = zall = []
+ layers = 20*[False]
+ layers[0] = True
+ ob = None
+ camloc = (0,0,0)
+ filepov = bpy.path.abspath(self.filepath)
+ for line in open(filepov):
+ string = line.replace("<"," ")
+ chars = [">","{","}",","]
+ for symbol in chars:
+ string = string.replace(symbol," ")
+ split = string.split()
+ if split and split[0] == "location":
+ x = float(split[1])
+ y = float(split[2])
+ z = float(split[3])
+ camloc = ((x,y,z))
+ if split and len(split) == 7:
+ try:
+ x1 = float(split[0])
+ coords.append(x1)
+ except:
+ pass
+ if coords != []:
+ x1 = float(split[0])
+ y1 = float(split[1])
+ z1 = float(split[2])
+ x2 = float(split[3])
+ y2 = float(split[4])
+ z2 = float(split[5])
+ xall.append(x1)
+ yall.append(y1)
+ zall.append(z1)
+ xall.append(x2)
+ yall.append(y2)
+ zall.append(z2)
+ radius = float(split[6])
+ curveData = bpy.data.curves.new('myCurve', type='CURVE')
+ curveData.dimensions = '3D'
+ curveData.resolution_u = 2
+ curveData.fill_mode = "FULL"
+ curveData.bevel_depth = radius
+ curveData.bevel_resolution = 5
+ polyline = curveData.splines.new('POLY')
+ polyline.points.add(1)
+ polyline.points[0].co = (x1, y1, z1, 1)
+ polyline.points[1].co = (x2, y2, z2, 1)
+ ob = bpy.data.objects.new('myCurve', curveData)
+ scn = bpy.context.scene
+ scn.objects.link(ob)
+ scn.objects.active = ob
+ ob.select = True
+ bpy.ops.object.convert(target='MESH',keep_original=False)
+ #XXX TODO use a PovCylinder instead of mesh
+ #but add end points and radius to addPovcylinder op first
+ ob.select=False
+ coords = []
+ if split and len(split) == 4:
+ try:
+ x = float(split[0])
+ coords.append(x)
+ except:
+ pass
+ if coords != []:
+ x = float(split[0])
+ y = float(split[1])
+ z = float(split[2])
+ xall.append(x)
+ yall.append(y)
+ zall.append(z)
+ radius = float(split[3])
+
+
+ ob.pov.imported_loc=(x, y, z)
+ bpy.ops.pov.addsphere(R=radius, imported_loc=(x, y, z))
+ bpy.ops.object.shade_smooth()
+ ob = bpy.context.object
+ coords = []
+ if split and len(split) == 6:
+ if split[0] == "pigment":
+ r,g,b,t = float(split[2]),float(split[3]),float(split[4]),float(split[5])
+ color = (r,g,b,t)
+ if colors == [] or (colors != [] and color not in colors):
+ colors.append(color)
+ name = ob.name+"_mat"
+ matNames.append(name)
+ mat = bpy.data.materials.new(name)
+ mat.diffuse_color = (r,g,b)
+ mat.alpha = 1-t
+ ob.data.materials.append(mat)
+ print (colors)
+ else:
+ for i in range(len(colors)):
+ if color == colors[i]:
+ ob.data.materials.append(bpy.data.materials[matNames[i]])
+ x0 = min(xall)
+ x1 = max(xall)
+ y0 = min(yall)
+ y1 = max(yall)
+ z0 = min(zall)
+ z1 = max(zall)
+ x = (x0+x1)/2
+ y = (y0+y1)/2
+ z = (z0+z1)/2
+ bpy.ops.object.empty_add(layers=layers)
+ ob = bpy.context.object
+ ob.location = ((x,y,z))
+ for obj in bpy.context.scene.objects:
+ if obj.type == "CAMERA":
+ track = obj.constraints.new(type = "TRACK_TO")
+ track.target = ob
+ track.track_axis ="TRACK_NEGATIVE_Z"
+ track.up_axis = "UP_Y"
+ obj.location = camloc
+ for obj in bpy.context.scene.objects:
+ if obj.type == "LAMP":
+ obj.location = camloc
+ obj.pov.light_type = "shadowless"
+ break
+ return {'FINISHED'}
diff --git a/render_povray/render.py b/render_povray/render.py
index 428f667a..acc27a03 100644
--- a/render_povray/render.py
+++ b/render_povray/render.py
@@ -23,13 +23,17 @@ import subprocess
import os
import sys
import time
-from math import atan, pi, degrees, sqrt
+from math import atan, pi, degrees, sqrt, cos, sin
import re
import random
import platform#
import subprocess#
-from bpy.types import(Operator)#all added for render preview
+from bpy.types import(Operator)
+
+
from . import df3 # for smoke rendering
+from . import shading # for BI POV haders emulation
+from . import primitives # for import and export of POV specific primitives
##############################SF###########################
##############find image texture
def imageFormat(imgF):
@@ -185,12 +189,128 @@ def renderable_objects():
tabLevel = 0
unpacked_images=[]
-workDir=os.path.dirname(__file__)
-previewDir=os.path.join(workDir, "preview")
+user_dir = bpy.utils.resource_path('USER')
+preview_dir = os.path.join(user_dir, "preview")
+
## Make sure Preview directory exists and is empty
-if not os.path.isdir(previewDir):
- os.mkdir(previewDir)
-smokePath = os.path.join(previewDir, "smoke.df3")
+smokePath = os.path.join(preview_dir, "smoke.df3")
+
+def write_global_setting(scene,file):
+ file.write("global_settings {\n")
+ file.write(" assumed_gamma %.6f\n"%scene.pov.assumed_gamma)
+ if scene.pov.global_settings_default == False:
+ if scene.pov.adc_bailout_enable and scene.pov.radio_enable == False:
+ file.write(" adc_bailout %.6f\n"%scene.pov.adc_bailout)
+ if scene.pov.ambient_light_enable:
+ file.write(" ambient_light <%.6f,%.6f,%.6f>\n"%scene.pov.ambient_light[:])
+ if scene.pov.irid_wavelength_enable:
+ file.write(" irid_wavelength <%.6f,%.6f,%.6f>\n"%scene.pov.irid_wavelength[:])
+ if scene.pov.charset_enable:
+ file.write(" charset %s\n"%scene.pov.charset)
+ if scene.pov.max_trace_level_enable:
+ file.write(" max_trace_level %s\n"%scene.pov.max_trace_level)
+ if scene.pov.max_intersections_enable:
+ file.write(" max_intersections %s\n"%scene.pov.max_intersections)
+ if scene.pov.number_of_waves_enable:
+ file.write(" number_of_waves %s\n"%scene.pov.number_of_waves)
+ if scene.pov.noise_generator_enable:
+ file.write(" noise_generator %s\n"%scene.pov.noise_generator)
+ if scene.pov.sslt_enable:
+ file.write(" mm_per_unit %s\n"%scene.pov.mm_per_unit)
+ file.write(" subsurface {\n")
+ file.write(" samples %s, %s\n"%(scene.pov.sslt_samples_max,scene.pov.sslt_samples_min))
+ if scene.pov.sslt_radiosity:
+ file.write(" radiosity on\n")
+ file.write("}\n")
+
+ if scene.pov.radio_enable:
+ file.write(" radiosity {\n")
+ file.write(" pretrace_start %.6f\n"%scene.pov.radio_pretrace_start)
+ file.write(" pretrace_end %.6f\n"%scene.pov.radio_pretrace_end)
+ file.write(" count %s\n"%scene.pov.radio_count)
+ file.write(" nearest_count %s\n"%scene.pov.radio_nearest_count)
+ file.write(" error_bound %.6f\n"%scene.pov.radio_error_bound)
+ file.write(" recursion_limit %s\n"%scene.pov.radio_recursion_limit)
+ file.write(" low_error_factor %.6f\n"%scene.pov.radio_low_error_factor)
+ file.write(" gray_threshold %.6f\n"%scene.pov.radio_gray_threshold)
+ file.write(" maximum_reuse %.6f\n"%scene.pov.radio_maximum_reuse)
+ file.write(" minimum_reuse %.6f\n"%scene.pov.radio_minimum_reuse)
+ file.write(" brightness %.6f\n"%scene.pov.radio_brightness)
+ file.write(" adc_bailout %.6f\n"%scene.pov.radio_adc_bailout)
+ if scene.pov.radio_normal:
+ file.write(" normal on\n")
+ if scene.pov.radio_always_sample:
+ file.write(" always_sample on\n")
+ if scene.pov.radio_media:
+ file.write(" media on\n")
+ if scene.pov.radio_subsurface:
+ file.write(" subsurface on\n")
+ file.write(" }\n")
+
+ if scene.pov.photon_enable:
+ file.write(" photons {\n")
+ if scene.pov.photon_enable_count:
+ file.write(" count %s\n"%scene.pov.photon_count)
+ else:
+ file.write(" spacing %.6g\n"%scene.pov.photon_spacing)
+ if scene.pov.photon_gather:
+ file.write(" gather %s, %s\n"%(scene.pov.photon_gather_min,scene.pov.photon_gather_max))
+ if scene.pov.photon_autostop:
+ file.write(" autostop %.4g\n"%scene.pov.photon_autostop_value)
+ if scene.pov.photon_jitter_enable:
+ file.write(" jitter %.4g\n"%scene.pov.photon_jitter)
+ file.write(" max_trace_level %s\n"%scene.pov.photon_max_trace_level)
+ if scene.pov.photon_adc:
+ file.write(" adc_bailout %.6f\n"%scene.pov.photon_adc_bailout)
+ if scene.pov.photon_media_enable:
+ file.write(" media %s, %s\n"%(scene.pov.photon_media_steps,scene.pov.photon_media_factor))
+ if scene.pov.photon_savefile or scene.pov.photon_loadfile:
+ filePh = bpy.path.abspath(scene.pov.photon_map_file)
+ if scene.pov.photon_savefile:
+ file.write('save_file "%s"\n'%filePh)
+ if scene.pov.photon_loadfile and os.path.exists(filePh):
+ file.write('load_file "%s"\n'%filePh)
+ file.write("}\n")
+ file.write("}\n")
+
+def write_object_modifiers(scene,ob,File):
+ if ob.pov.hollow:
+ File.write("hollow\n")
+ if ob.pov.double_illuminate:
+ File.write("double_illuminate\n")
+ if ob.pov.sturm:
+ File.write("sturm\n")
+ if ob.pov.no_shadow:
+ File.write("no_shadow\n")
+ if ob.pov.no_image:
+ File.write("no_image\n")
+ if ob.pov.no_reflection:
+ File.write("no_reflection\n")
+ if ob.pov.no_radiosity:
+ File.write("no_radiosity\n")
+ if ob.pov.inverse:
+ File.write("inverse\n")
+ if ob.pov.hierarchy:
+ File.write("hierarchy\n")
+ if scene.pov.photon_enable:
+ File.write("photons {\n")
+ if ob.pov.target:
+ File.write("target %.4g\n"%ob.pov.target_value)
+ if ob.pov.refraction:
+ File.write("refraction on\n")
+ if ob.pov.reflection:
+ File.write("reflection on\n")
+ if ob.pov.pass_through:
+ File.write("pass_through\n")
+ File.write("}\n")
+ # if ob.pov.object_ior > 1:
+ # File.write("interior {\n")
+ # File.write("ior %.4g\n"%ob.pov.object_ior)
+ # if scene.pov.photon_enable and ob.pov.target and ob.pov.refraction and ob.pov.dispersion:
+ # File.write("ior %.4g\n"%ob.pov.dispersion_value)
+ # File.write("ior %s\n"%ob.pov.dispersion_samples)
+ # if scene.pov.photon_enable == False:
+ # File.write("caustics %.4g\n"%ob.pov.fake_caustics_power)
def exportPattern(texture):
tex=texture
@@ -825,236 +945,7 @@ def write_pov(filename, scene=None, info_callback=None):
materialNames = {}
DEF_MAT_NAME = "" #or "Default"?
-
- def writeMaterial(material):
- # Assumes only called once on each material
- if material:
- name_orig = material.name
- name = materialNames[name_orig] = uniqueName(bpy.path.clean_name(name_orig), materialNames)
- else:
- name = name_orig = DEF_MAT_NAME
-
-
- if material:
- # If saturation(.s) is not zero, then color is not grey, and has a tint
- colored_specular_found = (material.specular_color.s > 0.0)
-
- ##################
- # Several versions of the finish: Level conditions are variations for specular/Mirror
- # texture channel map with alternative finish of 0 specular and no mirror reflection.
- # Level=1 Means No specular nor Mirror reflection
- # Level=2 Means translation of spec and mir levels for when no map influences them
- # Level=3 Means Maximum Spec and Mirror
-
- def povHasnoSpecularMaps(Level):
- if Level == 1:
- tabWrite("#declare %s = finish {" % safety(name, Level=1))
- if comments:
- file.write(" //No specular nor Mirror reflection\n")
- else:
- tabWrite("\n")
- elif Level == 2:
- tabWrite("#declare %s = finish {" % safety(name, Level=2))
- if comments:
- file.write(" //translation of spec and mir levels for when no map " \
- "influences them\n")
- else:
- tabWrite("\n")
- elif Level == 3:
- tabWrite("#declare %s = finish {" % safety(name, Level=3))
- if comments:
- file.write(" //Maximum Spec and Mirror\n")
- else:
- tabWrite("\n")
-
- if material:
- # POV-Ray 3.7 now uses two diffuse values respectively for front and back shading
- # (the back diffuse is like blender translucency)
- frontDiffuse = material.diffuse_intensity
- backDiffuse = material.translucency
-
- if material.pov.conserve_energy:
-
- #Total should not go above one
- if (frontDiffuse + backDiffuse) <= 1.0:
- pass
- elif frontDiffuse == backDiffuse:
- # Try to respect the user's 'intention' by comparing the two values but
- # bringing the total back to one.
- frontDiffuse = backDiffuse = 0.5
- # Let the highest value stay the highest value.
- elif frontDiffuse > backDiffuse:
- # clamps the sum below 1
- backDiffuse = min(backDiffuse, (1.0 - frontDiffuse))
- else:
- frontDiffuse = min(frontDiffuse, (1.0 - backDiffuse))
-
- # map hardness between 0.0 and 1.0
- roughness = ((1.0 - ((material.specular_hardness - 1.0) / 510.0)))
- ## scale from 0.0 to 0.1
- roughness *= 0.1
- # add a small value because 0.0 is invalid.
- roughness += (1.0 / 511.0)
-
- ################################Diffuse Shader######################################
- # Not used for Full spec (Level=3) of the shader.
- if material.diffuse_shader == 'OREN_NAYAR' and Level != 3:
- # Blender roughness is what is generally called oren nayar Sigma,
- # and brilliance in POV-Ray.
- tabWrite("brilliance %.3g\n" % (0.9 + material.roughness))
-
- if material.diffuse_shader == 'TOON' and Level != 3:
- tabWrite("brilliance %.3g\n" % (0.01 + material.diffuse_toon_smooth * 0.25))
- # Lower diffuse and increase specular for toon effect seems to look better
- # in POV-Ray.
- frontDiffuse *= 0.5
-
- if material.diffuse_shader == 'MINNAERT' and Level != 3:
- #tabWrite("aoi %.3g\n" % material.darkness)
- pass # let's keep things simple for now
- if material.diffuse_shader == 'FRESNEL' and Level != 3:
- #tabWrite("aoi %.3g\n" % material.diffuse_fresnel_factor)
- pass # let's keep things simple for now
- if material.diffuse_shader == 'LAMBERT' and Level != 3:
- # trying to best match lambert attenuation by that constant brilliance value
- tabWrite("brilliance 1.8\n")
-
- if Level == 2:
- ###########################Specular Shader######################################
- # No difference between phong and cook torrence in blender HaHa!
- if (material.specular_shader == 'COOKTORR' or
- material.specular_shader == 'PHONG'):
- tabWrite("phong %.3g\n" % (material.specular_intensity))
- tabWrite("phong_size %.3g\n" % (material.specular_hardness / 2 + 0.25))
-
- # POV-Ray 'specular' keyword corresponds to a Blinn model, without the ior.
- elif material.specular_shader == 'BLINN':
- # Use blender Blinn's IOR just as some factor for spec intensity
- tabWrite("specular %.3g\n" % (material.specular_intensity *
- (material.specular_ior / 4.0)))
- tabWrite("roughness %.3g\n" % roughness)
- #Could use brilliance 2(or varying around 2 depending on ior or factor) too.
-
- elif material.specular_shader == 'TOON':
- tabWrite("phong %.3g\n" % (material.specular_intensity * 2.0))
- # use extreme phong_size
- tabWrite("phong_size %.3g\n" % (0.1 + material.specular_toon_smooth / 2.0))
-
- elif material.specular_shader == 'WARDISO':
- # find best suited default constant for brilliance Use both phong and
- # specular for some values.
- tabWrite("specular %.3g\n" % (material.specular_intensity /
- (material.specular_slope + 0.0005)))
- # find best suited default constant for brilliance Use both phong and
- # specular for some values.
- tabWrite("roughness %.4g\n" % (0.0005 + material.specular_slope / 10.0))
- # find best suited default constant for brilliance Use both phong and
- # specular for some values.
- tabWrite("brilliance %.4g\n" % (1.8 - material.specular_slope * 1.8))
-
- ####################################################################################
- elif Level == 1:
- tabWrite("specular 0\n")
- elif Level == 3:
- tabWrite("specular 1\n")
- tabWrite("diffuse %.3g %.3g\n" % (frontDiffuse, backDiffuse))
-
- tabWrite("ambient %.3g\n" % material.ambient)
- # POV-Ray blends the global value
- #tabWrite("ambient rgb <%.3g, %.3g, %.3g>\n" % \
- # tuple([c*material.ambient for c in world.ambient_color]))
- tabWrite("emission %.3g\n" % material.emit) # New in POV-Ray 3.7
-
- #POV-Ray just ignores roughness if there's no specular keyword
- #tabWrite("roughness %.3g\n" % roughness)
-
- if material.pov.conserve_energy:
- # added for more realistic shading. Needs some checking to see if it
- # really works. --Maurice.
- tabWrite("conserve_energy\n")
-
- if colored_specular_found == True:
- tabWrite("metallic\n")
-
- # 'phong 70.0 '
- if Level != 1:
- if material.raytrace_mirror.use:
- raytrace_mirror = material.raytrace_mirror
- if raytrace_mirror.reflect_factor:
- tabWrite("reflection {\n")
- tabWrite("rgb <%.3g, %.3g, %.3g>\n" % material.mirror_color[:])
- if material.pov.mirror_metallic:
- tabWrite("metallic %.3g\n" % (raytrace_mirror.reflect_factor))
- # Blurry reflections for UberPOV
- if using_uberpov and raytrace_mirror.gloss_factor < 1.0:
- #tabWrite("#ifdef(unofficial) #if(unofficial = \"patch\") #if(patch(\"upov-reflection-roughness\") > 0)\n")
- tabWrite("roughness %.6f\n" % \
- (0.000001/raytrace_mirror.gloss_factor))
- #tabWrite("#end #end #end\n") # This and previous comment for backward compatibility, messier pov code
- if material.pov.mirror_use_IOR: # WORKING ?
- # Removed from the line below: gives a more physically correct
- # material but needs proper IOR. --Maurice
- tabWrite("fresnel 1 ")
- tabWrite("falloff %.3g exponent %.3g} " % \
- (raytrace_mirror.fresnel, raytrace_mirror.fresnel_factor))
-
- if material.subsurface_scattering.use:
- subsurface_scattering = material.subsurface_scattering
- tabWrite("subsurface { translucency <%.3g, %.3g, %.3g> }\n" % (
- (subsurface_scattering.radius[0]),
- (subsurface_scattering.radius[1]),
- (subsurface_scattering.radius[2]),
- )
- )
-
- if material.pov.irid_enable:
- tabWrite("irid { %.4g thickness %.4g turbulence %.4g }" % \
- (material.pov.irid_amount, material.pov.irid_thickness,
- material.pov.irid_turbulence))
-
- else:
- tabWrite("diffuse 0.8\n")
- tabWrite("phong 70.0\n")
-
- #tabWrite("specular 0.2\n")
-
- # This is written into the object
- '''
- if material and material.transparency_method=='RAYTRACE':
- 'interior { ior %.3g} ' % material.raytrace_transparency.ior
- '''
-
- #tabWrite("crand 1.0\n") # Sand granyness
- #tabWrite("metallic %.6f\n" % material.spec)
- #tabWrite("phong %.6f\n" % material.spec)
- #tabWrite("phong_size %.6f\n" % material.spec)
- #tabWrite("brilliance %.6f " % (material.specular_hardness/256.0) # Like hardness
-
- tabWrite("}\n\n")
-
- # Level=2 Means translation of spec and mir levels for when no map influences them
- povHasnoSpecularMaps(Level=2)
-
- if material:
- special_texture_found = False
- for t in material.texture_slots:
- if t and t.use:
- if (t.texture.type == 'IMAGE' and t.texture.image) or t.texture.type != 'IMAGE':
- validPath=True
- else:
- validPath=False
- if(t and t.use and validPath and
- (t.use_map_specular or t.use_map_raymir or t.use_map_normal or t.use_map_alpha)):
- special_texture_found = True
- continue # Some texture found
-
- if special_texture_found or colored_specular_found:
- # Level=1 Means No specular nor Mirror reflection
- povHasnoSpecularMaps(Level=1)
-
- # Level=3 Means Maximum Spec and Mirror
- povHasnoSpecularMaps(Level=3)
-
+
def exportCamera():
camera = scene.camera
@@ -1097,6 +988,8 @@ def write_pov(filename, scene=None, info_callback=None):
tabWrite("focal_point <0, 0, %f>\n" % focal_point)
tabWrite("}\n")
+
+
def exportLamps(lamps):
# Incremented after each lamp export to declare its target
# currently used for Fresnel diffuse shader as their slope vector:
@@ -1189,7 +1082,823 @@ def write_pov(filename, scene=None, info_callback=None):
ob.rotation_euler.x, ob.rotation_euler.y, ob.rotation_euler.z))
####################################################################################################
+ def exportRainbows(rainbows):
+ for ob in rainbows:
+ povdataname = ob.data.name #enough?
+ angle = degrees(ob.data.spot_size/2.5) #spot_size is in radians in Blender; 2.5 is an empirical divisor
+ width = ob.data.spot_blend *10
+ distance = ob.data.shadow_buffer_clip_start
+ #eps=0.0000001
+ #angle = br/(cr+eps) * 10 #eps is small epsilon variable to avoid dividing by zero
+ #width = ob.dimensions[2] #now let's say width of rainbow is the actual proxy height
+ # formerly:
+ #cz-bz # let's say the width of the rainbow is the height of the cone (interfacing choice)
+
+ # v(A,B) rotates vector A about origin by vector B.
+ # and avoid a 0 length vector by adding 1
+
+ # file.write("#declare %s_Target= vrotate(<%.6g,%.6g,%.6g>,<%.4g,%.4g,%.4g>);\n" % \
+ # (povdataname, -(ob.location.x+0.1), -(ob.location.y+0.1), -(ob.location.z+0.1),
+ # ob.rotation_euler.x, ob.rotation_euler.y, ob.rotation_euler.z))
+
+ direction = (ob.location.x,ob.location.y,ob.location.z) # not taking matrix into account
+ rmatrix = global_matrix * ob.matrix_world
+
+ #ob.rotation_euler.to_matrix().to_4x4() * mathutils.Vector((0,0,1))
+ # XXX Is result of the below offset by 90 degrees?
+ up =ob.matrix_world.to_3x3()[1].xyz #* global_matrix
+
+ # XXX TO CHANGE:
+ #formerly:
+ #tabWrite("#declare %s = rainbow {\n"%povdataname)
+
+ # Clumsy for now, but keep the rainbow out of the instancing
+ # system because it is not an object; later, use lamps instead of meshes.
+
+ #del data_ref[dataname]
+ tabWrite("rainbow {\n")
+
+ tabWrite("angle %.4f\n"%angle)
+ tabWrite("width %.4f\n"%width)
+ tabWrite("distance %.4f\n"%distance)
+ tabWrite("arc_angle %.4f\n"%ob.pov.arc_angle)
+ tabWrite("falloff_angle %.4f\n"%ob.pov.falloff_angle)
+ tabWrite("direction <%.4f,%.4f,%.4f>\n"%rmatrix.translation[:])
+ tabWrite("up <%.4f,%.4f,%.4f>\n"%(up[0],up[1],up[2]))
+ tabWrite("color_map {\n")
+ tabWrite("[0.000 color rgbt<1.0, 0.5, 1.0, 1.0>]\n")
+ tabWrite("[0.130 color rgbt<0.5, 0.5, 1.0, 0.9>]\n")
+ tabWrite("[0.298 color rgbt<0.2, 0.2, 1.0, 0.7>]\n")
+ tabWrite("[0.412 color rgbt<0.2, 1.0, 1.0, 0.4>]\n")
+ tabWrite("[0.526 color rgbt<0.2, 1.0, 0.2, 0.4>]\n")
+ tabWrite("[0.640 color rgbt<1.0, 1.0, 0.2, 0.4>]\n")
+ tabWrite("[0.754 color rgbt<1.0, 0.5, 0.2, 0.6>]\n")
+ tabWrite("[0.900 color rgbt<1.0, 0.2, 0.2, 0.7>]\n")
+ tabWrite("[1.000 color rgbt<1.0, 0.2, 0.2, 1.0>]\n")
+ tabWrite("}\n")
+
+
+ povMatName = "Default_texture"
+ #tabWrite("texture {%s}\n"%povMatName)
+ write_object_modifiers(scene,ob,file)
+ #tabWrite("rotate x*90\n")
+ #matrix = global_matrix * ob.matrix_world
+ #writeMatrix(matrix)
+ tabWrite("}\n")
+ #continue #Don't render proxy mesh, skip to next object
+
+################################XXX LOFT, ETC.
+ def exportCurves(scene, ob):
+ name_orig = "OB" + ob.name
+ dataname_orig = "DATA" + ob.data.name
+
+ name = string_strip_hyphen(bpy.path.clean_name(name_orig))
+ dataname = string_strip_hyphen(bpy.path.clean_name(dataname_orig))
+
+ global_matrix = mathutils.Matrix.Rotation(-pi / 2.0, 4, 'X')
+ matrix=global_matrix*ob.matrix_world
+ bezier_sweep = False
+ if ob.pov.curveshape == 'sphere_sweep':
+ for spl in ob.data.splines:
+ if spl.type == "BEZIER":
+ bezier_sweep = True
+ if ob.pov.curveshape in {'loft','birail'}:
+ n=0
+ for spline in ob.data.splines:
+ n+=1
+ tabWrite('#declare %s%s=spline {\n'%(dataname,n))
+ tabWrite('cubic_spline\n')
+ lp = len(spline.points)
+ delta = 1/(lp)
+ d=-delta
+ point = spline.points[lp-1]
+ x,y,z,w = point.co[:]
+ tabWrite('%.6f, <%.6f,%.6f,%.6f>\n'%(d,x,y,z))
+ d+=delta
+ for point in spline.points:
+ x,y,z,w = point.co[:]
+ tabWrite('%.6f, <%.6f,%.6f,%.6f>\n'%(d,x,y,z))
+ d+=delta
+ for i in range(2):
+ point = spline.points[i]
+ x,y,z,w = point.co[:]
+ tabWrite('%.6f, <%.6f,%.6f,%.6f>\n'%(d,x,y,z))
+ d+=delta
+ tabWrite('}\n')
+ if ob.pov.curveshape in {'loft'}:
+ n = len(ob.data.splines)
+ tabWrite('#declare %s = array[%s]{\n'%(dataname,(n+3)))
+ tabWrite('spline{%s%s},\n'%(dataname,n))
+ for i in range(n):
+ tabWrite('spline{%s%s},\n'%(dataname,(i+1)))
+ tabWrite('spline{%s1},\n'%(dataname))
+ tabWrite('spline{%s2}\n'%(dataname))
+ tabWrite('}\n')
+ # Use some of the Meshmaker.inc macro, here inlined
+ file.write('#macro CheckFileName(FileName)\n')
+ file.write(' #local Len=strlen(FileName);\n')
+ file.write(' #if(Len>0)\n')
+ file.write(' #if(file_exists(FileName))\n')
+ file.write(' #if(Len>=4)\n')
+ file.write(' #local Ext=strlwr(substr(FileName,Len-3,4))\n')
+ file.write(' #if (strcmp(Ext,".obj")=0 | strcmp(Ext,".pcm")=0 | strcmp(Ext,".arr")=0)\n')
+ file.write(' #local Return=99;\n')
+ file.write(' #else\n')
+ file.write(' #local Return=0;\n')
+ file.write(' #end\n')
+ file.write(' #else\n')
+ file.write(' #local Return=0;\n')
+ file.write(' #end\n')
+ file.write(' #else\n')
+ file.write(' #if(Len>=4)\n')
+ file.write(' #local Ext=strlwr(substr(FileName,Len-3,4))\n')
+ file.write(' #if (strcmp(Ext,".obj")=0 | strcmp(Ext,".pcm")=0 | strcmp(Ext,".arr")=0)\n')
+ file.write(' #if (strcmp(Ext,".obj")=0)\n')
+ file.write(' #local Return=2;\n')
+ file.write(' #end\n')
+ file.write(' #if (strcmp(Ext,".pcm")=0)\n')
+ file.write(' #local Return=3;\n')
+ file.write(' #end\n')
+ file.write(' #if (strcmp(Ext,".arr")=0)\n')
+ file.write(' #local Return=4;\n')
+ file.write(' #end\n')
+ file.write(' #else\n')
+ file.write(' #local Return=1;\n')
+ file.write(' #end\n')
+ file.write(' #else\n')
+ file.write(' #local Return=1;\n')
+ file.write(' #end\n')
+ file.write(' #end\n')
+ file.write(' #else\n')
+ file.write(' #local Return=1;\n')
+ file.write(' #end\n')
+ file.write(' (Return)\n')
+ file.write('#end\n')
+
+ file.write('#macro BuildSpline(Arr, SplType)\n')
+ file.write(' #local Ds=dimension_size(Arr,1);\n')
+ file.write(' #local Asc=asc(strupr(SplType));\n')
+ file.write(' #if(Asc!=67 & Asc!=76 & Asc!=81) \n')
+ file.write(' #local Asc=76;\n')
+ file.write(' #debug "\nWrong spline type defined (C/c/L/l/N/n/Q/q), using default linear_spline\\n"\n')
+ file.write(' #end\n')
+ file.write(' spline {\n')
+ file.write(' #switch (Asc)\n')
+ file.write(' #case (67) //C cubic_spline\n')
+ file.write(' cubic_spline\n')
+ file.write(' #break\n')
+ file.write(' #case (76) //L linear_spline\n')
+ file.write(' linear_spline\n')
+ file.write(' #break\n')
+ file.write(' #case (78) //N linear_spline\n')
+ file.write(' natural_spline\n')
+ file.write(' #break\n')
+ file.write(' #case (81) //Q Quadratic_spline\n')
+ file.write(' quadratic_spline\n')
+ file.write(' #break\n')
+ file.write(' #end\n')
+ file.write(' #local Add=1/((Ds-2)-1);\n')
+ file.write(' #local J=0-Add;\n')
+ file.write(' #local I=0;\n')
+ file.write(' #while (I<Ds)\n')
+ file.write(' J\n')
+ file.write(' Arr[I]\n')
+ file.write(' #local I=I+1;\n')
+ file.write(' #local J=J+Add;\n')
+ file.write(' #end\n')
+ file.write(' }\n')
+ file.write('#end\n')
+
+
+ file.write('#macro BuildWriteMesh2(VecArr, NormArr, UVArr, U, V, FileName)\n')
+ #suppressed some file checking from original macro because no more separate files
+ file.write(' #local Write=0;\n')
+ file.write(' #debug concat("\\n\\n Building mesh2: \\n - vertex_vectors\\n")\n')
+ file.write(' #local NumVertices=dimension_size(VecArr,1);\n')
+ file.write(' #switch (Write)\n')
+ file.write(' #case(1)\n')
+ file.write(' #write(\n')
+ file.write(' MeshFile,\n')
+ file.write(' " vertex_vectors {\\n",\n')
+ file.write(' " ", str(NumVertices,0,0),"\\n "\n')
+ file.write(' )\n')
+ file.write(' #break\n')
+ file.write(' #case(2)\n')
+ file.write(' #write(\n')
+ file.write(' MeshFile,\n')
+ file.write(' "# Vertices: ",str(NumVertices,0,0),"\\n"\n')
+ file.write(' )\n')
+ file.write(' #break\n')
+ file.write(' #case(3)\n')
+ file.write(' #write(\n')
+ file.write(' MeshFile,\n')
+ file.write(' str(2*NumVertices,0,0),",\\n"\n')
+ file.write(' )\n')
+ file.write(' #break\n')
+ file.write(' #case(4)\n')
+ file.write(' #write(\n')
+ file.write(' MeshFile,\n')
+ file.write(' "#declare VertexVectors= array[",str(NumVertices,0,0),"] {\\n "\n')
+ file.write(' )\n')
+ file.write(' #break\n')
+ file.write(' #end\n')
+ file.write(' mesh2 {\n')
+ file.write(' vertex_vectors {\n')
+ file.write(' NumVertices\n')
+ file.write(' #local I=0;\n')
+ file.write(' #while (I<NumVertices)\n')
+ file.write(' VecArr[I]\n')
+ file.write(' #switch(Write)\n')
+ file.write(' #case(1)\n')
+ file.write(' #write(MeshFile, VecArr[I])\n')
+ file.write(' #break\n')
+ file.write(' #case(2)\n')
+ file.write(' #write(\n')
+ file.write(' MeshFile,\n')
+ file.write(' "v ", VecArr[I].x," ", VecArr[I].y," ", VecArr[I].z,"\\n"\n')
+ file.write(' )\n')
+ file.write(' #break\n')
+ file.write(' #case(3)\n')
+ file.write(' #write(\n')
+ file.write(' MeshFile,\n')
+ file.write(' VecArr[I].x,",", VecArr[I].y,",", VecArr[I].z,",\\n"\n')
+ file.write(' )\n')
+ file.write(' #break\n')
+ file.write(' #case(4)\n')
+ file.write(' #write(MeshFile, VecArr[I])\n')
+ file.write(' #break\n')
+ file.write(' #end\n')
+ file.write(' #local I=I+1;\n')
+ file.write(' #if(Write=1 | Write=4)\n')
+ file.write(' #if(mod(I,3)=0)\n')
+ file.write(' #write(MeshFile,"\\n ")\n')
+ file.write(' #end\n')
+ file.write(' #end \n')
+ file.write(' #end\n')
+ file.write(' #switch(Write)\n')
+ file.write(' #case(1)\n')
+ file.write(' #write(MeshFile,"\\n }\\n")\n')
+ file.write(' #break\n')
+ file.write(' #case(2)\n')
+ file.write(' #write(MeshFile,"\\n")\n')
+ file.write(' #break\n')
+ file.write(' #case(3)\n')
+ file.write(' // do nothing\n')
+ file.write(' #break\n')
+ file.write(' #case(4) \n')
+ file.write(' #write(MeshFile,"\\n}\\n")\n')
+ file.write(' #break\n')
+ file.write(' #end\n')
+ file.write(' }\n')
+
+ file.write(' #debug concat(" - normal_vectors\\n") \n')
+ file.write(' #local NumVertices=dimension_size(NormArr,1);\n')
+ file.write(' #switch(Write)\n')
+ file.write(' #case(1)\n')
+ file.write(' #write(\n')
+ file.write(' MeshFile,\n')
+ file.write(' " normal_vectors {\\n",\n')
+ file.write(' " ", str(NumVertices,0,0),"\\n "\n')
+ file.write(' )\n')
+ file.write(' #break\n')
+ file.write(' #case(2)\n')
+ file.write(' #write(\n')
+ file.write(' MeshFile,\n')
+ file.write(' "# Normals: ",str(NumVertices,0,0),"\\n"\n')
+ file.write(' )\n')
+ file.write(' #break\n')
+ file.write(' #case(3)\n')
+ file.write(' // do nothing\n')
+ file.write(' #break\n')
+ file.write(' #case(4)\n')
+ file.write(' #write(\n')
+ file.write(' MeshFile,\n')
+ file.write(' "#declare NormalVectors= array[",str(NumVertices,0,0),"] {\\n "\n')
+ file.write(' )\n')
+ file.write(' #break\n')
+ file.write(' #end\n')
+ file.write(' normal_vectors {\n')
+ file.write(' NumVertices\n')
+ file.write(' #local I=0;\n')
+ file.write(' #while (I<NumVertices)\n')
+ file.write(' NormArr[I]\n')
+ file.write(' #switch(Write)\n')
+ file.write(' #case(1)\n')
+ file.write(' #write(MeshFile NormArr[I])\n')
+ file.write(' #break\n')
+ file.write(' #case(2)\n')
+ file.write(' #write(\n')
+ file.write(' MeshFile,\n')
+ file.write(' "vn ", NormArr[I].x," ", NormArr[I].y," ", NormArr[I].z,"\\n"\n')
+ file.write(' )\n')
+ file.write(' #break\n')
+ file.write(' #case(3)\n')
+ file.write(' #write(\n')
+ file.write(' MeshFile,\n')
+ file.write(' NormArr[I].x,",", NormArr[I].y,",", NormArr[I].z,",\\n"\n')
+ file.write(' )\n')
+ file.write(' #break\n')
+ file.write(' #case(4)\n')
+ file.write(' #write(MeshFile NormArr[I])\n')
+ file.write(' #break\n')
+ file.write(' #end\n')
+ file.write(' #local I=I+1;\n')
+ file.write(' #if(Write=1 | Write=4) \n')
+ file.write(' #if(mod(I,3)=0)\n')
+ file.write(' #write(MeshFile,"\\n ")\n')
+ file.write(' #end\n')
+ file.write(' #end\n')
+ file.write(' #end\n')
+ file.write(' #switch(Write)\n')
+ file.write(' #case(1)\n')
+ file.write(' #write(MeshFile,"\\n }\\n")\n')
+ file.write(' #break\n')
+ file.write(' #case(2)\n')
+ file.write(' #write(MeshFile,"\\n")\n')
+ file.write(' #break\n')
+ file.write(' #case(3)\n')
+ file.write(' //do nothing\n')
+ file.write(' #break\n')
+ file.write(' #case(4)\n')
+ file.write(' #write(MeshFile,"\\n}\\n")\n')
+ file.write(' #break\n')
+ file.write(' #end\n')
+ file.write(' }\n')
+
+ file.write(' #debug concat(" - uv_vectors\\n") \n')
+ file.write(' #local NumVertices=dimension_size(UVArr,1);\n')
+ file.write(' #switch(Write)\n')
+ file.write(' #case(1)\n')
+ file.write(' #write(\n')
+ file.write(' MeshFile, \n')
+ file.write(' " uv_vectors {\\n",\n')
+ file.write(' " ", str(NumVertices,0,0),"\\n "\n')
+ file.write(' )\n')
+ file.write(' #break\n')
+ file.write(' #case(2)\n')
+ file.write(' #write(\n')
+ file.write(' MeshFile,\n')
+ file.write(' "# UV-vectors: ",str(NumVertices,0,0),"\\n"\n')
+ file.write(' )\n')
+ file.write(' #break\n')
+ file.write(' #case(3)\n')
+ file.write(' // do nothing, *.pcm does not support uv-vectors\n')
+ file.write(' #break\n')
+ file.write(' #case(4)\n')
+ file.write(' #write(\n')
+ file.write(' MeshFile,\n')
+ file.write(' "#declare UVVectors= array[",str(NumVertices,0,0),"] {\\n "\n')
+ file.write(' )\n')
+ file.write(' #break\n')
+ file.write(' #end\n')
+ file.write(' uv_vectors {\n')
+ file.write(' NumVertices\n')
+ file.write(' #local I=0;\n')
+ file.write(' #while (I<NumVertices)\n')
+ file.write(' UVArr[I]\n')
+ file.write(' #switch(Write)\n')
+ file.write(' #case(1)\n')
+ file.write(' #write(MeshFile UVArr[I])\n')
+ file.write(' #break\n')
+ file.write(' #case(2)\n')
+ file.write(' #write(\n')
+ file.write(' MeshFile,\n')
+ file.write(' "vt ", UVArr[I].u," ", UVArr[I].v,"\\n"\n')
+ file.write(' )\n')
+ file.write(' #break\n')
+ file.write(' #case(3)\n')
+ file.write(' //do nothing\n')
+ file.write(' #break\n')
+ file.write(' #case(4)\n')
+ file.write(' #write(MeshFile UVArr[I])\n')
+ file.write(' #break\n')
+ file.write(' #end\n')
+ file.write(' #local I=I+1; \n')
+ file.write(' #if(Write=1 | Write=4)\n')
+ file.write(' #if(mod(I,3)=0)\n')
+ file.write(' #write(MeshFile,"\\n ")\n')
+ file.write(' #end \n')
+ file.write(' #end\n')
+ file.write(' #end \n')
+ file.write(' #switch(Write)\n')
+ file.write(' #case(1)\n')
+ file.write(' #write(MeshFile,"\\n }\\n")\n')
+ file.write(' #break\n')
+ file.write(' #case(2)\n')
+ file.write(' #write(MeshFile,"\\n")\n')
+ file.write(' #break\n')
+ file.write(' #case(3)\n')
+ file.write(' //do nothing\n')
+ file.write(' #break\n')
+ file.write(' #case(4)\n')
+ file.write(' #write(MeshFile,"\\n}\\n")\n')
+ file.write(' #break\n')
+ file.write(' #end\n')
+ file.write(' }\n')
+ file.write('\n')
+ file.write(' #debug concat(" - face_indices\\n") \n')
+ file.write(' #declare NumFaces=U*V*2;\n')
+ file.write(' #switch(Write)\n')
+ file.write(' #case(1)\n')
+ file.write(' #write(\n')
+ file.write(' MeshFile,\n')
+ file.write(' " face_indices {\\n"\n')
+ file.write(' " ", str(NumFaces,0,0),"\\n "\n')
+ file.write(' )\n')
+ file.write(' #break\n')
+ file.write(' #case(2)\n')
+ file.write(' #write (\n')
+ file.write(' MeshFile,\n')
+ file.write(' "# faces: ",str(NumFaces,0,0),"\\n"\n')
+ file.write(' )\n')
+ file.write(' #break\n')
+ file.write(' #case(3)\n')
+ file.write(' #write (\n')
+ file.write(' MeshFile,\n')
+ file.write(' "0,",str(NumFaces,0,0),",\\n"\n')
+ file.write(' )\n')
+ file.write(' #break\n')
+ file.write(' #case(4)\n')
+ file.write(' #write(\n')
+ file.write(' MeshFile,\n')
+ file.write(' "#declare FaceIndices= array[",str(NumFaces,0,0),"] {\\n "\n')
+ file.write(' )\n')
+ file.write(' #break\n')
+ file.write(' #end\n')
+ file.write(' face_indices {\n')
+ file.write(' NumFaces\n')
+ file.write(' #local I=0;\n')
+ file.write(' #local H=0;\n')
+ file.write(' #local NumVertices=dimension_size(VecArr,1);\n')
+ file.write(' #while (I<V)\n')
+ file.write(' #local J=0;\n')
+ file.write(' #while (J<U)\n')
+ file.write(' #local Ind=(I*U)+I+J;\n')
+ file.write(' <Ind, Ind+1, Ind+U+2>, <Ind, Ind+U+1, Ind+U+2>\n')
+ file.write(' #switch(Write)\n')
+ file.write(' #case(1)\n')
+ file.write(' #write(\n')
+ file.write(' MeshFile,\n')
+ file.write(' <Ind, Ind+1, Ind+U+2>, <Ind, Ind+U+1, Ind+U+2>\n')
+ file.write(' )\n')
+ file.write(' #break\n')
+ file.write(' #case(2)\n')
+ file.write(' #write(\n')
+ file.write(' MeshFile,\n')
+ file.write(' "f ",Ind+1,"/",Ind+1,"/",Ind+1," ",Ind+1+1,"/",Ind+1+1,"/",Ind+1+1," ",Ind+U+2+1,"/",Ind+U+2+1,"/",Ind+U+2+1,"\\n",\n')
+ file.write(' "f ",Ind+U+1+1,"/",Ind+U+1+1,"/",Ind+U+1+1," ",Ind+1,"/",Ind+1,"/",Ind+1," ",Ind+U+2+1,"/",Ind+U+2+1,"/",Ind+U+2+1,"\\n"\n')
+ file.write(' )\n')
+ file.write(' #break\n')
+ file.write(' #case(3)\n')
+ file.write(' #write(\n')
+ file.write(' MeshFile,\n')
+ file.write(' Ind,",",Ind+NumVertices,",",Ind+1,",",Ind+1+NumVertices,",",Ind+U+2,",",Ind+U+2+NumVertices,",\\n"\n')
+ file.write(' Ind+U+1,",",Ind+U+1+NumVertices,",",Ind,",",Ind+NumVertices,",",Ind+U+2,",",Ind+U+2+NumVertices,",\\n"\n')
+ file.write(' )\n')
+ file.write(' #break\n')
+ file.write(' #case(4)\n')
+ file.write(' #write(\n')
+ file.write(' MeshFile,\n')
+ file.write(' <Ind, Ind+1, Ind+U+2>, <Ind, Ind+U+1, Ind+U+2>\n')
+ file.write(' )\n')
+ file.write(' #break\n')
+ file.write(' #end\n')
+ file.write(' #local J=J+1;\n')
+ file.write(' #local H=H+1;\n')
+ file.write(' #if(Write=1 | Write=4)\n')
+ file.write(' #if(mod(H,3)=0)\n')
+ file.write(' #write(MeshFile,"\\n ")\n')
+ file.write(' #end \n')
+ file.write(' #end\n')
+ file.write(' #end\n')
+ file.write(' #local I=I+1;\n')
+ file.write(' #end\n')
+ file.write(' }\n')
+ file.write(' #switch(Write)\n')
+ file.write(' #case(1)\n')
+ file.write(' #write(MeshFile, "\\n }\\n}")\n')
+ file.write(' #fclose MeshFile\n')
+ file.write(' #debug concat(" Done writing\\n")\n')
+ file.write(' #break\n')
+ file.write(' #case(2)\n')
+ file.write(' #fclose MeshFile\n')
+ file.write(' #debug concat(" Done writing\\n")\n')
+ file.write(' #break\n')
+ file.write(' #case(3)\n')
+ file.write(' #fclose MeshFile\n')
+ file.write(' #debug concat(" Done writing\\n")\n')
+ file.write(' #break\n')
+ file.write(' #case(4)\n')
+ file.write(' #write(MeshFile, "\\n}\\n}")\n')
+ file.write(' #fclose MeshFile\n')
+ file.write(' #debug concat(" Done writing\\n")\n')
+ file.write(' #break\n')
+ file.write(' #end\n')
+ file.write(' }\n')
+ file.write('#end\n')
+
+ file.write('#macro MSM(SplineArray, SplRes, Interp_type, InterpRes, FileName)\n')
+ file.write(' #declare Build=CheckFileName(FileName);\n')
+ file.write(' #if(Build=0)\n')
+ file.write(' #debug concat("\\n Parsing mesh2 from file: ", FileName, "\\n")\n')
+ file.write(' #include FileName\n')
+ file.write(' object{Surface}\n')
+ file.write(' #else\n')
+ file.write(' #local NumVertices=(SplRes+1)*(InterpRes+1);\n')
+ file.write(' #local NumFaces=SplRes*InterpRes*2;\n')
+ file.write(' #debug concat("\\n Calculating ",str(NumVertices,0,0)," vertices for ", str(NumFaces,0,0)," triangles\\n\\n")\n')
+ file.write(' #local VecArr=array[NumVertices]\n')
+ file.write(' #local NormArr=array[NumVertices]\n')
+ file.write(' #local UVArr=array[NumVertices]\n')
+ file.write(' #local N=dimension_size(SplineArray,1);\n')
+ file.write(' #local TempSplArr0=array[N];\n')
+ file.write(' #local TempSplArr1=array[N];\n')
+ file.write(' #local TempSplArr2=array[N];\n')
+ file.write(' #local PosStep=1/SplRes;\n')
+ file.write(' #local InterpStep=1/InterpRes;\n')
+ file.write(' #local Count=0;\n')
+ file.write(' #local Pos=0;\n')
+ file.write(' #while(Pos<=1)\n')
+ file.write(' #local I=0;\n')
+ file.write(' #if (Pos=0)\n')
+ file.write(' #while (I<N)\n')
+ file.write(' #local Spl=spline{SplineArray[I]}\n')
+ file.write(' #local TempSplArr0[I]=<0,0,0>+Spl(Pos);\n')
+ file.write(' #local TempSplArr1[I]=<0,0,0>+Spl(Pos+PosStep);\n')
+ file.write(' #local TempSplArr2[I]=<0,0,0>+Spl(Pos-PosStep);\n')
+ file.write(' #local I=I+1;\n')
+ file.write(' #end\n')
+ file.write(' #local S0=BuildSpline(TempSplArr0, Interp_type)\n')
+ file.write(' #local S1=BuildSpline(TempSplArr1, Interp_type)\n')
+ file.write(' #local S2=BuildSpline(TempSplArr2, Interp_type)\n')
+ file.write(' #else\n')
+ file.write(' #while (I<N)\n')
+ file.write(' #local Spl=spline{SplineArray[I]}\n')
+ file.write(' #local TempSplArr1[I]=<0,0,0>+Spl(Pos+PosStep);\n')
+ file.write(' #local I=I+1;\n')
+ file.write(' #end\n')
+ file.write(' #local S1=BuildSpline(TempSplArr1, Interp_type)\n')
+ file.write(' #end\n')
+ file.write(' #local J=0;\n')
+ file.write(' #while (J<=1)\n')
+ file.write(' #local P0=<0,0,0>+S0(J);\n')
+ file.write(' #local P1=<0,0,0>+S1(J);\n')
+ file.write(' #local P2=<0,0,0>+S2(J);\n')
+ file.write(' #local P3=<0,0,0>+S0(J+InterpStep);\n')
+ file.write(' #local P4=<0,0,0>+S0(J-InterpStep);\n')
+ file.write(' #local B1=P4-P0;\n')
+ file.write(' #local B2=P2-P0;\n')
+ file.write(' #local B3=P3-P0;\n')
+ file.write(' #local B4=P1-P0;\n')
+ file.write(' #local N1=vcross(B1,B2);\n')
+ file.write(' #local N2=vcross(B2,B3);\n')
+ file.write(' #local N3=vcross(B3,B4);\n')
+ file.write(' #local N4=vcross(B4,B1);\n')
+ file.write(' #local Norm=vnormalize((N1+N2+N3+N4));\n')
+ file.write(' #local VecArr[Count]=P0;\n')
+ file.write(' #local NormArr[Count]=Norm;\n')
+ file.write(' #local UVArr[Count]=<J,Pos>;\n')
+ file.write(' #local J=J+InterpStep;\n')
+ file.write(' #local Count=Count+1;\n')
+ file.write(' #end\n')
+ file.write(' #local S2=spline{S0}\n')
+ file.write(' #local S0=spline{S1}\n')
+ file.write(' #debug concat("\\r Done ", str(Count,0,0)," vertices : ", str(100*Count/NumVertices,0,2)," %")\n')
+ file.write(' #local Pos=Pos+PosStep;\n')
+ file.write(' #end\n')
+ file.write(' BuildWriteMesh2(VecArr, NormArr, UVArr, InterpRes, SplRes, "")\n')
+ file.write(' #end\n')
+ file.write('#end\n\n')
+
+ file.write('#macro Coons(Spl1, Spl2, Spl3, Spl4, Iter_U, Iter_V, FileName)\n')
+ file.write(' #declare Build=CheckFileName(FileName);\n')
+ file.write(' #if(Build=0)\n')
+ file.write(' #debug concat("\\n Parsing mesh2 from file: ", FileName, "\\n")\n')
+ file.write(' #include FileName\n')
+ file.write(' object{Surface}\n')
+ file.write(' #else\n')
+ file.write(' #local NumVertices=(Iter_U+1)*(Iter_V+1);\n')
+ file.write(' #local NumFaces=Iter_U*Iter_V*2;\n')
+ file.write(' #debug concat("\\n Calculating ", str(NumVertices,0,0), " vertices for ",str(NumFaces,0,0), " triangles\\n\\n")\n')
+ file.write(' #declare VecArr=array[NumVertices] \n')
+ file.write(' #declare NormArr=array[NumVertices] \n')
+ file.write(' #local UVArr=array[NumVertices] \n')
+ file.write(' #local Spl1_0=Spl1(0);\n')
+ file.write(' #local Spl2_0=Spl2(0);\n')
+ file.write(' #local Spl3_0=Spl3(0);\n')
+ file.write(' #local Spl4_0=Spl4(0);\n')
+ file.write(' #local UStep=1/Iter_U;\n')
+ file.write(' #local VStep=1/Iter_V;\n')
+ file.write(' #local Count=0;\n')
+ file.write(' #local I=0;\n')
+ file.write(' #while (I<=1)\n')
+ file.write(' #local Im=1-I;\n')
+ file.write(' #local J=0;\n')
+ file.write(' #while (J<=1)\n')
+ file.write(' #local Jm=1-J;\n')
+ file.write(' #local C0=Im*Jm*(Spl1_0)+Im*J*(Spl2_0)+I*J*(Spl3_0)+I*Jm*(Spl4_0);\n')
+ file.write(' #local P0=LInterpolate(I, Spl1(J), Spl3(Jm)) + \n')
+ file.write(' LInterpolate(Jm, Spl2(I), Spl4(Im))-C0;\n')
+ file.write(' #declare VecArr[Count]=P0;\n')
+ file.write(' #local UVArr[Count]=<J,I>;\n')
+ file.write(' #local J=J+UStep;\n')
+ file.write(' #local Count=Count+1;\n')
+ file.write(' #end\n')
+ file.write(' #debug concat(\n')
+ file.write(' "\r Done ", str(Count,0,0)," vertices : ",\n')
+ file.write(' str(100*Count/NumVertices,0,2)," %"\n')
+ file.write(' )\n')
+ file.write(' #local I=I+VStep;\n')
+ file.write(' #end\n')
+ file.write(' #debug "\r Normals "\n')
+ file.write(' #local Count=0;\n')
+ file.write(' #local I=0;\n')
+ file.write(' #while (I<=Iter_V)\n')
+ file.write(' #local J=0;\n')
+ file.write(' #while (J<=Iter_U)\n')
+ file.write(' #local Ind=(I*Iter_U)+I+J;\n')
+ file.write(' #local P0=VecArr[Ind];\n')
+ file.write(' #if(J=0)\n')
+ file.write(' #local P1=P0+(P0-VecArr[Ind+1]);\n')
+ file.write(' #else\n')
+ file.write(' #local P1=VecArr[Ind-1];\n')
+ file.write(' #end\n')
+ file.write(' #if (J=Iter_U)\n')
+ file.write(' #local P2=P0+(P0-VecArr[Ind-1]);\n')
+ file.write(' #else\n')
+ file.write(' #local P2=VecArr[Ind+1];\n')
+ file.write(' #end\n')
+ file.write(' #if (I=0)\n')
+ file.write(' #local P3=P0+(P0-VecArr[Ind+Iter_U+1]);\n')
+ file.write(' #else\n')
+ file.write(' #local P3=VecArr[Ind-Iter_U-1];\n')
+ file.write(' #end\n')
+ file.write(' #if (I=Iter_V)\n')
+ file.write(' #local P4=P0+(P0-VecArr[Ind-Iter_U-1]);\n')
+ file.write(' #else\n')
+ file.write(' #local P4=VecArr[Ind+Iter_U+1];\n')
+ file.write(' #end\n')
+ file.write(' #local B1=P4-P0;\n')
+ file.write(' #local B2=P2-P0;\n')
+ file.write(' #local B3=P3-P0;\n')
+ file.write(' #local B4=P1-P0;\n')
+ file.write(' #local N1=vcross(B1,B2);\n')
+ file.write(' #local N2=vcross(B2,B3);\n')
+ file.write(' #local N3=vcross(B3,B4);\n')
+ file.write(' #local N4=vcross(B4,B1);\n')
+ file.write(' #local Norm=vnormalize((N1+N2+N3+N4));\n')
+ file.write(' #declare NormArr[Count]=Norm;\n')
+ file.write(' #local J=J+1;\n')
+ file.write(' #local Count=Count+1;\n')
+ file.write(' #end\n')
+ file.write(' #debug concat("\r Done ", str(Count,0,0)," normals : ",str(100*Count/NumVertices,0,2), " %")\n')
+ file.write(' #local I=I+1;\n')
+ file.write(' #end\n')
+ file.write(' BuildWriteMesh2(VecArr, NormArr, UVArr, Iter_U, Iter_V, FileName)\n')
+ file.write(' #end\n')
+ file.write('#end\n\n')
+
+ if bezier_sweep == False:
+ tabWrite("#declare %s =\n"%dataname)
+ if ob.pov.curveshape == 'sphere_sweep' and bezier_sweep == False:
+ tabWrite("union {\n")
+ for spl in ob.data.splines:
+ if spl.type != "BEZIER":
+ spl_type = "linear"
+ if spl.type == "NURBS":
+ spl_type = "cubic"
+ points=spl.points
+ numPoints=len(points)
+ if spl.use_cyclic_u:
+ numPoints+=3
+
+ tabWrite("sphere_sweep { %s_spline %s,\n"%(spl_type,numPoints))
+ if spl.use_cyclic_u:
+ pt1 = points[len(points)-1]
+ wpt1 = pt1.co
+ tabWrite("<%.4g,%.4g,%.4g>,%.4g\n" %(wpt1[0], wpt1[1], wpt1[2], pt1.radius*ob.data.bevel_depth))
+ for pt in points:
+ wpt = pt.co
+ tabWrite("<%.4g,%.4g,%.4g>,%.4g\n" %(wpt[0], wpt[1], wpt[2], pt.radius*ob.data.bevel_depth))
+ if spl.use_cyclic_u:
+ for i in range (0,2):
+ endPt=points[i]
+ wpt = endPt.co
+ tabWrite("<%.4g,%.4g,%.4g>,%.4g\n" %(wpt[0], wpt[1], wpt[2], endPt.radius*ob.data.bevel_depth))
+
+
+ tabWrite("}\n")
+
+ if ob.pov.curveshape == 'sor':
+ for spl in ob.data.splines:
+ if spl.type in {'POLY','NURBS'}:
+ points=spl.points
+ numPoints=len(points)
+ tabWrite("sor { %s,\n"%numPoints)
+ for pt in points:
+ wpt = pt.co
+ tabWrite("<%.4g,%.4g>\n" %(wpt[0], wpt[1]))
+ else:
+ tabWrite("box { 0,0\n")
+ if ob.pov.curveshape in {'lathe','prism'}:
+ spl = ob.data.splines[0]
+ if spl.type == "BEZIER":
+ points=spl.bezier_points
+ lenCur=len(points)-1
+ lenPts=lenCur*4
+ ifprism = ''
+ if ob.pov.curveshape in {'prism'}:
+ height = ob.data.extrude
+ ifprism = '-%s, %s,'%(height, height)
+ lenCur+=1
+ lenPts+=4
+ tabWrite("%s { bezier_spline %s %s,\n"%(ob.pov.curveshape,ifprism,lenPts))
+ for i in range(0,lenCur):
+ p1=points[i].co
+ pR=points[i].handle_right
+ end = i+1
+ if i == lenCur-1 and ob.pov.curveshape in {'prism'}:
+ end = 0
+ pL=points[end].handle_left
+ p2=points[end].co
+ line="<%.4g,%.4g>"%(p1[0],p1[1])
+ line+="<%.4g,%.4g>"%(pR[0],pR[1])
+ line+="<%.4g,%.4g>"%(pL[0],pL[1])
+ line+="<%.4g,%.4g>"%(p2[0],p2[1])
+ tabWrite("%s\n" %line)
+ else:
+ points=spl.points
+ lenCur=len(points)
+ lenPts=lenCur
+ ifprism = ''
+ if ob.pov.curveshape in {'prism'}:
+ height = ob.data.extrude
+ ifprism = '-%s, %s,'%(height, height)
+ lenPts+=3
+ spl_type = 'quadratic'
+ if spl.type == 'POLY':
+ spl_type = 'linear'
+ tabWrite("%s { %s_spline %s %s,\n"%(ob.pov.curveshape,spl_type,ifprism,lenPts))
+ if ob.pov.curveshape in {'prism'}:
+ pt = points[len(points)-1]
+ wpt = pt.co
+ tabWrite("<%.4g,%.4g>\n" %(wpt[0], wpt[1]))
+ for pt in points:
+ wpt = pt.co
+ tabWrite("<%.4g,%.4g>\n" %(wpt[0], wpt[1]))
+ if ob.pov.curveshape in {'prism'}:
+ for i in range(2):
+ pt = points[i]
+ wpt = pt.co
+ tabWrite("<%.4g,%.4g>\n" %(wpt[0], wpt[1]))
+ if bezier_sweep:
+ spl = ob.data.splines[0]
+ points=spl.bezier_points
+ lenCur = len(points)-1
+ numPoints = lenCur*4
+ if spl.use_cyclic_u:
+ lenCur += 1
+ numPoints += 4
+ tabWrite("#declare %s_bezier_points = array[%s]{\n"%(dataname,numPoints))
+ for i in range(lenCur):
+ p1=points[i].co
+ pR=points[i].handle_right
+ end = i+1
+ if spl.use_cyclic_u and i == (lenCur - 1):
+ end = 0
+ pL=points[end].handle_left
+ p2=points[end].co
+ line="<%.4g,%.4g,%.4f>"%(p1[0],p1[1],p1[2])
+ line+="<%.4g,%.4g,%.4f>"%(pR[0],pR[1],pR[2])
+ line+="<%.4g,%.4g,%.4f>"%(pL[0],pL[1],pL[2])
+ line+="<%.4g,%.4g,%.4f>"%(p2[0],p2[1],p2[2])
+ tabWrite("%s\n" %line)
+ tabWrite("}\n")
+ #tabWrite('#include "bezier_spheresweep.inc"\n') #now inlined
+ tabWrite('#declare %s = object{Shape_Bezierpoints_Sphere_Sweep(%s, %s_bezier_points, %.4f) \n'%(dataname,ob.data.resolution_u,dataname,ob.data.bevel_depth))
+ if ob.pov.curveshape in {'loft'}:
+ tabWrite('object {MSM(%s,%s,"c",%s,"")\n'%(dataname,ob.pov.res_u,ob.pov.res_v))
+ if ob.pov.curveshape in {'birail'}:
+ splines = '%s1,%s2,%s3,%s4'%(dataname,dataname,dataname,dataname)
+ tabWrite('object {Coons(%s, %s, %s, "")\n'%(splines,ob.pov.res_u,ob.pov.res_v))
+ povMatName = "Default_texture"
+ if ob.active_material:
+ #povMatName = string_strip_hyphen(bpy.path.clean_name(ob.active_material.name))
+ try:
+ material = ob.active_material
+ writeObjectMaterial(material, ob)
+ except IndexError:
+ print(me)
+ #tabWrite("texture {%s}\n"%povMatName)
+ if ob.pov.curveshape in {'prism'}:
+ tabWrite("rotate <90,0,0>\n")
+ tabWrite("scale y*-1\n" )
+ tabWrite("}\n")
+
+#################################################################
+
+
def exportMeta(metas):
# TODO - blenders 'motherball' naming is not supported.
@@ -1452,24 +2161,24 @@ def write_pov(filename, scene=None, info_callback=None):
# channeldata = fire
# sc_fr = '%s/%s/%s/%05d' % (efutil.export_path, efutil.scene_filename(), bpy.context.scene.name, bpy.context.scene.frame_current)
- # if not os.path.exists( sc_fr ):
- # os.makedirs(sc_fr)
+ # if not os.path.exists( sc_fr ):
+ # os.makedirs(sc_fr)
#
- # smoke_filename = '%s.smoke' % bpy.path.clean_name(domain.name)
- # smoke_path = '/'.join([sc_fr, smoke_filename])
+ # smoke_filename = '%s.smoke' % bpy.path.clean_name(domain.name)
+ # smoke_path = '/'.join([sc_fr, smoke_filename])
#
- # with open(smoke_path, 'wb') as smoke_file:
- # # Binary densitygrid file format
- # #
- # # File header
- # smoke_file.write(b'SMOKE') #magic number
- # smoke_file.write(struct.pack('<I', big_res[0]))
- # smoke_file.write(struct.pack('<I', big_res[1]))
- # smoke_file.write(struct.pack('<I', big_res[2]))
+ # with open(smoke_path, 'wb') as smoke_file:
+ # # Binary densitygrid file format
+ # #
+ # # File header
+ # smoke_file.write(b'SMOKE') #magic number
+ # smoke_file.write(struct.pack('<I', big_res[0]))
+ # smoke_file.write(struct.pack('<I', big_res[1]))
+ # smoke_file.write(struct.pack('<I', big_res[2]))
# Density data
- # smoke_file.write(struct.pack('<%df'%len(channeldata), *channeldata))
+ # smoke_file.write(struct.pack('<%df'%len(channeldata), *channeldata))
#
- # LuxLog('Binary SMOKE file written: %s' % (smoke_path))
+ # LuxLog('Binary SMOKE file written: %s' % (smoke_path))
#return big_res[0], big_res[1], big_res[2], channeldata
@@ -1533,9 +2242,9 @@ def write_pov(filename, scene=None, info_callback=None):
file.write("}\n")
- #file.write(" interpolate 1\n")
- #file.write(" frequency 0\n")
- #file.write(" }\n")
+ #file.write(" interpolate 1\n")
+ #file.write(" frequency 0\n")
+ #file.write(" }\n")
#file.write("}\n")
ob_num = 0
@@ -1544,7 +2253,8 @@ def write_pov(filename, scene=None, info_callback=None):
# XXX I moved all those checks here, as there is no need to compute names
# for object we won't export here!
- if ob.type in {'LAMP', 'CAMERA', 'EMPTY', 'META', 'ARMATURE', 'LATTICE'}:
+ if (ob.type in {'LAMP', 'CAMERA', 'EMPTY',
+ 'META', 'ARMATURE', 'LATTICE'}):
continue
smokeFlag=False
for mod in ob.modifiers:
@@ -1734,18 +2444,6 @@ def write_pov(filename, scene=None, info_callback=None):
if renderEmitter == False:
continue #don't render mesh, skip to next object.
- try:
- me = ob.to_mesh(scene, True, 'RENDER')
- except:
- # happens when curves cant be made into meshes because of no-data
- continue
-
- importance = ob.pov.importance_value
- me_materials = me.materials
- me_faces = me.tessfaces[:]
-
- if not me or not me_faces:
- continue
#############################################
# Generating a name for object just like materials to be able to use it
@@ -1780,265 +2478,596 @@ def write_pov(filename, scene=None, info_callback=None):
continue
print("Writing Down First Occurence")
+
+############################################Povray Primitives
+ # special exportCurves() function takes care of writing
+ # lathe, sphere_sweep, birail, and loft
+ if ob.type == 'CURVE' and (ob.pov.curveshape in
+ {'lathe', 'sphere_sweep', 'loft'}):
+ continue #Don't render proxy mesh, skip to next object
+
+ if ob.pov.object_as == 'ISOSURFACE':
+ tabWrite("#declare %s = isosurface{ \n"% povdataname)
+ tabWrite("function{ \n")
+ textName = ob.pov.iso_function_text
+ if textName:
+ node_tree = bpy.context.scene.node_tree
+ for node in node_tree.nodes:
+ if node.bl_idname == "IsoPropsNode" and node.label == ob.name:
+ for inp in node.inputs:
+ if inp:
+ tabWrite("#declare %s = %.6g;\n"%(inp.name,inp.default_value))
+
+ text = bpy.data.texts[textName]
+ for line in text.lines:
+ split = line.body.split()
+ if split[0] != "#declare":
+ tabWrite("%s\n"%line.body)
+ else:
+ tabWrite("abs(x) - 2 + y")
+ tabWrite("}\n")
+ tabWrite("threshold %.6g\n"%ob.pov.threshold)
+ tabWrite("max_gradient %.6g\n"%ob.pov.max_gradient)
+ tabWrite("accuracy %.6g\n"%ob.pov.accuracy)
+ tabWrite("contained_by { ")
+ if ob.pov.contained_by == "sphere":
+ tabWrite("sphere {0,%.6g}}\n"%ob.pov.container_scale)
+ else:
+ tabWrite("box {-%.6g,%.6g}}\n"%(ob.pov.container_scale,ob.pov.container_scale))
+ if ob.pov.all_intersections:
+ tabWrite("all_intersections\n")
+ else:
+ if ob.pov.max_trace > 1:
+ tabWrite("max_trace %.6g\n"%ob.pov.max_trace)
+ povMatName = "Default_texture"
+ if ob.active_material:
+ #povMatName = string_strip_hyphen(bpy.path.clean_name(ob.active_material.name))
+ try:
+ material = ob.active_material
+ writeObjectMaterial(material, ob)
+ except IndexError:
+ print(me)
+ #tabWrite("texture {%s}\n"%povMatName)
+ tabWrite("scale %.6g\n"%(1/ob.pov.container_scale))
+ tabWrite("}\n")
+ continue #Don't render proxy mesh, skip to next object
- uv_textures = me.tessface_uv_textures
- if len(uv_textures) > 0:
- if me.uv_textures.active and uv_textures.active.data:
- uv_layer = uv_textures.active.data
- else:
- uv_layer = None
+ if ob.pov.object_as == 'SUPERELLIPSOID':
+ tabWrite("#declare %s = superellipsoid{ <%.4f,%.4f>\n"%(povdataname,ob.pov.se_n2,ob.pov.se_n1))
+ povMatName = "Default_texture"
+ if ob.active_material:
+ #povMatName = string_strip_hyphen(bpy.path.clean_name(ob.active_material.name))
+ try:
+ material = ob.active_material
+ writeObjectMaterial(material, ob)
+ except IndexError:
+ print(me)
+ #tabWrite("texture {%s}\n"%povMatName)
+ write_object_modifiers(scene,ob,file)
+ tabWrite("}\n")
+ continue #Don't render proxy mesh, skip to next object
+
+
+ if ob.pov.object_as == 'SUPERTORUS':
+ rMajor = ob.pov.st_major_radius
+ rMinor = ob.pov.st_minor_radius
+ ring = ob.pov.st_ring
+ cross = ob.pov.st_cross
+ accuracy=ob.pov.st_accuracy
+ gradient=ob.pov.st_max_gradient
+ ############Inline Supertorus macro
+ file.write("#macro Supertorus(RMj, RMn, MajorControl, MinorControl, Accuracy, MaxGradient)\n")
+ file.write(" #local CP = 2/MinorControl;\n")
+ file.write(" #local RP = 2/MajorControl;\n")
+ file.write(" isosurface {\n")
+ file.write(" function { pow( pow(abs(pow(pow(abs(x),RP) + pow(abs(z),RP), 1/RP) - RMj),CP) + pow(abs(y),CP) ,1/CP) - RMn }\n")
+ file.write(" threshold 0\n")
+ file.write(" contained_by {box {<-RMj-RMn,-RMn,-RMj-RMn>, < RMj+RMn, RMn, RMj+RMn>}}\n")
+ file.write(" #if(MaxGradient >= 1)\n")
+ file.write(" max_gradient MaxGradient\n")
+ file.write(" #else\n")
+ file.write(" evaluate 1, 10, 0.1\n")
+ file.write(" #end\n")
+ file.write(" accuracy Accuracy\n")
+ file.write(" }\n")
+ file.write("#end\n")
+ ############
+ tabWrite("#declare %s = object{ Supertorus( %.4g,%.4g,%.4g,%.4g,%.4g,%.4g)\n"%(povdataname,rMajor,rMinor,ring,cross,accuracy,gradient))
+ povMatName = "Default_texture"
+ if ob.active_material:
+ #povMatName = string_strip_hyphen(bpy.path.clean_name(ob.active_material.name))
+ try:
+ material = ob.active_material
+ writeObjectMaterial(material, ob)
+ except IndexError:
+ print(me)
+ #tabWrite("texture {%s}\n"%povMatName)
+ write_object_modifiers(scene,ob,file)
+ tabWrite("rotate x*90\n")
+ tabWrite("}\n")
+ continue #Don't render proxy mesh, skip to next object
+
+
+ if ob.pov.object_as == 'PLANE':
+ tabWrite("#declare %s = plane{ <0,0,1>,1\n"%povdataname)
+ povMatName = "Default_texture"
+ if ob.active_material:
+ #povMatName = string_strip_hyphen(bpy.path.clean_name(ob.active_material.name))
+ try:
+ material = ob.active_material
+ writeObjectMaterial(material, ob)
+ except IndexError:
+ print(me)
+ #tabWrite("texture {%s}\n"%povMatName)
+ write_object_modifiers(scene,ob,file)
+ #tabWrite("rotate x*90\n")
+ tabWrite("}\n")
+ continue #Don't render proxy mesh, skip to next object
- try:
- #vcol_layer = me.vertex_colors.active.data
- vcol_layer = me.tessface_vertex_colors.active.data
- except AttributeError:
- vcol_layer = None
- faces_verts = [f.vertices[:] for f in me_faces]
- faces_normals = [f.normal[:] for f in me_faces]
- verts_normals = [v.normal[:] for v in me.vertices]
+ if ob.pov.object_as == 'BOX':
+ tabWrite("#declare %s = box { -1,1\n"%povdataname)
+ povMatName = "Default_texture"
+ if ob.active_material:
+ #povMatName = string_strip_hyphen(bpy.path.clean_name(ob.active_material.name))
+ try:
+ material = ob.active_material
+ writeObjectMaterial(material, ob)
+ except IndexError:
+ print(me)
+ #tabWrite("texture {%s}\n"%povMatName)
+ write_object_modifiers(scene,ob,file)
+ #tabWrite("rotate x*90\n")
+ tabWrite("}\n")
+ continue #Don't render proxy mesh, skip to next object
+
+
+ if ob.pov.object_as == 'CONE':
+ br = ob.pov.cone_base_radius
+ cr = ob.pov.cone_cap_radius
+ bz = ob.pov.cone_base_z
+ cz = ob.pov.cone_cap_z
+ tabWrite("#declare %s = cone { <0,0,%.4f>,%.4f,<0,0,%.4f>,%.4f\n"%(povdataname,bz,br,cz,cr))
+ povMatName = "Default_texture"
+ if ob.active_material:
+ #povMatName = string_strip_hyphen(bpy.path.clean_name(ob.active_material.name))
+ try:
+ material = ob.active_material
+ writeObjectMaterial(material, ob)
+ except IndexError:
+ print(me)
+ #tabWrite("texture {%s}\n"%povMatName)
+ write_object_modifiers(scene,ob,file)
+ #tabWrite("rotate x*90\n")
+ tabWrite("}\n")
+ continue #Don't render proxy mesh, skip to next object
- # quads incur an extra face
- quadCount = sum(1 for f in faces_verts if len(f) == 4)
+ if ob.pov.object_as == 'CYLINDER':
+ tabWrite("#declare %s = cylinder { <0,0,1>,<0,0,-1>,1\n"%povdataname)
+ povMatName = "Default_texture"
+ if ob.active_material:
+ #povMatName = string_strip_hyphen(bpy.path.clean_name(ob.active_material.name))
+ try:
+ material = ob.active_material
+ writeObjectMaterial(material, ob)
+ except IndexError:
+ print(me)
+ #tabWrite("texture {%s}\n"%povMatName)
+ write_object_modifiers(scene,ob,file)
+ #tabWrite("rotate x*90\n")
+ tabWrite("}\n")
+ continue #Don't render proxy mesh, skip to next object
+
+
+ if ob.pov.object_as == 'HEIGHT_FIELD':
+ data = ""
+ filename = ob.pov.hf_filename
+ data += '"%s"'%filename
+ gamma = ' gamma %.4f'%ob.pov.hf_gamma
+ data += gamma
+ if ob.pov.hf_premultiplied:
+ data += ' premultiplied on'
+ if ob.pov.hf_smooth:
+ data += ' smooth'
+ if ob.pov.hf_water > 0:
+ data += ' water_level %.4f'%ob.pov.hf_water
+ #hierarchy = ob.pov.hf_hierarchy
+ tabWrite('#declare %s = height_field { %s\n'%(povdataname,data))
+ povMatName = "Default_texture"
+ if ob.active_material:
+ #povMatName = string_strip_hyphen(bpy.path.clean_name(ob.active_material.name))
+ try:
+ material = ob.active_material
+ writeObjectMaterial(material, ob)
+ except IndexError:
+ print(me)
+ #tabWrite("texture {%s}\n"%povMatName)
+ write_object_modifiers(scene,ob,file)
+ tabWrite("rotate x*90\n")
+ tabWrite("translate <-0.5,0.5,0>\n")
+ tabWrite("scale <0,-1,0>\n")
+ tabWrite("}\n")
+ continue #Don't render proxy mesh, skip to next object
- # Use named declaration to allow reference e.g. for baking. MR
- file.write("\n")
- tabWrite("#declare %s =\n" % povdataname)
- tabWrite("mesh2 {\n")
- tabWrite("vertex_vectors {\n")
- tabWrite("%d" % len(me.vertices)) # vert count
-
- tabStr = tab * tabLevel
- for v in me.vertices:
- if linebreaksinlists:
- file.write(",\n")
- file.write(tabStr + "<%.6f, %.6f, %.6f>" % v.co[:]) # vert count
+
+ if ob.pov.object_as == 'SPHERE':
+
+ tabWrite("#declare %s = sphere { 0,%6f\n"%(povdataname,ob.pov.sphere_radius))
+ povMatName = "Default_texture"
+ if ob.active_material:
+ #povMatName = string_strip_hyphen(bpy.path.clean_name(ob.active_material.name))
+ try:
+ material = ob.active_material
+ writeObjectMaterial(material, ob)
+ except IndexError:
+ print(me)
+ #tabWrite("texture {%s}\n"%povMatName)
+ write_object_modifiers(scene,ob,file)
+ #tabWrite("rotate x*90\n")
+ tabWrite("}\n")
+ continue #Don't render proxy mesh, skip to next object
+
+ if ob.pov.object_as == 'TORUS':
+ tabWrite("#declare %s = torus { %.4f,%.4f\n"%(povdataname,ob.pov.torus_major_radius,ob.pov.torus_minor_radius))
+ povMatName = "Default_texture"
+ if ob.active_material:
+ #povMatName = string_strip_hyphen(bpy.path.clean_name(ob.active_material.name))
+ try:
+ material = ob.active_material
+ writeObjectMaterial(material, ob)
+ except IndexError:
+ print(me)
+ #tabWrite("texture {%s}\n"%povMatName)
+ write_object_modifiers(scene,ob,file)
+ tabWrite("rotate x*90\n")
+ tabWrite("}\n")
+ continue #Don't render proxy mesh, skip to next object
+
+
+ if ob.pov.object_as == 'PARAMETRIC':
+ tabWrite("#declare %s = parametric {\n"%povdataname)
+ tabWrite("function { %s }\n"%ob.pov.x_eq)
+ tabWrite("function { %s }\n"%ob.pov.y_eq)
+ tabWrite("function { %s }\n"%ob.pov.z_eq)
+ tabWrite("<%.4f,%.4f>, <%.4f,%.4f>\n"%(ob.pov.u_min,ob.pov.v_min,ob.pov.u_max,ob.pov.v_max))
+ if ob.pov.contained_by == "sphere":
+ tabWrite("contained_by { sphere{0, 2} }\n")
else:
- file.write(", ")
- file.write("<%.6f, %.6f, %.6f>" % v.co[:]) # vert count
- #tabWrite("<%.6f, %.6f, %.6f>" % v.co[:]) # vert count
- file.write("\n")
- tabWrite("}\n")
+ tabWrite("contained_by { box{-2, 2} }\n")
+ tabWrite("max_gradient %.6f\n"%ob.pov.max_gradient)
+ tabWrite("accuracy %.6f\n"%ob.pov.accuracy)
+ tabWrite("precompute 10 x,y,z\n")
+ tabWrite("}\n")
+ continue #Don't render proxy mesh, skip to next object
+
+ if ob.pov.object_as == 'POLYCIRCLE':
+ #TODO write below macro Once:
+ #if write_polytocircle_macro_once == 0:
+ file.write("/****************************\n")
+ file.write("This macro was written by 'And'.\n")
+ file.write("Link:(http://news.povray.org/povray.binaries.scene-files/)\n")
+ file.write("****************************/\n")
+ file.write("//from math.inc:\n")
+ file.write("#macro VPerp_Adjust(V, Axis)\n")
+ file.write(" vnormalize(vcross(vcross(Axis, V), Axis))\n")
+ file.write("#end\n")
+ file.write("//Then for the actual macro\n")
+ file.write("#macro Shape_Slice_Plane_2P_1V(point1, point2, clip_direct)\n")
+ file.write("#local p1 = point1 + <0,0,0>;\n")
+ file.write("#local p2 = point2 + <0,0,0>;\n")
+ file.write("#local clip_v = vnormalize(clip_direct + <0,0,0>);\n")
+ file.write("#local direct_v1 = vnormalize(p2 - p1);\n")
+ file.write("#if(vdot(direct_v1, clip_v) = 1)\n")
+ file.write(' #error "Shape_Slice_Plane_2P_1V error: Can\'t decide plane"\n')
+ file.write("#end\n\n")
+ file.write("#local norm = -vnormalize(clip_v - direct_v1*vdot(direct_v1,clip_v));\n")
+ file.write("#local d = vdot(norm, p1);\n")
+ file.write("plane{\n")
+ file.write("norm, d\n")
+ file.write("}\n")
+ file.write("#end\n\n")
+ file.write("//polygon to circle\n")
+ file.write("#macro Shape_Polygon_To_Circle_Blending(_polygon_n, _side_face, _polygon_circumscribed_radius, _circle_radius, _height)\n")
+ file.write("#local n = int(_polygon_n);\n")
+ file.write("#if(n < 3)\n")
+ file.write(" #error ""\n")
+ file.write("#end\n\n")
+ file.write("#local front_v = VPerp_Adjust(_side_face, z);\n")
+ file.write("#if(vdot(front_v, x) >= 0)\n")
+ file.write(" #local face_ang = acos(vdot(-y, front_v));\n")
+ file.write("#else\n")
+ file.write(" #local face_ang = -acos(vdot(-y, front_v));\n")
+ file.write("#end\n")
+ file.write("#local polyg_ext_ang = 2*pi/n;\n")
+ file.write("#local polyg_outer_r = _polygon_circumscribed_radius;\n")
+ file.write("#local polyg_inner_r = polyg_outer_r*cos(polyg_ext_ang/2);\n")
+ file.write("#local cycle_r = _circle_radius;\n")
+ file.write("#local h = _height;\n")
+ file.write("#if(polyg_outer_r < 0 | cycle_r < 0 | h <= 0)\n")
+ file.write(' #error "error: each side length must be positive"\n')
+ file.write("#end\n\n")
+ file.write("#local multi = 1000;\n")
+ file.write("#local poly_obj =\n")
+ file.write("polynomial{\n")
+ file.write("4,\n")
+ file.write("xyz(0,2,2): multi*1,\n")
+ file.write("xyz(2,0,1): multi*2*h,\n")
+ file.write("xyz(1,0,2): multi*2*(polyg_inner_r-cycle_r),\n")
+ file.write("xyz(2,0,0): multi*(-h*h),\n")
+ file.write("xyz(0,0,2): multi*(-pow(cycle_r - polyg_inner_r, 2)),\n")
+ file.write("xyz(1,0,1): multi*2*h*(-2*polyg_inner_r + cycle_r),\n")
+ file.write("xyz(1,0,0): multi*2*h*h*polyg_inner_r,\n")
+ file.write("xyz(0,0,1): multi*2*h*polyg_inner_r*(polyg_inner_r - cycle_r),\n")
+ file.write("xyz(0,0,0): multi*(-pow(polyg_inner_r*h, 2))\n")
+ file.write("sturm\n")
+ file.write("}\n\n")
+ file.write("#local mockup1 =\n")
+ file.write("difference{\n")
+ file.write(" cylinder{\n")
+ file.write(" <0,0,0.0>,<0,0,h>, max(polyg_outer_r, cycle_r)\n")
+ file.write(" }\n\n")
+ file.write(" #for(i, 0, n-1)\n")
+ file.write(" object{\n")
+ file.write(" poly_obj\n")
+ file.write(" inverse\n")
+ file.write(" rotate <0, 0, -90 + degrees(polyg_ext_ang*i)>\n")
+ file.write(" }\n")
+ file.write(" object{\n")
+ file.write(" Shape_Slice_Plane_2P_1V(<polyg_inner_r,0,0>,<cycle_r,0,h>,x)\n")
+ file.write(" rotate <0, 0, -90 + degrees(polyg_ext_ang*i)>\n")
+ file.write(" }\n")
+ file.write(" #end\n")
+ file.write("}\n\n")
+ file.write("object{\n")
+ file.write("mockup1\n")
+ file.write("rotate <0, 0, degrees(face_ang)>\n")
+ file.write("}\n")
+ file.write("#end\n")
+ #Use the macro
+ ngon = ob.pov.polytocircle_ngon
+ ngonR = ob.pov.polytocircle_ngonR
+ circleR = ob.pov.polytocircle_circleR
+ tabWrite("#declare %s = object { Shape_Polygon_To_Circle_Blending(%s, z, %.4f, %.4f, 2) rotate x*180 translate z*1\n"%(povdataname,ngon,ngonR,circleR))
+ tabWrite("}\n")
+ continue #Don't render proxy mesh, skip to next object
- # Build unique Normal list
- uniqueNormals = {}
- for fi, f in enumerate(me_faces):
- fv = faces_verts[fi]
- # [-1] is a dummy index, use a list so we can modify in place
- if f.use_smooth: # Use vertex normals
- for v in fv:
- key = verts_normals[v]
- uniqueNormals[key] = [-1]
- else: # Use face normal
- key = faces_normals[fi]
- uniqueNormals[key] = [-1]
-
- tabWrite("normal_vectors {\n")
- tabWrite("%d" % len(uniqueNormals)) # vert count
- idx = 0
- tabStr = tab * tabLevel
- for no, index in uniqueNormals.items():
- if linebreaksinlists:
- file.write(",\n")
- file.write(tabStr + "<%.6f, %.6f, %.6f>" % no) # vert count
+
+############################################else try to export mesh
+ else:
+ try:
+ me = ob.to_mesh(scene, True, 'RENDER')
+ except:
+ # happens when curves cant be made into meshes because of no-data
+ continue
+
+ importance = ob.pov.importance_value
+ me_materials = me.materials
+ me_faces = me.tessfaces[:]
+
+ if not me or not me_faces:
+ continue
+
+ uv_textures = me.tessface_uv_textures
+ if len(uv_textures) > 0:
+ if me.uv_textures.active and uv_textures.active.data:
+ uv_layer = uv_textures.active.data
else:
- file.write(", ")
- file.write("<%.6f, %.6f, %.6f>" % no) # vert count
- index[0] = idx
- idx += 1
- file.write("\n")
- tabWrite("}\n")
+ uv_layer = None
+
+ try:
+ #vcol_layer = me.vertex_colors.active.data
+ vcol_layer = me.tessface_vertex_colors.active.data
+ except AttributeError:
+ vcol_layer = None
- # Vertex colors
- vertCols = {} # Use for material colors also.
+ faces_verts = [f.vertices[:] for f in me_faces]
+ faces_normals = [f.normal[:] for f in me_faces]
+ verts_normals = [v.normal[:] for v in me.vertices]
- if uv_layer:
- # Generate unique UV's
- uniqueUVs = {}
- #n = 0
- for fi, uv in enumerate(uv_layer):
+ # quads incur an extra face
+ quadCount = sum(1 for f in faces_verts if len(f) == 4)
- if len(faces_verts[fi]) == 4:
- uvs = uv_layer[fi].uv[0], uv_layer[fi].uv[1], uv_layer[fi].uv[2], uv_layer[fi].uv[3]
+ # Use named declaration to allow reference e.g. for baking. MR
+ file.write("\n")
+ tabWrite("#declare %s =\n" % povdataname)
+ tabWrite("mesh2 {\n")
+ tabWrite("vertex_vectors {\n")
+ tabWrite("%d" % len(me.vertices)) # vert count
+
+ tabStr = tab * tabLevel
+ for v in me.vertices:
+ if linebreaksinlists:
+ file.write(",\n")
+ file.write(tabStr + "<%.6f, %.6f, %.6f>" % v.co[:]) # vert count
else:
- uvs = uv_layer[fi].uv[0], uv_layer[fi].uv[1], uv_layer[fi].uv[2]
+ file.write(", ")
+ file.write("<%.6f, %.6f, %.6f>" % v.co[:]) # vert count
+ #tabWrite("<%.6f, %.6f, %.6f>" % v.co[:]) # vert count
+ file.write("\n")
+ tabWrite("}\n")
- for uv in uvs:
- uniqueUVs[uv[:]] = [-1]
+ # Build unique Normal list
+ uniqueNormals = {}
+ for fi, f in enumerate(me_faces):
+ fv = faces_verts[fi]
+ # [-1] is a dummy index, use a list so we can modify in place
+ if f.use_smooth: # Use vertex normals
+ for v in fv:
+ key = verts_normals[v]
+ uniqueNormals[key] = [-1]
+ else: # Use face normal
+ key = faces_normals[fi]
+ uniqueNormals[key] = [-1]
- tabWrite("uv_vectors {\n")
- #print unique_uvs
- tabWrite("%d" % len(uniqueUVs)) # vert count
+ tabWrite("normal_vectors {\n")
+ tabWrite("%d" % len(uniqueNormals)) # vert count
idx = 0
tabStr = tab * tabLevel
- for uv, index in uniqueUVs.items():
+ for no, index in uniqueNormals.items():
if linebreaksinlists:
file.write(",\n")
- file.write(tabStr + "<%.6f, %.6f>" % uv)
+ file.write(tabStr + "<%.6f, %.6f, %.6f>" % no) # vert count
else:
file.write(", ")
- file.write("<%.6f, %.6f>" % uv)
+ file.write("<%.6f, %.6f, %.6f>" % no) # vert count
index[0] = idx
idx += 1
- '''
- else:
- # Just add 1 dummy vector, no real UV's
- tabWrite('1') # vert count
- file.write(',\n\t\t<0.0, 0.0>')
- '''
file.write("\n")
tabWrite("}\n")
- if me.vertex_colors:
- #Write down vertex colors as a texture for each vertex
- tabWrite("texture_list {\n")
- tabWrite("%d\n" % (((len(me_faces)-quadCount) * 3 )+ quadCount * 4)) # works only with tris and quad mesh for now
- VcolIdx=0
- if comments:
- file.write("\n //Vertex colors: one simple pigment texture per vertex\n")
- for fi, f in enumerate(me_faces):
- # annoying, index may be invalid
- material_index = f.material_index
- try:
- material = me_materials[material_index]
- except:
- material = None
- if material: #and material.use_vertex_color_paint: #Always use vertex color when there is some for now
-
- col = vcol_layer[fi]
+ # Vertex colors
+ vertCols = {} # Use for material colors also.
+
+ if uv_layer:
+ # Generate unique UV's
+ uniqueUVs = {}
+ #n = 0
+ for fi, uv in enumerate(uv_layer):
if len(faces_verts[fi]) == 4:
- cols = col.color1, col.color2, col.color3, col.color4
+ uvs = uv_layer[fi].uv[0], uv_layer[fi].uv[1], uv_layer[fi].uv[2], uv_layer[fi].uv[3]
else:
- cols = col.color1, col.color2, col.color3
-
- for col in cols:
- key = col[0], col[1], col[2], material_index # Material index!
- VcolIdx+=1
- vertCols[key] = [VcolIdx]
- if linebreaksinlists:
- tabWrite("texture {pigment{ color rgb <%6f,%6f,%6f> }}\n" % (col[0], col[1], col[2]))
- else:
- tabWrite("texture {pigment{ color rgb <%6f,%6f,%6f> }}" % (col[0], col[1], col[2]))
- tabStr = tab * tabLevel
- else:
- if material:
- # Multiply diffuse with SSS Color
- if material.subsurface_scattering.use:
- diffuse_color = [i * j for i, j in zip(material.subsurface_scattering.color[:], material.diffuse_color[:])]
- key = diffuse_color[0], diffuse_color[1], diffuse_color[2], \
- material_index
- vertCols[key] = [-1]
- else:
- diffuse_color = material.diffuse_color[:]
- key = diffuse_color[0], diffuse_color[1], diffuse_color[2], \
- material_index
- vertCols[key] = [-1]
+ uvs = uv_layer[fi].uv[0], uv_layer[fi].uv[1], uv_layer[fi].uv[2]
- tabWrite("\n}\n")
- # Face indices
- tabWrite("\nface_indices {\n")
- tabWrite("%d" % (len(me_faces) + quadCount)) # faces count
- tabStr = tab * tabLevel
+ for uv in uvs:
+ uniqueUVs[uv[:]] = [-1]
- for fi, f in enumerate(me_faces):
- fv = faces_verts[fi]
- material_index = f.material_index
- if len(fv) == 4:
- indices = (0, 1, 2), (0, 2, 3)
- else:
- indices = ((0, 1, 2),)
-
- if vcol_layer:
- col = vcol_layer[fi]
-
- if len(fv) == 4:
- cols = col.color1, col.color2, col.color3, col.color4
+ tabWrite("uv_vectors {\n")
+ #print unique_uvs
+ tabWrite("%d" % len(uniqueUVs)) # vert count
+ idx = 0
+ tabStr = tab * tabLevel
+ for uv, index in uniqueUVs.items():
+ if linebreaksinlists:
+ file.write(",\n")
+ file.write(tabStr + "<%.6f, %.6f>" % uv)
else:
- cols = col.color1, col.color2, col.color3
-
- if not me_materials or me_materials[material_index] is None: # No materials
- for i1, i2, i3 in indices:
- if linebreaksinlists:
- file.write(",\n")
- # vert count
- file.write(tabStr + "<%d,%d,%d>" % (fv[i1], fv[i2], fv[i3]))
- else:
- file.write(", ")
- file.write("<%d,%d,%d>" % (fv[i1], fv[i2], fv[i3])) # vert count
+ file.write(", ")
+ file.write("<%.6f, %.6f>" % uv)
+ index[0] = idx
+ idx += 1
+ '''
else:
- material = me_materials[material_index]
- for i1, i2, i3 in indices:
- if me.vertex_colors: #and material.use_vertex_color_paint:
- # Color per vertex - vertex color
-
- col1 = cols[i1]
- col2 = cols[i2]
- col3 = cols[i3]
+ # Just add 1 dummy vector, no real UV's
+ tabWrite('1') # vert count
+ file.write(',\n\t\t<0.0, 0.0>')
+ '''
+ file.write("\n")
+ tabWrite("}\n")
- ci1 = vertCols[col1[0], col1[1], col1[2], material_index][0]
- ci2 = vertCols[col2[0], col2[1], col2[2], material_index][0]
- ci3 = vertCols[col3[0], col3[1], col3[2], material_index][0]
+ if me.vertex_colors:
+ #Write down vertex colors as a texture for each vertex
+ tabWrite("texture_list {\n")
+ tabWrite("%d\n" % (((len(me_faces)-quadCount) * 3 )+ quadCount * 4)) # works only with tris and quad mesh for now
+ VcolIdx=0
+ if comments:
+ file.write("\n //Vertex colors: one simple pigment texture per vertex\n")
+ for fi, f in enumerate(me_faces):
+ # annoying, index may be invalid
+ material_index = f.material_index
+ try:
+ material = me_materials[material_index]
+ except:
+ material = None
+ if material: #and material.use_vertex_color_paint: #Always use vertex color when there is some for now
+
+ col = vcol_layer[fi]
+
+ if len(faces_verts[fi]) == 4:
+ cols = col.color1, col.color2, col.color3, col.color4
else:
- # Color per material - flat material color
+ cols = col.color1, col.color2, col.color3
+
+ for col in cols:
+ key = col[0], col[1], col[2], material_index # Material index!
+ VcolIdx+=1
+ vertCols[key] = [VcolIdx]
+ if linebreaksinlists:
+ tabWrite("texture {pigment{ color rgb <%6f,%6f,%6f> }}\n" % (col[0], col[1], col[2]))
+ else:
+ tabWrite("texture {pigment{ color rgb <%6f,%6f,%6f> }}" % (col[0], col[1], col[2]))
+ tabStr = tab * tabLevel
+ else:
+ if material:
+ # Multiply diffuse with SSS Color
if material.subsurface_scattering.use:
diffuse_color = [i * j for i, j in zip(material.subsurface_scattering.color[:], material.diffuse_color[:])]
+ key = diffuse_color[0], diffuse_color[1], diffuse_color[2], \
+ material_index
+ vertCols[key] = [-1]
else:
diffuse_color = material.diffuse_color[:]
- ci1 = ci2 = ci3 = vertCols[diffuse_color[0], diffuse_color[1], \
- diffuse_color[2], f.material_index][0]
- # ci are zero based index so we'll subtract 1 from them
- if linebreaksinlists:
- file.write(",\n")
- file.write(tabStr + "<%d,%d,%d>, %d,%d,%d" % \
- (fv[i1], fv[i2], fv[i3], ci1-1, ci2-1, ci3-1)) # vert count
- else:
- file.write(", ")
- file.write("<%d,%d,%d>, %d,%d,%d" % \
- (fv[i1], fv[i2], fv[i3], ci1-1, ci2-1, ci3-1)) # vert count
+ key = diffuse_color[0], diffuse_color[1], diffuse_color[2], \
+ material_index
+ vertCols[key] = [-1]
- file.write("\n")
- tabWrite("}\n")
+ tabWrite("\n}\n")
+ # Face indices
+ tabWrite("\nface_indices {\n")
+ tabWrite("%d" % (len(me_faces) + quadCount)) # faces count
+ tabStr = tab * tabLevel
- # normal_indices indices
- tabWrite("normal_indices {\n")
- tabWrite("%d" % (len(me_faces) + quadCount)) # faces count
- tabStr = tab * tabLevel
- for fi, fv in enumerate(faces_verts):
+ for fi, f in enumerate(me_faces):
+ fv = faces_verts[fi]
+ material_index = f.material_index
+ if len(fv) == 4:
+ indices = (0, 1, 2), (0, 2, 3)
+ else:
+ indices = ((0, 1, 2),)
- if len(fv) == 4:
- indices = (0, 1, 2), (0, 2, 3)
- else:
- indices = ((0, 1, 2),)
-
- for i1, i2, i3 in indices:
- if me_faces[fi].use_smooth:
- if linebreaksinlists:
- file.write(",\n")
- file.write(tabStr + "<%d,%d,%d>" %\
- (uniqueNormals[verts_normals[fv[i1]]][0],\
- uniqueNormals[verts_normals[fv[i2]]][0],\
- uniqueNormals[verts_normals[fv[i3]]][0])) # vert count
+ if vcol_layer:
+ col = vcol_layer[fi]
+
+ if len(fv) == 4:
+ cols = col.color1, col.color2, col.color3, col.color4
else:
- file.write(", ")
- file.write("<%d,%d,%d>" %\
- (uniqueNormals[verts_normals[fv[i1]]][0],\
- uniqueNormals[verts_normals[fv[i2]]][0],\
- uniqueNormals[verts_normals[fv[i3]]][0])) # vert count
+ cols = col.color1, col.color2, col.color3
+
+ if not me_materials or me_materials[material_index] is None: # No materials
+ for i1, i2, i3 in indices:
+ if linebreaksinlists:
+ file.write(",\n")
+ # vert count
+ file.write(tabStr + "<%d,%d,%d>" % (fv[i1], fv[i2], fv[i3]))
+ else:
+ file.write(", ")
+ file.write("<%d,%d,%d>" % (fv[i1], fv[i2], fv[i3])) # vert count
else:
- idx = uniqueNormals[faces_normals[fi]][0]
- if linebreaksinlists:
- file.write(",\n")
- file.write(tabStr + "<%d,%d,%d>" % (idx, idx, idx)) # vert count
- else:
- file.write(", ")
- file.write("<%d,%d,%d>" % (idx, idx, idx)) # vert count
+ material = me_materials[material_index]
+ for i1, i2, i3 in indices:
+ if me.vertex_colors: #and material.use_vertex_color_paint:
+ # Color per vertex - vertex color
+
+ col1 = cols[i1]
+ col2 = cols[i2]
+ col3 = cols[i3]
+
+ ci1 = vertCols[col1[0], col1[1], col1[2], material_index][0]
+ ci2 = vertCols[col2[0], col2[1], col2[2], material_index][0]
+ ci3 = vertCols[col3[0], col3[1], col3[2], material_index][0]
+ else:
+ # Color per material - flat material color
+ if material.subsurface_scattering.use:
+ diffuse_color = [i * j for i, j in zip(material.subsurface_scattering.color[:], material.diffuse_color[:])]
+ else:
+ diffuse_color = material.diffuse_color[:]
+ ci1 = ci2 = ci3 = vertCols[diffuse_color[0], diffuse_color[1], \
+ diffuse_color[2], f.material_index][0]
+ # ci are zero based index so we'll subtract 1 from them
+ if linebreaksinlists:
+ file.write(",\n")
+ file.write(tabStr + "<%d,%d,%d>, %d,%d,%d" % \
+ (fv[i1], fv[i2], fv[i3], ci1-1, ci2-1, ci3-1)) # vert count
+ else:
+ file.write(", ")
+ file.write("<%d,%d,%d>, %d,%d,%d" % \
+ (fv[i1], fv[i2], fv[i3], ci1-1, ci2-1, ci3-1)) # vert count
- file.write("\n")
- tabWrite("}\n")
+ file.write("\n")
+ tabWrite("}\n")
- if uv_layer:
- tabWrite("uv_indices {\n")
+ # normal_indices indices
+ tabWrite("normal_indices {\n")
tabWrite("%d" % (len(me_faces) + quadCount)) # faces count
tabStr = tab * tabLevel
for fi, fv in enumerate(faces_verts):
@@ -2048,256 +3077,416 @@ def write_pov(filename, scene=None, info_callback=None):
else:
indices = ((0, 1, 2),)
- uv = uv_layer[fi]
- if len(faces_verts[fi]) == 4:
- uvs = uv.uv[0][:], uv.uv[1][:], uv.uv[2][:], uv.uv[3][:]
- else:
- uvs = uv.uv[0][:], uv.uv[1][:], uv.uv[2][:]
-
for i1, i2, i3 in indices:
- if linebreaksinlists:
- file.write(",\n")
- file.write(tabStr + "<%d,%d,%d>" % (
- uniqueUVs[uvs[i1]][0],\
- uniqueUVs[uvs[i2]][0],\
- uniqueUVs[uvs[i3]][0]))
+ if me_faces[fi].use_smooth:
+ if linebreaksinlists:
+ file.write(",\n")
+ file.write(tabStr + "<%d,%d,%d>" %\
+ (uniqueNormals[verts_normals[fv[i1]]][0],\
+ uniqueNormals[verts_normals[fv[i2]]][0],\
+ uniqueNormals[verts_normals[fv[i3]]][0])) # vert count
+ else:
+ file.write(", ")
+ file.write("<%d,%d,%d>" %\
+ (uniqueNormals[verts_normals[fv[i1]]][0],\
+ uniqueNormals[verts_normals[fv[i2]]][0],\
+ uniqueNormals[verts_normals[fv[i3]]][0])) # vert count
else:
- file.write(", ")
- file.write("<%d,%d,%d>" % (
- uniqueUVs[uvs[i1]][0],\
- uniqueUVs[uvs[i2]][0],\
- uniqueUVs[uvs[i3]][0]))
+ idx = uniqueNormals[faces_normals[fi]][0]
+ if linebreaksinlists:
+ file.write(",\n")
+ file.write(tabStr + "<%d,%d,%d>" % (idx, idx, idx)) # vert count
+ else:
+ file.write(", ")
+ file.write("<%d,%d,%d>" % (idx, idx, idx)) # vert count
file.write("\n")
tabWrite("}\n")
- if me.materials:
- try:
- material = me.materials[0] # dodgy
- writeObjectMaterial(material, ob)
- except IndexError:
- print(me)
+ if uv_layer:
+ tabWrite("uv_indices {\n")
+ tabWrite("%d" % (len(me_faces) + quadCount)) # faces count
+ tabStr = tab * tabLevel
+ for fi, fv in enumerate(faces_verts):
- #Importance for radiosity sampling added here:
- tabWrite("radiosity { \n")
- tabWrite("importance %3g \n" % importance)
- tabWrite("}\n")
-
- tabWrite("}\n") # End of mesh block
- else:
- # No vertex colors, so write material colors as vertex colors
- for i, material in enumerate(me_materials):
-
- if material:
- # Multiply diffuse with SSS Color
- if material.subsurface_scattering.use:
- diffuse_color = [i * j for i, j in zip(material.subsurface_scattering.color[:], material.diffuse_color[:])]
- key = diffuse_color[0], diffuse_color[1], diffuse_color[2], i # i == f.mat
- vertCols[key] = [-1]
- else:
- diffuse_color = material.diffuse_color[:]
- key = diffuse_color[0], diffuse_color[1], diffuse_color[2], i # i == f.mat
- vertCols[key] = [-1]
-
- idx = 0
- LocalMaterialNames = []
- for col, index in vertCols.items():
- #if me_materials:
- mater = me_materials[col[3]]
- if me_materials is None: #XXX working?
- material_finish = DEF_MAT_NAME # not working properly,
- trans = 0.0
+ if len(fv) == 4:
+ indices = (0, 1, 2), (0, 2, 3)
+ else:
+ indices = ((0, 1, 2),)
+ uv = uv_layer[fi]
+ if len(faces_verts[fi]) == 4:
+ uvs = uv.uv[0][:], uv.uv[1][:], uv.uv[2][:], uv.uv[3][:]
else:
- material_finish = materialNames[mater.name]
- if mater.use_transparency:
- trans = 1.0 - mater.alpha
+ uvs = uv.uv[0][:], uv.uv[1][:], uv.uv[2][:]
+
+ for i1, i2, i3 in indices:
+ if linebreaksinlists:
+ file.write(",\n")
+ file.write(tabStr + "<%d,%d,%d>" % (
+ uniqueUVs[uvs[i1]][0],\
+ uniqueUVs[uvs[i2]][0],\
+ uniqueUVs[uvs[i3]][0]))
else:
- trans = 0.0
- if (mater.specular_color.s == 0.0):
- colored_specular_found = False
- else:
- colored_specular_found = True
+ file.write(", ")
+ file.write("<%d,%d,%d>" % (
+ uniqueUVs[uvs[i1]][0],\
+ uniqueUVs[uvs[i2]][0],\
+ uniqueUVs[uvs[i3]][0]))
+
+ file.write("\n")
+ tabWrite("}\n")
+
+ if me.materials:
+ try:
+ material = me.materials[0] # dodgy
+ writeObjectMaterial(material, ob)
+ except IndexError:
+ print(me)
+
+ #Importance for radiosity sampling added here:
+ tabWrite("radiosity { \n")
+ tabWrite("importance %3g \n" % importance)
+ tabWrite("}\n")
+
+ tabWrite("}\n") # End of mesh block
+ else:
+ # No vertex colors, so write material colors as vertex colors
+ for i, material in enumerate(me_materials):
+
+ if material:
+ # Multiply diffuse with SSS Color
+ if material.subsurface_scattering.use:
+ diffuse_color = [i * j for i, j in zip(material.subsurface_scattering.color[:], material.diffuse_color[:])]
+ key = diffuse_color[0], diffuse_color[1], diffuse_color[2], i # i == f.mat
+ vertCols[key] = [-1]
+ else:
+ diffuse_color = material.diffuse_color[:]
+ key = diffuse_color[0], diffuse_color[1], diffuse_color[2], i # i == f.mat
+ vertCols[key] = [-1]
+
+ idx = 0
+ LocalMaterialNames = []
+ for col, index in vertCols.items():
+ #if me_materials:
+ mater = me_materials[col[3]]
+ if me_materials is None: #XXX working?
+ material_finish = DEF_MAT_NAME # not working properly,
+ trans = 0.0
- if mater.use_transparency and mater.transparency_method == 'RAYTRACE':
- povFilter = mater.raytrace_transparency.filter * (1.0 - mater.alpha)
- trans = (1.0 - mater.alpha) - povFilter
else:
- povFilter = 0.0
+ material_finish = materialNames[mater.name]
+ if mater.use_transparency:
+ trans = 1.0 - mater.alpha
+ else:
+ trans = 0.0
+ if (mater.specular_color.s == 0.0):
+ colored_specular_found = False
+ else:
+ colored_specular_found = True
+
+ if mater.use_transparency and mater.transparency_method == 'RAYTRACE':
+ povFilter = mater.raytrace_transparency.filter * (1.0 - mater.alpha)
+ trans = (1.0 - mater.alpha) - povFilter
+ else:
+ povFilter = 0.0
+
+ ##############SF
+ texturesDif = ""
+ texturesSpec = ""
+ texturesNorm = ""
+ texturesAlpha = ""
+ #proceduralFlag=False
+ for t in mater.texture_slots:
+ if t and t.use and t.texture.type != 'IMAGE' and t.texture.type != 'NONE':
+ proceduralFlag=True
+ image_filename = "PAT_%s"%string_strip_hyphen(bpy.path.clean_name(t.texture.name))
+ if image_filename:
+ if t.use_map_color_diffuse:
+ texturesDif = image_filename
+ # colvalue = t.default_value # UNUSED
+ t_dif = t
+ if t_dif.texture.pov.tex_gamma_enable:
+ imgGamma = (" gamma %.3g " % t_dif.texture.pov.tex_gamma_value)
+ if t.use_map_specular or t.use_map_raymir:
+ texturesSpec = image_filename
+ # colvalue = t.default_value # UNUSED
+ t_spec = t
+ if t.use_map_normal:
+ texturesNorm = image_filename
+ # colvalue = t.normal_factor * 10.0 # UNUSED
+ #textNormName=t.texture.image.name + ".normal"
+ #was the above used? --MR
+ t_nor = t
+ if t.use_map_alpha:
+ texturesAlpha = image_filename
+ # colvalue = t.alpha_factor * 10.0 # UNUSED
+ #textDispName=t.texture.image.name + ".displ"
+ #was the above used? --MR
+ t_alpha = t
+
+ if t and t.texture.type == 'IMAGE' and t.use and t.texture.image and t.texture.pov.tex_pattern_type == 'emulator':
+ proceduralFlag=False
+ if t.texture.image.packed_file:
+ orig_image_filename=t.texture.image.filepath_raw
+ unpackedfilename= os.path.join(preview_dir,("unpacked_img_"+(string_strip_hyphen(bpy.path.clean_name(t.texture.name)))))
+ if not os.path.exists(unpackedfilename):
+ # record which images that were newly copied and can be safely
+ # cleaned up
+ unpacked_images.append(unpackedfilename)
+ t.texture.image.filepath_raw=unpackedfilename
+ t.texture.image.save()
+ image_filename = unpackedfilename
+ t.texture.image.filepath_raw=orig_image_filename
+ else:
+ image_filename = path_image(t.texture.image)
+ # IMAGE SEQUENCE BEGINS
+ if image_filename:
+ if bpy.data.images[t.texture.image.name].source == 'SEQUENCE':
+ korvaa = "." + str(bpy.data.textures[t.texture.name].image_user.frame_offset + 1).zfill(3) + "."
+ image_filename = image_filename.replace(".001.", korvaa)
+ print(" seq debug ")
+ print(image_filename)
+ # IMAGE SEQUENCE ENDS
+ imgGamma = ""
+ if image_filename:
+ if t.use_map_color_diffuse:
+ texturesDif = image_filename
+ # colvalue = t.default_value # UNUSED
+ t_dif = t
+ if t_dif.texture.pov.tex_gamma_enable:
+ imgGamma = (" gamma %.3g " % t_dif.texture.pov.tex_gamma_value)
+ if t.use_map_specular or t.use_map_raymir:
+ texturesSpec = image_filename
+ # colvalue = t.default_value # UNUSED
+ t_spec = t
+ if t.use_map_normal:
+ texturesNorm = image_filename
+ # colvalue = t.normal_factor * 10.0 # UNUSED
+ #textNormName=t.texture.image.name + ".normal"
+ #was the above used? --MR
+ t_nor = t
+ if t.use_map_alpha:
+ texturesAlpha = image_filename
+ # colvalue = t.alpha_factor * 10.0 # UNUSED
+ #textDispName=t.texture.image.name + ".displ"
+ #was the above used? --MR
+ t_alpha = t
+
+ ####################################################################################
+
+
+ file.write("\n")
+ # THIS AREA NEEDS TO LEAVE THE TEXTURE OPEN UNTIL ALL MAPS ARE WRITTEN DOWN.
+ # --MR
+ currentMatName = string_strip_hyphen(materialNames[mater.name])
+ LocalMaterialNames.append(currentMatName)
+ file.write("\n #declare MAT_%s = \ntexture{\n" % currentMatName)
+
+ ################################################################################
- ##############SF
- texturesDif = ""
- texturesSpec = ""
- texturesNorm = ""
- texturesAlpha = ""
- #proceduralFlag=False
- for t in mater.texture_slots:
- if t and t.use and t.texture.type != 'IMAGE' and t.texture.type != 'NONE':
- proceduralFlag=True
- image_filename = "PAT_%s"%string_strip_hyphen(bpy.path.clean_name(t.texture.name))
- if image_filename:
- if t.use_map_color_diffuse:
- texturesDif = image_filename
- # colvalue = t.default_value # UNUSED
- t_dif = t
- if t_dif.texture.pov.tex_gamma_enable:
- imgGamma = (" gamma %.3g " % t_dif.texture.pov.tex_gamma_value)
- if t.use_map_specular or t.use_map_raymir:
- texturesSpec = image_filename
- # colvalue = t.default_value # UNUSED
- t_spec = t
- if t.use_map_normal:
- texturesNorm = image_filename
- # colvalue = t.normal_factor * 10.0 # UNUSED
- #textNormName=t.texture.image.name + ".normal"
- #was the above used? --MR
- t_nor = t
- if t.use_map_alpha:
- texturesAlpha = image_filename
- # colvalue = t.alpha_factor * 10.0 # UNUSED
- #textDispName=t.texture.image.name + ".displ"
- #was the above used? --MR
- t_alpha = t
-
- if t and t.texture.type == 'IMAGE' and t.use and t.texture.image and t.texture.pov.tex_pattern_type == 'emulator':
- proceduralFlag=False
- if t.texture.image.packed_file:
- orig_image_filename=t.texture.image.filepath_raw
- workDir=os.path.dirname(__file__)
- previewDir=os.path.join(workDir, "preview")
- unpackedfilename= os.path.join(previewDir,("unpacked_img_"+(string_strip_hyphen(bpy.path.clean_name(t.texture.name)))))
- if not os.path.exists(unpackedfilename):
- # record which images that were newly copied and can be safely
- # cleaned up
- unpacked_images.append(unpackedfilename)
- t.texture.image.filepath_raw=unpackedfilename
- t.texture.image.save()
- image_filename = unpackedfilename
- t.texture.image.filepath_raw=orig_image_filename
- else:
- image_filename = path_image(t.texture.image)
- # IMAGE SEQUENCE BEGINS
- if image_filename:
- if bpy.data.images[t.texture.image.name].source == 'SEQUENCE':
- korvaa = "." + str(bpy.data.textures[t.texture.name].image_user.frame_offset + 1).zfill(3) + "."
- image_filename = image_filename.replace(".001.", korvaa)
- print(" seq debug ")
- print(image_filename)
- # IMAGE SEQUENCE ENDS
- imgGamma = ""
- if image_filename:
- if t.use_map_color_diffuse:
- texturesDif = image_filename
- # colvalue = t.default_value # UNUSED
- t_dif = t
- if t_dif.texture.pov.tex_gamma_enable:
- imgGamma = (" gamma %.3g " % t_dif.texture.pov.tex_gamma_value)
- if t.use_map_specular or t.use_map_raymir:
- texturesSpec = image_filename
- # colvalue = t.default_value # UNUSED
- t_spec = t
- if t.use_map_normal:
- texturesNorm = image_filename
- # colvalue = t.normal_factor * 10.0 # UNUSED
- #textNormName=t.texture.image.name + ".normal"
- #was the above used? --MR
- t_nor = t
- if t.use_map_alpha:
- texturesAlpha = image_filename
- # colvalue = t.alpha_factor * 10.0 # UNUSED
- #textDispName=t.texture.image.name + ".displ"
- #was the above used? --MR
- t_alpha = t
-
- ####################################################################################
-
-
- file.write("\n")
- # THIS AREA NEEDS TO LEAVE THE TEXTURE OPEN UNTIL ALL MAPS ARE WRITTEN DOWN.
- # --MR
- currentMatName = string_strip_hyphen(materialNames[mater.name])
- LocalMaterialNames.append(currentMatName)
- file.write("\n #declare MAT_%s = \ntexture{\n" % currentMatName)
-
- ################################################################################
-
- if mater.pov.replacement_text != "":
- file.write("%s\n" % mater.pov.replacement_text)
- #################################################################################
- if mater.diffuse_shader == 'MINNAERT':
- tabWrite("\n")
- tabWrite("aoi\n")
- tabWrite("texture_map {\n")
- tabWrite("[%.3g finish {diffuse %.3g}]\n" % \
- (mater.darkness / 2.0, 2.0 - mater.darkness))
- tabWrite("[%.3g\n" % (1.0 - (mater.darkness / 2.0)))
-
- if mater.diffuse_shader == 'FRESNEL':
- # For FRESNEL diffuse in POV, we'll layer slope patterned textures
- # with lamp vector as the slope vector and nest one slope per lamp
- # into each texture map's entry.
-
- c = 1
- while (c <= lampCount):
- tabWrite("slope { lampTarget%s }\n" % (c))
+ if mater.pov.replacement_text != "":
+ file.write("%s\n" % mater.pov.replacement_text)
+ #################################################################################
+ if mater.diffuse_shader == 'MINNAERT':
+ tabWrite("\n")
+ tabWrite("aoi\n")
tabWrite("texture_map {\n")
- # Diffuse Fresnel value and factor go up to five,
- # other kind of values needed: used the number 5 below to remap
tabWrite("[%.3g finish {diffuse %.3g}]\n" % \
- ((5.0 - mater.diffuse_fresnel) / 5,
- (mater.diffuse_intensity *
- ((5.0 - mater.diffuse_fresnel_factor) / 5))))
- tabWrite("[%.3g\n" % ((mater.diffuse_fresnel_factor / 5) *
- (mater.diffuse_fresnel / 5.0)))
- c += 1
-
- # if shader is a 'FRESNEL' or 'MINNAERT': slope pigment pattern or aoi
- # and texture map above, the rest below as one of its entry
-
- if texturesSpec != "" or texturesAlpha != "":
- if texturesSpec != "":
- # tabWrite("\n")
- tabWrite("pigment_pattern {\n")
- if texturesSpec and texturesSpec.startswith("PAT_"):
- tabWrite("function{f%s(x,y,z).grey}" %texturesSpec)
+ (mater.darkness / 2.0, 2.0 - mater.darkness))
+ tabWrite("[%.3g\n" % (1.0 - (mater.darkness / 2.0)))
+
+ if mater.diffuse_shader == 'FRESNEL':
+ # For FRESNEL diffuse in POV, we'll layer slope patterned textures
+ # with lamp vector as the slope vector and nest one slope per lamp
+ # into each texture map's entry.
+
+ c = 1
+ while (c <= lampCount):
+ tabWrite("slope { lampTarget%s }\n" % (c))
+ tabWrite("texture_map {\n")
+ # Diffuse Fresnel value and factor go up to five,
+ # other kind of values needed: used the number 5 below to remap
+ tabWrite("[%.3g finish {diffuse %.3g}]\n" % \
+ ((5.0 - mater.diffuse_fresnel) / 5,
+ (mater.diffuse_intensity *
+ ((5.0 - mater.diffuse_fresnel_factor) / 5))))
+ tabWrite("[%.3g\n" % ((mater.diffuse_fresnel_factor / 5) *
+ (mater.diffuse_fresnel / 5.0)))
+ c += 1
+
+ # if shader is a 'FRESNEL' or 'MINNAERT': slope pigment pattern or aoi
+ # and texture map above, the rest below as one of its entry
+
+ if texturesSpec != "" or texturesAlpha != "":
+ if texturesSpec != "":
+ # tabWrite("\n")
+ tabWrite("pigment_pattern {\n")
+ if texturesSpec and texturesSpec.startswith("PAT_"):
+ tabWrite("function{f%s(x,y,z).grey}" %texturesSpec)
+ else:
+ # POV-Ray "scale" is not a number of repetitions factor, but its
+ # inverse, a standard scale factor.
+ # Offset seems needed relatively to scale so probably center of the
+ # scale is not the same in blender and POV
+ mappingSpec =imgMapTransforms(t_spec)
+ # mappingSpec = "translate <%.4g,%.4g,%.4g> scale <%.4g,%.4g,%.4g>\n" % \
+ # (-t_spec.offset.x, t_spec.offset.y, t_spec.offset.z,
+ # 1.0 / t_spec.scale.x, 1.0 / t_spec.scale.y,
+ # 1.0 / t_spec.scale.z)
+ tabWrite("uv_mapping image_map{%s \"%s\" %s}\n" % \
+ (imageFormat(texturesSpec), texturesSpec, imgMap(t_spec)))
+ tabWrite("%s\n" % mappingSpec)
+ tabWrite("}\n")
+ tabWrite("texture_map {\n")
+ tabWrite("[0 \n")
+
+ if texturesDif == "":
+ if texturesAlpha != "":
+ tabWrite("\n")
+ if texturesAlpha and texturesAlpha.startswith("PAT_"):
+ tabWrite("function{f%s(x,y,z).transmit}\n" %texturesAlpha)
+ else:
+ # POV-Ray "scale" is not a number of repetitions factor, but its
+ # inverse, a standard scale factor.
+ # Offset seems needed relatively to scale so probably center of the
+ # scale is not the same in blender and POV
+ mappingAlpha = imgMapTransforms(t_alpha)
+ # mappingAlpha = " translate <%.4g, %.4g, %.4g> " \
+ # "scale <%.4g, %.4g, %.4g>\n" % \
+ # (-t_alpha.offset.x, -t_alpha.offset.y,
+ # t_alpha.offset.z, 1.0 / t_alpha.scale.x,
+ # 1.0 / t_alpha.scale.y, 1.0 / t_alpha.scale.z)
+ tabWrite("pigment {pigment_pattern {uv_mapping image_map" \
+ "{%s \"%s\" %s}%s" % \
+ (imageFormat(texturesAlpha), texturesAlpha,
+ imgMap(t_alpha), mappingAlpha))
+ tabWrite("}\n")
+ tabWrite("pigment_map {\n")
+ tabWrite("[0 color rgbft<0,0,0,1,1>]\n")
+ tabWrite("[1 color rgbft<%.3g, %.3g, %.3g, %.3g, %.3g>]\n" % \
+ (col[0], col[1], col[2], povFilter, trans))
+ tabWrite("}\n")
+ tabWrite("}\n")
+
+ else:
+
+ tabWrite("pigment {rgbft<%.3g, %.3g, %.3g, %.3g, %.3g>}\n" % \
+ (col[0], col[1], col[2], povFilter, trans))
+
+ if texturesSpec != "":
+ # Level 1 is no specular
+ tabWrite("finish {%s}\n" % (safety(material_finish, Level=1)))
+
+ else:
+ # Level 2 is translated spec
+ tabWrite("finish {%s}\n" % (safety(material_finish, Level=2)))
+
else:
+ mappingDif = imgMapTransforms(t_dif)
+
+ if texturesAlpha != "":
+ mappingAlpha = imgMapTransforms(t_alpha)
+ # mappingAlpha = " translate <%.4g,%.4g,%.4g> " \
+ # "scale <%.4g,%.4g,%.4g>" % \
+ # (-t_alpha.offset.x, -t_alpha.offset.y,
+ # t_alpha.offset.z, 1.0 / t_alpha.scale.x,
+ # 1.0 / t_alpha.scale.y, 1.0 / t_alpha.scale.z)
+ tabWrite("pigment {\n")
+ tabWrite("pigment_pattern {\n")
+ if texturesAlpha and texturesAlpha.startswith("PAT_"):
+ tabWrite("function{f%s(x,y,z).transmit}\n" %texturesAlpha)
+ else:
+ tabWrite("uv_mapping image_map{%s \"%s\" %s}%s}\n" % \
+ (imageFormat(texturesAlpha), texturesAlpha,
+ imgMap(t_alpha), mappingAlpha))
+ tabWrite("pigment_map {\n")
+ tabWrite("[0 color rgbft<0,0,0,1,1>]\n")
+ #if texturesAlpha and texturesAlpha.startswith("PAT_"):
+ #tabWrite("[1 pigment{%s}]\n" %texturesDif)
+ if texturesDif and not texturesDif.startswith("PAT_"):
+ tabWrite("[1 uv_mapping image_map {%s \"%s\" %s} %s]\n" % \
+ (imageFormat(texturesDif), texturesDif,
+ (imgGamma + imgMap(t_dif)), mappingDif))
+ elif texturesDif and texturesDif.startswith("PAT_"):
+ tabWrite("[1 %s]\n" %texturesDif)
+ tabWrite("}\n")
+ tabWrite("}\n")
+ if texturesAlpha and texturesAlpha.startswith("PAT_"):
+ tabWrite("}\n")
+
+ else:
+ if texturesDif and texturesDif.startswith("PAT_"):
+ tabWrite("pigment{%s}\n" %texturesDif)
+ else:
+ tabWrite("pigment {uv_mapping image_map {%s \"%s\" %s}%s}\n" % \
+ (imageFormat(texturesDif), texturesDif,
+ (imgGamma + imgMap(t_dif)), mappingDif))
+
+ if texturesSpec != "":
+ # Level 1 is no specular
+ tabWrite("finish {%s}\n" % (safety(material_finish, Level=1)))
+
+ else:
+ # Level 2 is translated specular
+ tabWrite("finish {%s}\n" % (safety(material_finish, Level=2)))
+
+ ## scale 1 rotate y*0
+ #imageMap = ("{image_map {%s \"%s\" %s }\n" % \
+ # (imageFormat(textures),textures,imgMap(t_dif)))
+ #tabWrite("uv_mapping pigment %s} %s finish {%s}\n" % \
+ # (imageMap,mapping,safety(material_finish)))
+ #tabWrite("pigment {uv_mapping image_map {%s \"%s\" %s}%s} " \
+ # "finish {%s}\n" % \
+ # (imageFormat(texturesDif), texturesDif, imgMap(t_dif),
+ # mappingDif, safety(material_finish)))
+ if texturesNorm != "":
+ ## scale 1 rotate y*0
# POV-Ray "scale" is not a number of repetitions factor, but its
# inverse, a standard scale factor.
# Offset seems needed relatively to scale so probably center of the
# scale is not the same in blender and POV
- mappingSpec =imgMapTransforms(t_spec)
- # mappingSpec = "translate <%.4g,%.4g,%.4g> scale <%.4g,%.4g,%.4g>\n" % \
- # (-t_spec.offset.x, t_spec.offset.y, t_spec.offset.z,
- # 1.0 / t_spec.scale.x, 1.0 / t_spec.scale.y,
- # 1.0 / t_spec.scale.z)
- tabWrite("uv_mapping image_map{%s \"%s\" %s}\n" % \
- (imageFormat(texturesSpec), texturesSpec, imgMap(t_spec)))
- tabWrite("%s\n" % mappingSpec)
- tabWrite("}\n")
- tabWrite("texture_map {\n")
- tabWrite("[0 \n")
+ mappingNor =imgMapTransforms(t_nor)
+ # mappingNor = " translate <%.4g,%.4g,%.4g> scale <%.4g,%.4g,%.4g>" % \
+ # (-t_nor.offset.x, -t_nor.offset.y, t_nor.offset.z,
+ # 1.0 / t_nor.scale.x, 1.0 / t_nor.scale.y,
+ # 1.0 / t_nor.scale.z)
+ #imageMapNor = ("{bump_map {%s \"%s\" %s mapping}" % \
+ # (imageFormat(texturesNorm),texturesNorm,imgMap(t_nor)))
+ #We were not using the above maybe we should?
+ if texturesNorm and texturesNorm.startswith("PAT_"):
+ tabWrite("normal{function{f%s(x,y,z).grey} bump_size %.4g}\n" %(texturesNorm, t_nor.normal_factor * 10))
+ else:
+ tabWrite("normal {uv_mapping bump_map " \
+ "{%s \"%s\" %s bump_size %.4g }%s}\n" % \
+ (imageFormat(texturesNorm), texturesNorm, imgMap(t_nor),
+ t_nor.normal_factor * 10, mappingNor))
+ if texturesSpec != "":
+ tabWrite("]\n")
+ ##################Second index for mapping specular max value###############
+ tabWrite("[1 \n")
- if texturesDif == "":
+ if texturesDif == "" and mater.pov.replacement_text == "":
if texturesAlpha != "":
- tabWrite("\n")
+ # POV-Ray "scale" is not a number of repetitions factor, but its inverse,
+ # a standard scale factor.
+ # Offset seems needed relatively to scale so probably center of the scale
+ # is not the same in blender and POV
+ # Strange that the translation factor for scale is not the same as for
+ # translate.
+ # TODO: verify both matches with blender internal.
+ mappingAlpha = imgMapTransforms(t_alpha)
+ # mappingAlpha = " translate <%.4g,%.4g,%.4g> scale <%.4g,%.4g,%.4g>\n" % \
+ # (-t_alpha.offset.x, -t_alpha.offset.y, t_alpha.offset.z,
+ # 1.0 / t_alpha.scale.x, 1.0 / t_alpha.scale.y,
+ # 1.0 / t_alpha.scale.z)
if texturesAlpha and texturesAlpha.startswith("PAT_"):
tabWrite("function{f%s(x,y,z).transmit}\n" %texturesAlpha)
else:
- # POV-Ray "scale" is not a number of repetitions factor, but its
- # inverse, a standard scale factor.
- # Offset seems needed relatively to scale so probably center of the
- # scale is not the same in blender and POV
- mappingAlpha = imgMapTransforms(t_alpha)
- # mappingAlpha = " translate <%.4g, %.4g, %.4g> " \
- # "scale <%.4g, %.4g, %.4g>\n" % \
- # (-t_alpha.offset.x, -t_alpha.offset.y,
- # t_alpha.offset.z, 1.0 / t_alpha.scale.x,
- # 1.0 / t_alpha.scale.y, 1.0 / t_alpha.scale.z)
tabWrite("pigment {pigment_pattern {uv_mapping image_map" \
- "{%s \"%s\" %s}%s" % \
- (imageFormat(texturesAlpha), texturesAlpha,
- imgMap(t_alpha), mappingAlpha))
- tabWrite("}\n")
+ "{%s \"%s\" %s}%s}\n" % \
+ (imageFormat(texturesAlpha), texturesAlpha, imgMap(t_alpha),
+ mappingAlpha))
tabWrite("pigment_map {\n")
tabWrite("[0 color rgbft<0,0,0,1,1>]\n")
tabWrite("[1 color rgbft<%.3g, %.3g, %.3g, %.3g, %.3g>]\n" % \
@@ -2306,441 +3495,294 @@ def write_pov(filename, scene=None, info_callback=None):
tabWrite("}\n")
else:
-
tabWrite("pigment {rgbft<%.3g, %.3g, %.3g, %.3g, %.3g>}\n" % \
(col[0], col[1], col[2], povFilter, trans))
-
+
+
if texturesSpec != "":
+ # Level 3 is full specular
+ tabWrite("finish {%s}\n" % (safety(material_finish, Level=3)))
+
+ elif colored_specular_found:
# Level 1 is no specular
tabWrite("finish {%s}\n" % (safety(material_finish, Level=1)))
else:
- # Level 2 is translated spec
+ # Level 2 is translated specular
tabWrite("finish {%s}\n" % (safety(material_finish, Level=2)))
- else:
+ elif mater.pov.replacement_text == "":
mappingDif = imgMapTransforms(t_dif)
-
+ # mappingDif = ("scale <%.4g,%.4g,%.4g> translate <%.4g,%.4g,%.4g>" % \
+ # ( 1.0 / t_dif.scale.x,
+ # 1.0 / t_dif.scale.y,
+ # 1.0 / t_dif.scale.z,
+ # 0.5-(0.5/t_dif.scale.x) + t_dif.offset.x,
+ # 0.5-(0.5/t_dif.scale.y) + t_dif.offset.y,
+ # 0.5-(0.5/t_dif.scale.z) + t_dif.offset.z))
if texturesAlpha != "":
+ # Strange that the translation factor for scale is not the same as for
+ # translate.
+ # TODO: verify both matches with blender internal.
mappingAlpha = imgMapTransforms(t_alpha)
- # mappingAlpha = " translate <%.4g,%.4g,%.4g> " \
- # "scale <%.4g,%.4g,%.4g>" % \
- # (-t_alpha.offset.x, -t_alpha.offset.y,
- # t_alpha.offset.z, 1.0 / t_alpha.scale.x,
- # 1.0 / t_alpha.scale.y, 1.0 / t_alpha.scale.z)
- tabWrite("pigment {\n")
- tabWrite("pigment_pattern {\n")
+ # mappingAlpha = "translate <%.4g,%.4g,%.4g> scale <%.4g,%.4g,%.4g>" % \
+ # (-t_alpha.offset.x, -t_alpha.offset.y, t_alpha.offset.z,
+ # 1.0 / t_alpha.scale.x, 1.0 / t_alpha.scale.y,
+ # 1.0 / t_alpha.scale.z)
if texturesAlpha and texturesAlpha.startswith("PAT_"):
- tabWrite("function{f%s(x,y,z).transmit}\n" %texturesAlpha)
+ tabWrite("pigment{pigment_pattern {function{f%s(x,y,z).transmit}}\n" %texturesAlpha)
else:
- tabWrite("uv_mapping image_map{%s \"%s\" %s}%s}\n" % \
- (imageFormat(texturesAlpha), texturesAlpha,
- imgMap(t_alpha), mappingAlpha))
+ tabWrite("pigment {pigment_pattern {uv_mapping image_map" \
+ "{%s \"%s\" %s}%s}\n" % \
+ (imageFormat(texturesAlpha), texturesAlpha, imgMap(t_alpha),
+ mappingAlpha))
tabWrite("pigment_map {\n")
tabWrite("[0 color rgbft<0,0,0,1,1>]\n")
- #if texturesAlpha and texturesAlpha.startswith("PAT_"):
- #tabWrite("[1 pigment{%s}]\n" %texturesDif)
- if texturesDif and not texturesDif.startswith("PAT_"):
+ if texturesAlpha and texturesAlpha.startswith("PAT_"):
+ tabWrite("[1 function{f%s(x,y,z).transmit}]\n" %texturesAlpha)
+ elif texturesDif and not texturesDif.startswith("PAT_"):
tabWrite("[1 uv_mapping image_map {%s \"%s\" %s} %s]\n" % \
(imageFormat(texturesDif), texturesDif,
- (imgGamma + imgMap(t_dif)), mappingDif))
+ (imgMap(t_dif) + imgGamma), mappingDif))
elif texturesDif and texturesDif.startswith("PAT_"):
- tabWrite("[1 %s]\n" %texturesDif)
+ tabWrite("[1 %s]\n" %texturesDif)
tabWrite("}\n")
tabWrite("}\n")
- if texturesAlpha and texturesAlpha.startswith("PAT_"):
- tabWrite("}\n")
else:
if texturesDif and texturesDif.startswith("PAT_"):
tabWrite("pigment{%s}\n" %texturesDif)
- else:
- tabWrite("pigment {uv_mapping image_map {%s \"%s\" %s}%s}\n" % \
- (imageFormat(texturesDif), texturesDif,
- (imgGamma + imgMap(t_dif)), mappingDif))
-
+ else:
+ tabWrite("pigment {\n")
+ tabWrite("uv_mapping image_map {\n")
+ #tabWrite("%s \"%s\" %s}%s\n" % \
+ # (imageFormat(texturesDif), texturesDif,
+ # (imgGamma + imgMap(t_dif)),mappingDif))
+ tabWrite("%s \"%s\" \n" % (imageFormat(texturesDif), texturesDif))
+ tabWrite("%s\n" % (imgGamma + imgMap(t_dif)))
+ tabWrite("}\n")
+ tabWrite("%s\n" % mappingDif)
+ tabWrite("}\n")
+
if texturesSpec != "":
- # Level 1 is no specular
- tabWrite("finish {%s}\n" % (safety(material_finish, Level=1)))
-
+ # Level 3 is full specular
+ tabWrite("finish {%s}\n" % (safety(material_finish, Level=3)))
else:
# Level 2 is translated specular
tabWrite("finish {%s}\n" % (safety(material_finish, Level=2)))
## scale 1 rotate y*0
- #imageMap = ("{image_map {%s \"%s\" %s }\n" % \
- # (imageFormat(textures),textures,imgMap(t_dif)))
- #tabWrite("uv_mapping pigment %s} %s finish {%s}\n" % \
- # (imageMap,mapping,safety(material_finish)))
- #tabWrite("pigment {uv_mapping image_map {%s \"%s\" %s}%s} " \
- # "finish {%s}\n" % \
- # (imageFormat(texturesDif), texturesDif, imgMap(t_dif),
- # mappingDif, safety(material_finish)))
- if texturesNorm != "":
+ #imageMap = ("{image_map {%s \"%s\" %s }" % \
+ # (imageFormat(textures), textures,imgMap(t_dif)))
+ #file.write("\n\t\t\tuv_mapping pigment %s} %s finish {%s}" % \
+ # (imageMap, mapping, safety(material_finish)))
+ #file.write("\n\t\t\tpigment {uv_mapping image_map " \
+ # "{%s \"%s\" %s}%s} finish {%s}" % \
+ # (imageFormat(texturesDif), texturesDif,imgMap(t_dif),
+ # mappingDif, safety(material_finish)))
+ if texturesNorm != "" and mater.pov.replacement_text == "":
## scale 1 rotate y*0
- # POV-Ray "scale" is not a number of repetitions factor, but its
- # inverse, a standard scale factor.
- # Offset seems needed relatively to scale so probably center of the
- # scale is not the same in blender and POV
+ # POV-Ray "scale" is not a number of repetitions factor, but its inverse,
+ # a standard scale factor.
+ # Offset seems needed relatively to scale so probably center of the scale is
+ # not the same in blender and POV
mappingNor =imgMapTransforms(t_nor)
- # mappingNor = " translate <%.4g,%.4g,%.4g> scale <%.4g,%.4g,%.4g>" % \
- # (-t_nor.offset.x, -t_nor.offset.y, t_nor.offset.z,
- # 1.0 / t_nor.scale.x, 1.0 / t_nor.scale.y,
- # 1.0 / t_nor.scale.z)
+ # mappingNor = (" translate <%.4g,%.4g,%.4g> scale <%.4g,%.4g,%.4g>" % \
+ # (-t_nor.offset.x, -t_nor.offset.y, t_nor.offset.z,
+ # 1.0 / t_nor.scale.x, 1.0 / t_nor.scale.y, 1.0 / t_nor.scale.z))
#imageMapNor = ("{bump_map {%s \"%s\" %s mapping}" % \
# (imageFormat(texturesNorm),texturesNorm,imgMap(t_nor)))
#We were not using the above maybe we should?
if texturesNorm and texturesNorm.startswith("PAT_"):
- tabWrite("normal{function{f%s(x,y,z).grey} bump_size %.4g}\n" %(texturesNorm, t_nor.normal_factor * 10))
- else:
- tabWrite("normal {uv_mapping bump_map " \
- "{%s \"%s\" %s bump_size %.4g }%s}\n" % \
+ tabWrite("normal{function{f%s(x,y,z).grey} bump_size %.4g}\n" %(texturesNorm, t_nor.normal_factor * 10))
+ else:
+ tabWrite("normal {uv_mapping bump_map {%s \"%s\" %s bump_size %.4g }%s}\n" % \
(imageFormat(texturesNorm), texturesNorm, imgMap(t_nor),
- t_nor.normal_factor * 10, mappingNor))
- if texturesSpec != "":
+ t_nor.normal_factor * 10.0, mappingNor))
+ if texturesSpec != "" and mater.pov.replacement_text == "":
tabWrite("]\n")
- ##################Second index for mapping specular max value###############
- tabWrite("[1 \n")
- if texturesDif == "" and mater.pov.replacement_text == "":
- if texturesAlpha != "":
- # POV-Ray "scale" is not a number of repetitions factor, but its inverse,
- # a standard scale factor.
- # Offset seems needed relatively to scale so probably center of the scale
- # is not the same in blender and POV
- # Strange that the translation factor for scale is not the same as for
- # translate.
- # TODO: verify both matches with blender internal.
- mappingAlpha = imgMapTransforms(t_alpha)
- # mappingAlpha = " translate <%.4g,%.4g,%.4g> scale <%.4g,%.4g,%.4g>\n" % \
- # (-t_alpha.offset.x, -t_alpha.offset.y, t_alpha.offset.z,
- # 1.0 / t_alpha.scale.x, 1.0 / t_alpha.scale.y,
- # 1.0 / t_alpha.scale.z)
- if texturesAlpha and texturesAlpha.startswith("PAT_"):
- tabWrite("function{f%s(x,y,z).transmit}\n" %texturesAlpha)
- else:
- tabWrite("pigment {pigment_pattern {uv_mapping image_map" \
- "{%s \"%s\" %s}%s}\n" % \
- (imageFormat(texturesAlpha), texturesAlpha, imgMap(t_alpha),
- mappingAlpha))
- tabWrite("pigment_map {\n")
- tabWrite("[0 color rgbft<0,0,0,1,1>]\n")
- tabWrite("[1 color rgbft<%.3g, %.3g, %.3g, %.3g, %.3g>]\n" % \
- (col[0], col[1], col[2], povFilter, trans))
tabWrite("}\n")
- tabWrite("}\n")
-
- else:
- tabWrite("pigment {rgbft<%.3g, %.3g, %.3g, %.3g, %.3g>}\n" % \
- (col[0], col[1], col[2], povFilter, trans))
-
-
- if texturesSpec != "":
- # Level 3 is full specular
- tabWrite("finish {%s}\n" % (safety(material_finish, Level=3)))
-
- elif colored_specular_found:
- # Level 1 is no specular
- tabWrite("finish {%s}\n" % (safety(material_finish, Level=1)))
- else:
- # Level 2 is translated specular
- tabWrite("finish {%s}\n" % (safety(material_finish, Level=2)))
-
- elif mater.pov.replacement_text == "":
- mappingDif = imgMapTransforms(t_dif)
- # mappingDif = ("scale <%.4g,%.4g,%.4g> translate <%.4g,%.4g,%.4g>" % \
- # ( 1.0 / t_dif.scale.x,
- # 1.0 / t_dif.scale.y,
- # 1.0 / t_dif.scale.z,
- # 0.5-(0.5/t_dif.scale.x) + t_dif.offset.x,
- # 0.5-(0.5/t_dif.scale.y) + t_dif.offset.y,
- # 0.5-(0.5/t_dif.scale.z) + t_dif.offset.z))
- if texturesAlpha != "":
- # Strange that the translation factor for scale is not the same as for
- # translate.
- # TODO: verify both matches with blender internal.
- mappingAlpha = imgMapTransforms(t_alpha)
- # mappingAlpha = "translate <%.4g,%.4g,%.4g> scale <%.4g,%.4g,%.4g>" % \
- # (-t_alpha.offset.x, -t_alpha.offset.y, t_alpha.offset.z,
- # 1.0 / t_alpha.scale.x, 1.0 / t_alpha.scale.y,
- # 1.0 / t_alpha.scale.z)
- if texturesAlpha and texturesAlpha.startswith("PAT_"):
- tabWrite("pigment{pigment_pattern {function{f%s(x,y,z).transmit}}\n" %texturesAlpha)
- else:
- tabWrite("pigment {pigment_pattern {uv_mapping image_map" \
- "{%s \"%s\" %s}%s}\n" % \
- (imageFormat(texturesAlpha), texturesAlpha, imgMap(t_alpha),
- mappingAlpha))
- tabWrite("pigment_map {\n")
- tabWrite("[0 color rgbft<0,0,0,1,1>]\n")
- if texturesAlpha and texturesAlpha.startswith("PAT_"):
- tabWrite("[1 function{f%s(x,y,z).transmit}]\n" %texturesAlpha)
- elif texturesDif and not texturesDif.startswith("PAT_"):
- tabWrite("[1 uv_mapping image_map {%s \"%s\" %s} %s]\n" % \
- (imageFormat(texturesDif), texturesDif,
- (imgMap(t_dif) + imgGamma), mappingDif))
- elif texturesDif and texturesDif.startswith("PAT_"):
- tabWrite("[1 %s]\n" %texturesDif)
- tabWrite("}\n")
+ #End of slope/ior texture_map
+ if mater.diffuse_shader == 'MINNAERT' and mater.pov.replacement_text == "":
+ tabWrite("]\n")
tabWrite("}\n")
-
- else:
- if texturesDif and texturesDif.startswith("PAT_"):
- tabWrite("pigment{%s}\n" %texturesDif)
- else:
- tabWrite("pigment {\n")
- tabWrite("uv_mapping image_map {\n")
- #tabWrite("%s \"%s\" %s}%s\n" % \
- # (imageFormat(texturesDif), texturesDif,
- # (imgGamma + imgMap(t_dif)),mappingDif))
- tabWrite("%s \"%s\" \n" % (imageFormat(texturesDif), texturesDif))
- tabWrite("%s\n" % (imgGamma + imgMap(t_dif)))
+ if mater.diffuse_shader == 'FRESNEL' and mater.pov.replacement_text == "":
+ c = 1
+ while (c <= lampCount):
+ tabWrite("]\n")
tabWrite("}\n")
- tabWrite("%s\n" % mappingDif)
- tabWrite("}\n")
-
- if texturesSpec != "":
- # Level 3 is full specular
- tabWrite("finish {%s}\n" % (safety(material_finish, Level=3)))
- else:
- # Level 2 is translated specular
- tabWrite("finish {%s}\n" % (safety(material_finish, Level=2)))
-
- ## scale 1 rotate y*0
- #imageMap = ("{image_map {%s \"%s\" %s }" % \
- # (imageFormat(textures), textures,imgMap(t_dif)))
- #file.write("\n\t\t\tuv_mapping pigment %s} %s finish {%s}" % \
- # (imageMap, mapping, safety(material_finish)))
- #file.write("\n\t\t\tpigment {uv_mapping image_map " \
- # "{%s \"%s\" %s}%s} finish {%s}" % \
- # (imageFormat(texturesDif), texturesDif,imgMap(t_dif),
- # mappingDif, safety(material_finish)))
- if texturesNorm != "" and mater.pov.replacement_text == "":
- ## scale 1 rotate y*0
- # POV-Ray "scale" is not a number of repetitions factor, but its inverse,
- # a standard scale factor.
- # Offset seems needed relatively to scale so probably center of the scale is
- # not the same in blender and POV
- mappingNor =imgMapTransforms(t_nor)
- # mappingNor = (" translate <%.4g,%.4g,%.4g> scale <%.4g,%.4g,%.4g>" % \
- # (-t_nor.offset.x, -t_nor.offset.y, t_nor.offset.z,
- # 1.0 / t_nor.scale.x, 1.0 / t_nor.scale.y, 1.0 / t_nor.scale.z))
- #imageMapNor = ("{bump_map {%s \"%s\" %s mapping}" % \
- # (imageFormat(texturesNorm),texturesNorm,imgMap(t_nor)))
- #We were not using the above maybe we should?
- if texturesNorm and texturesNorm.startswith("PAT_"):
- tabWrite("normal{function{f%s(x,y,z).grey} bump_size %.4g}\n" %(texturesNorm, t_nor.normal_factor * 10))
- else:
- tabWrite("normal {uv_mapping bump_map {%s \"%s\" %s bump_size %.4g }%s}\n" % \
- (imageFormat(texturesNorm), texturesNorm, imgMap(t_nor),
- t_nor.normal_factor * 10.0, mappingNor))
- if texturesSpec != "" and mater.pov.replacement_text == "":
- tabWrite("]\n")
-
- tabWrite("}\n")
+ c += 1
- #End of slope/ior texture_map
- if mater.diffuse_shader == 'MINNAERT' and mater.pov.replacement_text == "":
- tabWrite("]\n")
+
+
+ # Close first layer of POV "texture" (Blender material)
tabWrite("}\n")
- if mater.diffuse_shader == 'FRESNEL' and mater.pov.replacement_text == "":
- c = 1
- while (c <= lampCount):
- tabWrite("]\n")
- tabWrite("}\n")
- c += 1
-
-
-
- # Close first layer of POV "texture" (Blender material)
- tabWrite("}\n")
-
- if (mater.specular_color.s > 0.0):
- colored_specular_found = True
- else:
- colored_specular_found = False
- # Write another layered texture using invisible diffuse and metallic trick
- # to emulate colored specular highlights
- special_texture_found = False
- for t in mater.texture_slots:
- if(t and t.use and ((t.texture.type == 'IMAGE' and t.texture.image) or t.texture.type != 'IMAGE') and
- (t.use_map_specular or t.use_map_raymir)):
- # Specular mapped textures would conflict with colored specular
- # because POV can't layer over or under pigment patterned textures
- special_texture_found = True
-
- if colored_specular_found and not special_texture_found:
- if comments:
- file.write(" // colored highlights with a stransparent metallic layer\n")
+ if (mater.specular_color.s > 0.0):
+ colored_specular_found = True
else:
- tabWrite("\n")
-
- tabWrite("texture {\n")
- tabWrite("pigment {rgbft<%.3g, %.3g, %.3g, 0, 1>}\n" % \
- (mater.specular_color[0], mater.specular_color[1], mater.specular_color[2]))
- tabWrite("finish {%s}\n" % (safety(material_finish, Level=2))) # Level 2 is translated spec
-
- texturesNorm = ""
+ colored_specular_found = False
+
+ # Write another layered texture using invisible diffuse and metallic trick
+ # to emulate colored specular highlights
+ special_texture_found = False
for t in mater.texture_slots:
+ if(t and t.use and ((t.texture.type == 'IMAGE' and t.texture.image) or t.texture.type != 'IMAGE') and
+ (t.use_map_specular or t.use_map_raymir)):
+ # Specular mapped textures would conflict with colored specular
+ # because POV can't layer over or under pigment patterned textures
+ special_texture_found = True
+
+ if colored_specular_found and not special_texture_found:
+ if comments:
+ file.write(" // colored highlights with a stransparent metallic layer\n")
+ else:
+ tabWrite("\n")
+
+ tabWrite("texture {\n")
+ tabWrite("pigment {rgbft<%.3g, %.3g, %.3g, 0, 1>}\n" % \
+ (mater.specular_color[0], mater.specular_color[1], mater.specular_color[2]))
+ tabWrite("finish {%s}\n" % (safety(material_finish, Level=2))) # Level 2 is translated spec
+
+ texturesNorm = ""
+ for t in mater.texture_slots:
+
+ if t and t.texture.pov.tex_pattern_type != 'emulator':
+ proceduralFlag=True
+ image_filename = string_strip_hyphen(bpy.path.clean_name(t.texture.name))
+ if (t and t.texture.type == 'IMAGE' and
+ t.use and t.texture.image and
+ t.texture.pov.tex_pattern_type == 'emulator'):
+ proceduralFlag=False
+ image_filename = path_image(t.texture.image)
+ imgGamma = ""
+ if image_filename:
+ if t.use_map_normal:
+ texturesNorm = image_filename
+ # colvalue = t.normal_factor * 10.0 # UNUSED
+ #textNormName=t.texture.image.name + ".normal"
+ #was the above used? --MR
+ t_nor = t
+ if proceduralFlag:
+ tabWrite("normal{function" \
+ "{f%s(x,y,z).grey} bump_size %.4g}\n" % \
+ (texturesNorm,
+ t_nor.normal_factor * 10))
+ else:
+ tabWrite("normal {uv_mapping bump_map " \
+ "{%s \"%s\" %s bump_size %.4g }%s}\n" % \
+ (imageFormat(texturesNorm),
+ texturesNorm, imgMap(t_nor),
+ t_nor.normal_factor * 10,
+ mappingNor))
+
+ tabWrite("}\n") # THEN IT CAN CLOSE LAST LAYER OF TEXTURE
+
+
+ ###################################################################
+ index[0] = idx
+ idx += 1
+
- if t and t.texture.pov.tex_pattern_type != 'emulator':
- proceduralFlag=True
- image_filename = string_strip_hyphen(bpy.path.clean_name(t.texture.name))
- if t and t.texture.type == 'IMAGE' and t.use and t.texture.image and t.texture.pov.tex_pattern_type == 'emulator':
- proceduralFlag=False
- image_filename = path_image(t.texture.image)
- imgGamma = ""
- if image_filename:
- if t.use_map_normal:
- texturesNorm = image_filename
- # colvalue = t.normal_factor * 10.0 # UNUSED
- #textNormName=t.texture.image.name + ".normal"
- #was the above used? --MR
- t_nor = t
- if proceduralFlag:
- tabWrite("normal{function{f%s(x,y,z).grey} bump_size %.4g}\n" %(texturesNorm, t_nor.normal_factor * 10))
- else:
- tabWrite("normal {uv_mapping bump_map " \
- "{%s \"%s\" %s bump_size %.4g }%s}\n" % \
- (imageFormat(texturesNorm), texturesNorm, imgMap(t_nor),
- t_nor.normal_factor * 10, mappingNor))
-
- tabWrite("}\n") # THEN IT CAN CLOSE LAST LAYER OF TEXTURE --MR
-
-
- ####################################################################################
- index[0] = idx
- idx += 1
-
-
-
-
- # Vert Colors
- tabWrite("texture_list {\n")
- # In case there's is no material slot, give at least one texture (empty so it uses pov default)
- if len(vertCols)==0:
- file.write(tabStr + "1")
- else:
- file.write(tabStr + "%s" % (len(vertCols))) # vert count
- if material is not None:
- if material.pov.replacement_text != "":
- file.write("\n")
- file.write(" texture{%s}\n" % material.pov.replacement_text)
+
+ # Vert Colors
+ tabWrite("texture_list {\n")
+ # In case there's is no material slot, give at least one texture
+ #(an empty one so it uses pov default)
+ if len(vertCols)==0:
+ file.write(tabStr + "1")
else:
- # Loop through declared materials list
- for cMN in LocalMaterialNames:
- if material != "Default":
- file.write("\n texture{MAT_%s}\n" % cMN)#string_strip_hyphen(materialNames[material])) # Something like that
- else:
- file.write(" texture{}\n")
- tabWrite("}\n")
-
- # Face indices
- tabWrite("face_indices {\n")
- tabWrite("%d" % (len(me_faces) + quadCount)) # faces count
- tabStr = tab * tabLevel
+ file.write(tabStr + "%s" % (len(vertCols))) # vert count
+
+ # below "material" alias, changed to ob.active_material
+ # because variable referenced before assignment
+ if ob.active_material is not None:
+ if material.pov.replacement_text != "":
+ file.write("\n")
+ file.write(" texture{%s}\n" % material.pov.replacement_text)
- for fi, f in enumerate(me_faces):
- fv = faces_verts[fi]
- material_index = f.material_index
- if len(fv) == 4:
- indices = (0, 1, 2), (0, 2, 3)
+ else:
+ # Loop through declared materials list
+ for cMN in LocalMaterialNames:
+ if material != "Default":
+ file.write("\n texture{MAT_%s}\n" % cMN)
+ #use string_strip_hyphen(materialNames[material]))
+ #or Something like that to clean up the above?
else:
- indices = ((0, 1, 2),)
+ file.write(" texture{}\n")
+ tabWrite("}\n")
- if vcol_layer:
- col = vcol_layer[fi]
+ # Face indices
+ tabWrite("face_indices {\n")
+ tabWrite("%d" % (len(me_faces) + quadCount)) # faces count
+ tabStr = tab * tabLevel
+ for fi, f in enumerate(me_faces):
+ fv = faces_verts[fi]
+ material_index = f.material_index
if len(fv) == 4:
- cols = col.color1, col.color2, col.color3, col.color4
+ indices = (0, 1, 2), (0, 2, 3)
else:
- cols = col.color1, col.color2, col.color3
-
- if not me_materials or me_materials[material_index] is None: # No materials
- for i1, i2, i3 in indices:
- if linebreaksinlists:
- file.write(",\n")
- # vert count
- file.write(tabStr + "<%d,%d,%d>" % (fv[i1], fv[i2], fv[i3]))
- else:
- file.write(", ")
- file.write("<%d,%d,%d>" % (fv[i1], fv[i2], fv[i3])) # vert count
- else:
- material = me_materials[material_index]
- for i1, i2, i3 in indices:
- if me.vertex_colors: #and material.use_vertex_color_paint:
- # Color per vertex - vertex color
+ indices = ((0, 1, 2),)
- col1 = cols[i1]
- col2 = cols[i2]
- col3 = cols[i3]
+ if vcol_layer:
+ col = vcol_layer[fi]
- ci1 = vertCols[col1[0], col1[1], col1[2], material_index][0]
- ci2 = vertCols[col2[0], col2[1], col2[2], material_index][0]
- ci3 = vertCols[col3[0], col3[1], col3[2], material_index][0]
+ if len(fv) == 4:
+ cols = col.color1, col.color2, col.color3, col.color4
else:
- # Color per material - flat material color
- if material.subsurface_scattering.use:
- diffuse_color = [i * j for i, j in zip(material.subsurface_scattering.color[:], material.diffuse_color[:])]
+ cols = col.color1, col.color2, col.color3
+
+ if not me_materials or me_materials[material_index] is None: # No materials
+ for i1, i2, i3 in indices:
+ if linebreaksinlists:
+ file.write(",\n")
+ # vert count
+ file.write(tabStr + "<%d,%d,%d>" % (fv[i1], fv[i2], fv[i3]))
else:
- diffuse_color = material.diffuse_color[:]
- ci1 = ci2 = ci3 = vertCols[diffuse_color[0], diffuse_color[1], \
- diffuse_color[2], f.material_index][0]
-
- if linebreaksinlists:
- file.write(",\n")
- file.write(tabStr + "<%d,%d,%d>, %d,%d,%d" % \
- (fv[i1], fv[i2], fv[i3], ci1, ci2, ci3)) # vert count
- else:
- file.write(", ")
- file.write("<%d,%d,%d>, %d,%d,%d" % \
- (fv[i1], fv[i2], fv[i3], ci1, ci2, ci3)) # vert count
-
- file.write("\n")
- tabWrite("}\n")
-
- # normal_indices indices
- tabWrite("normal_indices {\n")
- tabWrite("%d" % (len(me_faces) + quadCount)) # faces count
- tabStr = tab * tabLevel
- for fi, fv in enumerate(faces_verts):
-
- if len(fv) == 4:
- indices = (0, 1, 2), (0, 2, 3)
- else:
- indices = ((0, 1, 2),)
-
- for i1, i2, i3 in indices:
- if me_faces[fi].use_smooth:
- if linebreaksinlists:
- file.write(",\n")
- file.write(tabStr + "<%d,%d,%d>" %\
- (uniqueNormals[verts_normals[fv[i1]]][0],\
- uniqueNormals[verts_normals[fv[i2]]][0],\
- uniqueNormals[verts_normals[fv[i3]]][0])) # vert count
- else:
- file.write(", ")
- file.write("<%d,%d,%d>" %\
- (uniqueNormals[verts_normals[fv[i1]]][0],\
- uniqueNormals[verts_normals[fv[i2]]][0],\
- uniqueNormals[verts_normals[fv[i3]]][0])) # vert count
+ file.write(", ")
+ file.write("<%d,%d,%d>" % (fv[i1], fv[i2], fv[i3])) # vert count
else:
- idx = uniqueNormals[faces_normals[fi]][0]
- if linebreaksinlists:
- file.write(",\n")
- file.write(tabStr + "<%d,%d,%d>" % (idx, idx, idx)) # vert count
- else:
- file.write(", ")
- file.write("<%d,%d,%d>" % (idx, idx, idx)) # vert count
+ material = me_materials[material_index]
+ for i1, i2, i3 in indices:
+ if me.vertex_colors: #and material.use_vertex_color_paint:
+ # Color per vertex - vertex color
+
+ col1 = cols[i1]
+ col2 = cols[i2]
+ col3 = cols[i3]
+
+ ci1 = vertCols[col1[0], col1[1], col1[2], material_index][0]
+ ci2 = vertCols[col2[0], col2[1], col2[2], material_index][0]
+ ci3 = vertCols[col3[0], col3[1], col3[2], material_index][0]
+ else:
+ # Color per material - flat material color
+ if material.subsurface_scattering.use:
+ diffuse_color = [i * j for i, j in
+ zip(material.subsurface_scattering.color[:],
+ material.diffuse_color[:])]
+ else:
+ diffuse_color = material.diffuse_color[:]
+ ci1 = ci2 = ci3 = vertCols[diffuse_color[0], diffuse_color[1], \
+ diffuse_color[2], f.material_index][0]
+
+ if linebreaksinlists:
+ file.write(",\n")
+ file.write(tabStr + "<%d,%d,%d>, %d,%d,%d" % \
+ (fv[i1], fv[i2], fv[i3], ci1, ci2, ci3)) # vert count
+ else:
+ file.write(", ")
+ file.write("<%d,%d,%d>, %d,%d,%d" % \
+ (fv[i1], fv[i2], fv[i3], ci1, ci2, ci3)) # vert count
- file.write("\n")
- tabWrite("}\n")
+ file.write("\n")
+ tabWrite("}\n")
- if uv_layer:
- tabWrite("uv_indices {\n")
+ # normal_indices indices
+ tabWrite("normal_indices {\n")
tabWrite("%d" % (len(me_faces) + quadCount)) # faces count
tabStr = tab * tabLevel
for fi, fv in enumerate(faces_verts):
@@ -2750,44 +3792,81 @@ def write_pov(filename, scene=None, info_callback=None):
else:
indices = ((0, 1, 2),)
- uv = uv_layer[fi]
- if len(faces_verts[fi]) == 4:
- uvs = uv.uv[0][:], uv.uv[1][:], uv.uv[2][:], uv.uv[3][:]
- else:
- uvs = uv.uv[0][:], uv.uv[1][:], uv.uv[2][:]
-
for i1, i2, i3 in indices:
- if linebreaksinlists:
- file.write(",\n")
- file.write(tabStr + "<%d,%d,%d>" % (
- uniqueUVs[uvs[i1]][0],\
- uniqueUVs[uvs[i2]][0],\
- uniqueUVs[uvs[i3]][0]))
+ if me_faces[fi].use_smooth:
+ if linebreaksinlists:
+ file.write(",\n")
+ file.write(tabStr + "<%d,%d,%d>" %\
+ (uniqueNormals[verts_normals[fv[i1]]][0],\
+ uniqueNormals[verts_normals[fv[i2]]][0],\
+ uniqueNormals[verts_normals[fv[i3]]][0])) # vert count
+ else:
+ file.write(", ")
+ file.write("<%d,%d,%d>" %\
+ (uniqueNormals[verts_normals[fv[i1]]][0],\
+ uniqueNormals[verts_normals[fv[i2]]][0],\
+ uniqueNormals[verts_normals[fv[i3]]][0])) # vert count
else:
- file.write(", ")
- file.write("<%d,%d,%d>" % (
- uniqueUVs[uvs[i1]][0],\
- uniqueUVs[uvs[i2]][0],\
- uniqueUVs[uvs[i3]][0]))
+ idx = uniqueNormals[faces_normals[fi]][0]
+ if linebreaksinlists:
+ file.write(",\n")
+ file.write(tabStr + "<%d,%d,%d>" % (idx, idx, idx)) # vertcount
+ else:
+ file.write(", ")
+ file.write("<%d,%d,%d>" % (idx, idx, idx)) # vert count
file.write("\n")
tabWrite("}\n")
- if me.materials:
- try:
- material = me.materials[0] # dodgy
- writeObjectMaterial(material, ob)
- except IndexError:
- print(me)
+ if uv_layer:
+ tabWrite("uv_indices {\n")
+ tabWrite("%d" % (len(me_faces) + quadCount)) # faces count
+ tabStr = tab * tabLevel
+ for fi, fv in enumerate(faces_verts):
- #Importance for radiosity sampling added here:
- tabWrite("radiosity { \n")
- tabWrite("importance %3g \n" % importance)
- tabWrite("}\n")
+ if len(fv) == 4:
+ indices = (0, 1, 2), (0, 2, 3)
+ else:
+ indices = ((0, 1, 2),)
- tabWrite("}\n") # End of mesh block
+ uv = uv_layer[fi]
+ if len(faces_verts[fi]) == 4:
+ uvs = uv.uv[0][:], uv.uv[1][:], uv.uv[2][:], uv.uv[3][:]
+ else:
+ uvs = uv.uv[0][:], uv.uv[1][:], uv.uv[2][:]
+
+ for i1, i2, i3 in indices:
+ if linebreaksinlists:
+ file.write(",\n")
+ file.write(tabStr + "<%d,%d,%d>" % (
+ uniqueUVs[uvs[i1]][0],\
+ uniqueUVs[uvs[i2]][0],\
+ uniqueUVs[uvs[i3]][0]))
+ else:
+ file.write(", ")
+ file.write("<%d,%d,%d>" % (
+ uniqueUVs[uvs[i1]][0],\
+ uniqueUVs[uvs[i2]][0],\
+ uniqueUVs[uvs[i3]][0]))
- bpy.data.meshes.remove(me)
+ file.write("\n")
+ tabWrite("}\n")
+
+ if me.materials:
+ try:
+ material = me.materials[0] # dodgy
+ writeObjectMaterial(material, ob)
+ except IndexError:
+ print(me)
+
+ #Importance for radiosity sampling added here:
+ tabWrite("radiosity { \n")
+ tabWrite("importance %3g \n" % importance)
+ tabWrite("}\n")
+
+ tabWrite("}\n") # End of mesh block
+
+ bpy.data.meshes.remove(me)
for data_name, inst in data_ref.items():
for ob_name, matrix_str in inst:
@@ -2839,7 +3918,7 @@ def write_pov(filename, scene=None, info_callback=None):
# Commented below was an idea to make the Background image oriented as camera
# taken here:
-#http://news.povray.org/povray.newusers/thread/%3Cweb.4a5cddf4e9c9822ba2f93e20@news.povray.org%3E/
+#http://news.pov.org/pov.newusers/thread/%3Cweb.4a5cddf4e9c9822ba2f93e20@news.pov.org%3E/
# Replace 4/3 by the ratio of each image found by some custom or existing
# function
#mappingBlend = (" translate <%.4g,%.4g,%.4g> rotate z*degrees" \
@@ -2862,8 +3941,8 @@ def write_pov(filename, scene=None, info_callback=None):
# Further Scale by 2 and translate by -1 are
# required for the sky_sphere not to repeat
- mappingBlend = "scale 2 scale <%.4g,%.4g,%.4g> translate -1 translate <%.4g,%.4g,%.4g> " \
- "rotate<0,0,0> " % \
+ mappingBlend = "scale 2 scale <%.4g,%.4g,%.4g> translate -1 " \
+ "translate <%.4g,%.4g,%.4g> rotate<0,0,0> " % \
((1.0 / t_blend.scale.x),
(1.0 / t_blend.scale.y),
(1.0 / t_blend.scale.z),
@@ -2933,7 +4012,7 @@ def write_pov(filename, scene=None, info_callback=None):
tabWrite("fog {\n")
tabWrite("distance %.6f\n" % mist.depth)
tabWrite("color rgbt<%.3g, %.3g, %.3g, %.3g>\n" % \
- (world.horizon_color[:] + (1.0 - mist.intensity,)))
+ (*world.horizon_color, 1.0 - mist.intensity))
#tabWrite("fog_offset %.6f\n" % mist.start)
#tabWrite("fog_alt 5\n")
#tabWrite("turbulence 0.2\n")
@@ -2977,9 +4056,14 @@ def write_pov(filename, scene=None, info_callback=None):
# In pov, the scale has reversed influence compared to blender. these number
# should correct that
tabWrite("mm_per_unit %.6f\n" % \
- (material.subsurface_scattering.scale * 1000.0))# formerly ...scale * (-100.0) + 15.0))
+ (material.subsurface_scattering.scale * 1000.0))
+ # 1000 rather than scale * (-100.0) + 15.0))
+
# In POV-Ray, the scale factor for all subsurface shaders needs to be the same
- sslt_samples = (11 - material.subsurface_scattering.error_threshold) * 10 # formerly ...*100
+
+ # formerly sslt_samples were multiplied by 100 instead of 10
+ sslt_samples = (11 - material.subsurface_scattering.error_threshold) * 10
+
tabWrite("subsurface { samples %d, %d }\n" % (sslt_samples, sslt_samples / 10))
onceSss = 0
@@ -2987,12 +4071,15 @@ def write_pov(filename, scene=None, info_callback=None):
tabWrite("ambient_light rgbt<%.3g, %.3g, %.3g,1>\n" % world.ambient_color[:])
onceAmbient = 0
- if (material.pov.refraction_type == "2" or material.pov.photons_reflection == True) and oncePhotons:
+ if (oncePhotons and
+ (material.pov.refraction_type == "2" or
+ material.pov.photons_reflection == True)):
tabWrite("photons {\n")
tabWrite("spacing %.6f\n" % scene.pov.photon_spacing)
tabWrite("max_trace_level %d\n" % scene.pov.photon_max_trace_level)
tabWrite("adc_bailout %.3g\n" % scene.pov.photon_adc_bailout)
- tabWrite("gather %d, %d\n" % (scene.pov.photon_gather_min, scene.pov.photon_gather_max))
+ tabWrite("gather %d, %d\n" % (scene.pov.photon_gather_min,
+ scene.pov.photon_gather_max))
tabWrite("}\n")
oncePhotons = 0
@@ -3032,8 +4119,10 @@ def write_pov(filename, scene=None, info_callback=None):
LocalPatternNames = []
for texture in bpy.data.textures: #ok?
if texture.users > 0:
- currentPatName = string_strip_hyphen(bpy.path.clean_name(texture.name)) #string_strip_hyphen(patternNames[texture.name]) #maybe instead
- LocalPatternNames.append(currentPatName) #use this list to prevent writing texture instances several times and assign in mats?
+ currentPatName = string_strip_hyphen(bpy.path.clean_name(texture.name))
+ #string_strip_hyphen(patternNames[texture.name]) #maybe instead of the above
+ LocalPatternNames.append(currentPatName)
+ #use above list to prevent writing texture instances several times and assign in mats?
file.write("\n #declare PAT_%s = \n" % currentPatName)
file.write(exportPattern(texture))
file.write("\n")
@@ -3050,32 +4139,45 @@ def write_pov(filename, scene=None, info_callback=None):
if comments:
file.write("\n//--Lamps--\n\n")
- exportLamps([l for l in sel if l.type == 'LAMP'])
+ exportLamps([L for L in sel if (L.type == 'LAMP' and L.pov.object_as != 'RAINBOW')])
+
+ if comments:
+ file.write("\n//--Rainbows--\n\n")
+ exportRainbows([L for L in sel if (L.type == 'LAMP' and L.pov.object_as == 'RAINBOW')])
+
if comments:
+ file.write("\n//--Special Curves--\n\n")
+ for c in sel:
+ if c.type == 'CURVE' and (c.pov.curveshape in {'lathe','sphere_sweep','loft','birail'}):
+ exportCurves(scene,c)
+
+
+ if comments:
file.write("\n//--Material Definitions--\n\n")
# write a default pigment for objects with no material (comment out to show black)
file.write("#default{ pigment{ color rgb 0.8 }}\n")
# Convert all materials to strings we can access directly per vertex.
#exportMaterials()
- writeMaterial(None) # default material
+ shading.writeMaterial(DEF_MAT_NAME, scene, tabWrite, safety, comments, uniqueName, materialNames, None) # default material
for material in bpy.data.materials:
if material.users > 0:
- writeMaterial(material)
+ shading.writeMaterial(DEF_MAT_NAME, scene, tabWrite, safety, comments, uniqueName, materialNames, material)
if comments:
file.write("\n")
- exportMeta([l for l in sel if l.type == 'META'])
+ exportMeta([m for m in sel if m.type == 'META'])
if comments:
file.write("//--Mesh objects--\n")
exportMeshes(scene, sel)
+
#What follow used to happen here:
#exportCamera()
#exportWorld(scene.world)
#exportGlobalSettings(scene)
- # MR:..and the order was important for an attempt to implement pov 3.7 baking
+ # MR:..and the order was important for implementing pov 3.7 baking
# (mesh camera) comment for the record
# CR: Baking should be a special case than. If "baking", than we could change the order.
@@ -3172,7 +4274,8 @@ class PovrayRender(bpy.types.RenderEngine):
# assume if there is a 64bit binary that the user has a 64bit capable OS
if sys.platform[:3] == "win":
import winreg
- win_reg_key = winreg.OpenKey(winreg.HKEY_CURRENT_USER, "Software\\POV-Ray\\v3.7\\Windows")
+ win_reg_key = winreg.OpenKey(winreg.HKEY_CURRENT_USER,
+ "Software\\POV-Ray\\v3.7\\Windows")
win_home = winreg.QueryValueEx(win_reg_key, "Home")[0]
# First try 64bits UberPOV
@@ -3235,6 +4338,9 @@ class PovrayRender(bpy.types.RenderEngine):
def info_callback(txt):
self.update_stats("", "POV-Ray 3.7: " + txt)
+ # os.makedirs(user_dir, exist_ok=True) # handled with previews
+ os.makedirs(preview_dir, exist_ok=True)
+
write_pov(self._temp_file_in, scene, info_callback)
def _render(self, scene):
@@ -3572,30 +4678,29 @@ class PovrayRender(bpy.types.RenderEngine):
self._cleanup()
-
-#################################Operators#########################################
+##################################################################################
+#################################Operators########################################
+##################################################################################
class RenderPovTexturePreview(Operator):
bl_idname = "tex.preview_update"
bl_label = "Update preview"
def execute(self, context):
tex=bpy.context.object.active_material.active_texture #context.texture
texPrevName=string_strip_hyphen(bpy.path.clean_name(tex.name))+"_prev"
- workDir=os.path.dirname(__file__)
- previewDir=os.path.join(workDir, "preview")
-
+
## Make sure Preview directory exists and is empty
- if not os.path.isdir(previewDir):
- os.mkdir(previewDir)
-
- iniPrevFile=os.path.join(previewDir, "Preview.ini")
- inputPrevFile=os.path.join(previewDir, "Preview.pov")
- outputPrevFile=os.path.join(previewDir, texPrevName)
+ if not os.path.isdir(preview_dir):
+ os.mkdir(preview_dir)
+
+ iniPrevFile=os.path.join(preview_dir, "Preview.ini")
+ inputPrevFile=os.path.join(preview_dir, "Preview.pov")
+ outputPrevFile=os.path.join(preview_dir, texPrevName)
##################### ini ##########################################
fileIni=open("%s"%iniPrevFile,"w")
fileIni.write('Version=3.7\n')
fileIni.write('Input_File_Name="%s"\n'%inputPrevFile)
fileIni.write('Output_File_Name="%s.png"\n'%outputPrevFile)
- fileIni.write('Library_Path="%s"\n'%previewDir)
+ fileIni.write('Library_Path="%s"\n' % preview_dir)
fileIni.write('Width=256\n')
fileIni.write('Height=256\n')
fileIni.write('Pause_When_Done=0\n')
@@ -3640,9 +4745,11 @@ class RenderPovTexturePreview(Operator):
pov_binary = PovrayRender._locate_binary()
if sys.platform[:3] == "win":
- p1=subprocess.Popen(["%s"%pov_binary,"/EXIT","%s"%iniPrevFile],stdout=subprocess.PIPE,stderr=subprocess.STDOUT)
+ p1=subprocess.Popen(["%s"%pov_binary,"/EXIT","%s"%iniPrevFile],
+ stdout=subprocess.PIPE,stderr=subprocess.STDOUT)
else:
- p1=subprocess.Popen(["%s"%pov_binary,"-d","%s"%iniPrevFile],stdout=subprocess.PIPE,stderr=subprocess.STDOUT)
+ p1=subprocess.Popen(["%s"%pov_binary,"-d","%s"%iniPrevFile],
+ stdout=subprocess.PIPE,stderr=subprocess.STDOUT)
p1.wait()
tex.use_nodes = True
@@ -3664,4 +4771,5 @@ class RenderPovTexturePreview(Operator):
links.new(im.outputs[0],previewer.inputs[0])
#tex.type="IMAGE" # makes clip extend possible
#tex.extension="CLIP"
- return {'FINISHED'} \ No newline at end of file
+ return {'FINISHED'}
+
diff --git a/render_povray/shading.py b/render_povray/shading.py
new file mode 100644
index 00000000..d882bfec
--- /dev/null
+++ b/render_povray/shading.py
@@ -0,0 +1,231 @@
+# For BI > POV shaders emulation
+import bpy
+
def writeMaterial(DEF_MAT_NAME, scene, tabWrite, safety, comments, uniqueName, materialNames, material):
    """Emit POV-Ray 'finish' declarations for one Blender Internal material.

    Writes up to three #declare'd finish variants (Level 1/2/3, see the
    comment block below) through the supplied ``tabWrite`` callback.

    :param DEF_MAT_NAME: fallback name used when ``material`` is None.
    :param scene: current scene (not referenced in this body).
    :param tabWrite: callback writing one indented line to the exported file.
    :param safety: callback mangling a finish name per specular/mirror Level.
    :param comments: when True, extra explanatory POV comments are written.
    :param uniqueName: callback returning a non-clashing exported name.
    :param materialNames: dict of original -> exported names (updated here).
    :param material: the Blender material, or None for the default material.
    """
    # Assumes only called once on each material
    if material:
        name_orig = material.name
        name = materialNames[name_orig] = uniqueName(bpy.path.clean_name(name_orig), materialNames)
    else:
        name = name_orig = DEF_MAT_NAME


    if material:
        # If saturation(.s) is not zero, then color is not grey, and has a tint
        colored_specular_found = (material.specular_color.s > 0.0)

    ##################
    # Several versions of the finish: Level conditions are variations for specular/Mirror
    # texture channel map with alternative finish of 0 specular and no mirror reflection.
    # Level=1 Means No specular nor Mirror reflection
    # Level=2 Means translation of spec and mir levels for when no map influences them
    # Level=3 Means Maximum Spec and Mirror

    def povHasnoSpecularMaps(Level):
        # Writes one complete "finish { ... }" block for the given Level.
        if Level == 1:
            tabWrite("#declare %s = finish {" % safety(name, Level=1))
            if comments:
                # NOTE(review): 'file' is not defined in this module -- presumably the
                # export file handle owned by the calling module; confirm before
                # relying on the comments=True path.
                file.write(" //No specular nor Mirror reflection\n")
            else:
                tabWrite("\n")
        elif Level == 2:
            tabWrite("#declare %s = finish {" % safety(name, Level=2))
            if comments:
                file.write(" //translation of spec and mir levels for when no map " \
                           "influences them\n")
            else:
                tabWrite("\n")
        elif Level == 3:
            tabWrite("#declare %s = finish {" % safety(name, Level=3))
            if comments:
                file.write(" //Maximum Spec and Mirror\n")
            else:
                tabWrite("\n")

        if material:
            # POV-Ray 3.7 now uses two diffuse values respectively for front and back shading
            # (the back diffuse is like blender translucency)
            frontDiffuse = material.diffuse_intensity
            backDiffuse = material.translucency

            if material.pov.conserve_energy:

                #Total should not go above one
                if (frontDiffuse + backDiffuse) <= 1.0:
                    pass
                elif frontDiffuse == backDiffuse:
                    # Try to respect the user's 'intention' by comparing the two values but
                    # bringing the total back to one.
                    frontDiffuse = backDiffuse = 0.5
                # Let the highest value stay the highest value.
                elif frontDiffuse > backDiffuse:
                    # clamps the sum below 1
                    backDiffuse = min(backDiffuse, (1.0 - frontDiffuse))
                else:
                    frontDiffuse = min(frontDiffuse, (1.0 - backDiffuse))

            # map hardness between 0.0 and 1.0
            roughness = ((1.0 - ((material.specular_hardness - 1.0) / 510.0)))
            ## scale from 0.0 to 0.1
            roughness *= 0.1
            # add a small value because 0.0 is invalid.
            roughness += (1.0 / 511.0)

            ################################Diffuse Shader######################################
            # Not used for Full spec (Level=3) of the shader.
            if material.diffuse_shader == 'OREN_NAYAR' and Level != 3:
                # Blender roughness is what is generally called oren nayar Sigma,
                # and brilliance in POV-Ray.
                tabWrite("brilliance %.3g\n" % (0.9 + material.roughness))

            if material.diffuse_shader == 'TOON' and Level != 3:
                tabWrite("brilliance %.3g\n" % (0.01 + material.diffuse_toon_smooth * 0.25))
                # Lower diffuse and increase specular for toon effect seems to look better
                # in POV-Ray.
                frontDiffuse *= 0.5

            if material.diffuse_shader == 'MINNAERT' and Level != 3:
                #tabWrite("aoi %.3g\n" % material.darkness)
                pass  # let's keep things simple for now
            if material.diffuse_shader == 'FRESNEL' and Level != 3:
                #tabWrite("aoi %.3g\n" % material.diffuse_fresnel_factor)
                pass  # let's keep things simple for now
            if material.diffuse_shader == 'LAMBERT' and Level != 3:
                # trying to best match lambert attenuation by that constant brilliance value
                tabWrite("brilliance 1.8\n")

            if Level == 2:
                ###########################Specular Shader######################################
                # No difference between phong and cook torrence in blender HaHa!
                if (material.specular_shader == 'COOKTORR' or
                    material.specular_shader == 'PHONG'):
                    tabWrite("phong %.3g\n" % (material.specular_intensity))
                    tabWrite("phong_size %.3g\n" % (material.specular_hardness / 2 + 0.25))

                # POV-Ray 'specular' keyword corresponds to a Blinn model, without the ior.
                elif material.specular_shader == 'BLINN':
                    # Use blender Blinn's IOR just as some factor for spec intensity
                    tabWrite("specular %.3g\n" % (material.specular_intensity *
                                                  (material.specular_ior / 4.0)))
                    tabWrite("roughness %.3g\n" % roughness)
                    #Could use brilliance 2(or varying around 2 depending on ior or factor) too.

                elif material.specular_shader == 'TOON':
                    tabWrite("phong %.3g\n" % (material.specular_intensity * 2.0))
                    # use extreme phong_size
                    tabWrite("phong_size %.3g\n" % (0.1 + material.specular_toon_smooth / 2.0))

                elif material.specular_shader == 'WARDISO':
                    # find best suited default constant for brilliance Use both phong and
                    # specular for some values.
                    tabWrite("specular %.3g\n" % (material.specular_intensity /
                                                  (material.specular_slope + 0.0005)))
                    # find best suited default constant for brilliance Use both phong and
                    # specular for some values.
                    tabWrite("roughness %.4g\n" % (0.0005 + material.specular_slope / 10.0))
                    # find best suited default constant for brilliance Use both phong and
                    # specular for some values.
                    tabWrite("brilliance %.4g\n" % (1.8 - material.specular_slope * 1.8))

            ####################################################################################
            elif Level == 1:
                tabWrite("specular 0\n")
            elif Level == 3:
                tabWrite("specular 1\n")
            tabWrite("diffuse %.3g %.3g\n" % (frontDiffuse, backDiffuse))

            tabWrite("ambient %.3g\n" % material.ambient)
            # POV-Ray blends the global value
            #tabWrite("ambient rgb <%.3g, %.3g, %.3g>\n" % \
            #         tuple([c*material.ambient for c in world.ambient_color]))
            tabWrite("emission %.3g\n" % material.emit)  # New in POV-Ray 3.7

            #POV-Ray just ignores roughness if there's no specular keyword
            #tabWrite("roughness %.3g\n" % roughness)

            if material.pov.conserve_energy:
                # added for more realistic shading. Needs some checking to see if it
                # really works. --Maurice.
                tabWrite("conserve_energy\n")

            if colored_specular_found == True:
                tabWrite("metallic\n")

            # 'phong 70.0 '
            if Level != 1:
                if material.raytrace_mirror.use:
                    raytrace_mirror = material.raytrace_mirror
                    if raytrace_mirror.reflect_factor:
                        tabWrite("reflection {\n")
                        tabWrite("rgb <%.3g, %.3g, %.3g>\n" % material.mirror_color[:])
                        if material.pov.mirror_metallic:
                            tabWrite("metallic %.3g\n" % (raytrace_mirror.reflect_factor))
                        # Blurry reflections for UberPOV
                        # NOTE(review): 'using_uberpov' is not defined in this module --
                        # presumably imported/injected by the caller; verify at integration.
                        if using_uberpov and raytrace_mirror.gloss_factor < 1.0:
                            #tabWrite("#ifdef(unofficial) #if(unofficial = \"patch\") #if(patch(\"upov-reflection-roughness\") > 0)\n")
                            tabWrite("roughness %.6f\n" % \
                                     (0.000001/raytrace_mirror.gloss_factor))
                            #tabWrite("#end #end #end\n") # This and previous comment for backward compatibility, messier pov code
                        if material.pov.mirror_use_IOR:  # WORKING ?
                            # Removed from the line below: gives a more physically correct
                            # material but needs proper IOR. --Maurice
                            tabWrite("fresnel 1 ")
                        tabWrite("falloff %.3g exponent %.3g} " % \
                                 (raytrace_mirror.fresnel, raytrace_mirror.fresnel_factor))

            if material.subsurface_scattering.use:
                subsurface_scattering = material.subsurface_scattering
                tabWrite("subsurface { translucency <%.3g, %.3g, %.3g> }\n" % (
                         (subsurface_scattering.radius[0]),
                         (subsurface_scattering.radius[1]),
                         (subsurface_scattering.radius[2]),
                         )
                )

            if material.pov.irid_enable:
                tabWrite("irid { %.4g thickness %.4g turbulence %.4g }" % \
                         (material.pov.irid_amount, material.pov.irid_thickness,
                          material.pov.irid_turbulence))

        else:
            tabWrite("diffuse 0.8\n")
            tabWrite("phong 70.0\n")

            #tabWrite("specular 0.2\n")

        # This is written into the object
        '''
        if material and material.transparency_method=='RAYTRACE':
            'interior { ior %.3g} ' % material.raytrace_transparency.ior
        '''

        #tabWrite("crand 1.0\n") # Sand granyness
        #tabWrite("metallic %.6f\n" % material.spec)
        #tabWrite("phong %.6f\n" % material.spec)
        #tabWrite("phong_size %.6f\n" % material.spec)
        #tabWrite("brilliance %.6f " % (material.specular_hardness/256.0) # Like hardness

        tabWrite("}\n\n")

    # Level=2 Means translation of spec and mir levels for when no map influences them
    povHasnoSpecularMaps(Level=2)

    if material:
        special_texture_found = False
        for t in material.texture_slots:
            if t and t.use:
                if (t.texture.type == 'IMAGE' and t.texture.image) or t.texture.type != 'IMAGE':
                    validPath=True
                else:
                    validPath=False
            # Short-circuit keeps validPath from being read when t is unused/None.
            if(t and t.use and validPath and
               (t.use_map_specular or t.use_map_raymir or t.use_map_normal or t.use_map_alpha)):
                special_texture_found = True
                continue  # Some texture found

        if special_texture_found or colored_specular_found:
            # Level=1 Means No specular nor Mirror reflection
            povHasnoSpecularMaps(Level=1)

            # Level=3 Means Maximum Spec and Mirror
            povHasnoSpecularMaps(Level=3)
diff --git a/render_povray/ui.py b/render_povray/ui.py
index 2d0b1099..3216ac2d 100644
--- a/render_povray/ui.py
+++ b/render_povray/ui.py
@@ -29,6 +29,7 @@ properties_render.RENDER_PT_shading.COMPAT_ENGINES.add('POVRAY_RENDER')
properties_render.RENDER_PT_output.COMPAT_ENGINES.add('POVRAY_RENDER')
del properties_render
+
# Use only a subset of the world panels
from bl_ui import properties_world
properties_world.WORLD_PT_preview.COMPAT_ENGINES.add('POVRAY_RENDER')
@@ -37,25 +38,8 @@ properties_world.WORLD_PT_world.COMPAT_ENGINES.add('POVRAY_RENDER')
properties_world.WORLD_PT_mist.COMPAT_ENGINES.add('POVRAY_RENDER')
del properties_world
-# Example of wrapping every class 'as is'
-from bl_ui import properties_material
-for member in dir(properties_material):
- subclass = getattr(properties_material, member)
- try:
- subclass.COMPAT_ENGINES.add('POVRAY_RENDER')
- except:
- pass
-del properties_material
-
-from bl_ui import properties_data_mesh
-for member in dir(properties_data_mesh):
- subclass = getattr(properties_data_mesh, member)
- try:
- subclass.COMPAT_ENGINES.add('POVRAY_RENDER')
- except:
- pass
-del properties_data_mesh
+# Example of wrapping every class 'as is'
from bl_ui import properties_texture
from bl_ui.properties_texture import context_tex_datablock
for member in dir(properties_texture):
@@ -67,6 +51,20 @@ for member in dir(properties_texture):
del properties_texture
+# Example of wrapping every class 'as is' except some
+from bl_ui import properties_material
+for member in dir(properties_material):
+ subclass = getattr(properties_material, member)
+ if subclass not in (properties_material.MATERIAL_PT_transp_game,
+ properties_material.MATERIAL_PT_game_settings,
+ properties_material.MATERIAL_PT_physics):
+ try:
+ subclass.COMPAT_ENGINES.add('POVRAY_RENDER')
+ except:
+ pass
+del properties_material
+
+
from bl_ui import properties_data_camera
for member in dir(properties_data_camera):
subclass = getattr(properties_data_camera, member)
@@ -76,14 +74,8 @@ for member in dir(properties_data_camera):
pass
del properties_data_camera
-from bl_ui import properties_data_lamp
-for member in dir(properties_data_lamp):
- subclass = getattr(properties_data_lamp, member)
- try:
- subclass.COMPAT_ENGINES.add('POVRAY_RENDER')
- except:
- pass
-del properties_data_lamp
+
+
from bl_ui import properties_particle as properties_particle
for member in dir(properties_particle): # add all "particle" panels from blender
@@ -94,7 +86,6 @@ for member in dir(properties_particle): # add all "particle" panels from blende
pass
del properties_particle
-
class RenderButtonsPanel():
bl_space_type = 'PROPERTIES'
bl_region_type = 'WINDOW'
@@ -153,7 +144,6 @@ class ObjectButtonsPanel():
rd = context.scene.render
return obj and (rd.use_game_engine is False) and (rd.engine in cls.COMPAT_ENGINES)
-
class CameraDataButtonsPanel():
bl_space_type = 'PROPERTIES'
bl_region_type = 'WINDOW'
@@ -190,7 +180,217 @@ class TextButtonsPanel():
rd = context.scene.render
return text and (rd.use_game_engine is False) and (rd.engine in cls.COMPAT_ENGINES)
+from bl_ui import properties_data_mesh
+# These panels are kept
+properties_data_mesh.DATA_PT_custom_props_mesh.COMPAT_ENGINES.add('POVRAY_RENDER')
+properties_data_mesh.DATA_PT_context_mesh.COMPAT_ENGINES.add('POVRAY_RENDER')
+
+## make some native panels contextual to some object variable
+## by recreating custom panels inheriting their properties
+
class PovDataButtonsPanel(properties_data_mesh.MeshButtonsPanel):
    """Base class for POV-Ray mesh-data panels.

    Hides the panel whenever the active object is one of the POV primitive
    proxies listed in POV_OBJECT_TYPES (those get dedicated panels instead).

    Fix vs. original: the unused local ``engine`` in ``poll`` is removed --
    the engine check is already done by the parent class poll.
    """
    COMPAT_ENGINES = {'POVRAY_RENDER'}
    POV_OBJECT_TYPES = {'PLANE', 'BOX', 'SPHERE', 'CYLINDER', 'CONE', 'TORUS', 'BLOB',
                        'ISOSURFACE', 'SUPERELLIPSOID', 'SUPERTORUS', 'HEIGHT_FIELD',
                        'PARAMETRIC', 'POLYCIRCLE'}

    @classmethod
    def poll(cls, context):
        obj = context.object
        # We use our parent class poll func too, avoids to re-define too much things...
        return (super(PovDataButtonsPanel, cls).poll(context) and
                obj and obj.pov.object_as not in cls.POV_OBJECT_TYPES)
+
+
+# We cannot inherit from RNA classes (like e.g. properties_data_mesh.DATA_PT_vertex_groups).
+# Complex py/bpy/rna interactions (with metaclass and all) simply do not allow it to work.
+# So we simply have to explicitly copy here the interesting bits. ;)
class DATA_PT_POV_normals(PovDataButtonsPanel, bpy.types.Panel):
    """Copy of the native mesh 'Normals' panel, hidden for POV primitive proxies."""
    bl_label = properties_data_mesh.DATA_PT_normals.bl_label

    draw = properties_data_mesh.DATA_PT_normals.draw
+
+
class DATA_PT_POV_texture_space(PovDataButtonsPanel, bpy.types.Panel):
    """Copy of the native mesh 'Texture Space' panel, hidden for POV primitive proxies."""
    bl_label = properties_data_mesh.DATA_PT_texture_space.bl_label
    bl_options = properties_data_mesh.DATA_PT_texture_space.bl_options

    draw = properties_data_mesh.DATA_PT_texture_space.draw
+
+
class DATA_PT_POV_vertex_groups(PovDataButtonsPanel, bpy.types.Panel):
    """Copy of the native mesh 'Vertex Groups' panel, hidden for POV primitive proxies."""
    bl_label = properties_data_mesh.DATA_PT_vertex_groups.bl_label

    draw = properties_data_mesh.DATA_PT_vertex_groups.draw
+
+
class DATA_PT_POV_shape_keys(PovDataButtonsPanel, bpy.types.Panel):
    """Copy of the native mesh 'Shape Keys' panel, hidden for POV primitive proxies."""
    bl_label = properties_data_mesh.DATA_PT_shape_keys.bl_label

    draw = properties_data_mesh.DATA_PT_shape_keys.draw
+
+
class DATA_PT_POV_uv_texture(PovDataButtonsPanel, bpy.types.Panel):
    """Copy of the native mesh 'UV Maps' panel, hidden for POV primitive proxies."""
    bl_label = properties_data_mesh.DATA_PT_uv_texture.bl_label

    draw = properties_data_mesh.DATA_PT_uv_texture.draw
+
+
class DATA_PT_POV_vertex_colors(PovDataButtonsPanel, bpy.types.Panel):
    """Copy of the native mesh 'Vertex Colors' panel, hidden for POV primitive proxies."""
    bl_label = properties_data_mesh.DATA_PT_vertex_colors.bl_label

    draw = properties_data_mesh.DATA_PT_vertex_colors.draw
+
+
class DATA_PT_POV_customdata(PovDataButtonsPanel, bpy.types.Panel):
    """Copy of the native mesh 'Geometry Data' panel, hidden for POV primitive proxies."""
    bl_label = properties_data_mesh.DATA_PT_customdata.bl_label
    bl_options = properties_data_mesh.DATA_PT_customdata.bl_options
    draw = properties_data_mesh.DATA_PT_customdata.draw
+
+
+
+del properties_data_mesh
+
+
+################################################################################
+# from bl_ui import properties_data_lamp
+# for member in dir(properties_data_lamp):
+ # subclass = getattr(properties_data_lamp, member)
+ # try:
+ # subclass.COMPAT_ENGINES.add('POVRAY_RENDER')
+ # except:
+ # pass
+# del properties_data_lamp
+#########################LAMPS################################
+
+from bl_ui import properties_data_lamp
+
+# These panels are kept
+properties_data_lamp.DATA_PT_custom_props_lamp.COMPAT_ENGINES.add('POVRAY_RENDER')
+properties_data_lamp.DATA_PT_context_lamp.COMPAT_ENGINES.add('POVRAY_RENDER')
+
+## make some native panels contextual to some object variable
+## by recreating custom panels inheriting their properties
class PovLampButtonsPanel(properties_data_lamp.DataButtonsPanel):
    """Base class for POV-Ray lamp-data panels.

    Hides the panel whenever the active object is a POV lamp proxy listed in
    POV_OBJECT_TYPES (currently only the rainbow primitive).

    Fix vs. original: the unused local ``engine`` in ``poll`` is removed --
    the engine check is already done by the parent class poll.
    """
    COMPAT_ENGINES = {'POVRAY_RENDER'}
    POV_OBJECT_TYPES = {'RAINBOW'}

    @classmethod
    def poll(cls, context):
        obj = context.object
        # We use our parent class poll func too, avoids to re-define too much things...
        return (super(PovLampButtonsPanel, cls).poll(context) and
                obj and obj.pov.object_as not in cls.POV_OBJECT_TYPES)
+
+
+# We cannot inherit from RNA classes (like e.g. properties_data_mesh.DATA_PT_vertex_groups).
+# Complex py/bpy/rna interactions (with metaclass and all) simply do not allow it to work.
+# So we simply have to explicitly copy here the interesting bits. ;)
+
class LAMP_PT_POV_preview(PovLampButtonsPanel, bpy.types.Panel):
    """Copy of the native lamp 'Preview' panel, hidden for POV rainbow proxies."""
    bl_label = properties_data_lamp.DATA_PT_preview.bl_label

    draw = properties_data_lamp.DATA_PT_preview.draw
+
class LAMP_PT_POV_lamp(PovLampButtonsPanel, bpy.types.Panel):
    """Copy of the native 'Lamp' settings panel, hidden for POV rainbow proxies."""
    bl_label = properties_data_lamp.DATA_PT_lamp.bl_label

    draw = properties_data_lamp.DATA_PT_lamp.draw
+
class LAMP_PT_POV_sunsky(PovLampButtonsPanel, bpy.types.Panel):
    """Copy of the native 'Sky & Atmosphere' panel, restricted to SUN lamps."""
    bl_label = properties_data_lamp.DATA_PT_sunsky.bl_label

    @classmethod
    def poll(cls, context):
        # Only visible for sun lamps under a compatible render engine.
        lamp = context.lamp
        engine = context.scene.render.engine
        return (lamp and lamp.type == 'SUN') and (engine in cls.COMPAT_ENGINES)

    draw = properties_data_lamp.DATA_PT_sunsky.draw
+
class LAMP_PT_POV_shadow(PovLampButtonsPanel, bpy.types.Panel):
    """Copy of the native lamp 'Shadow' panel, hidden for POV rainbow proxies."""
    bl_label = properties_data_lamp.DATA_PT_shadow.bl_label

    draw = properties_data_lamp.DATA_PT_shadow.draw
+
class LAMP_PT_POV_area(PovLampButtonsPanel, bpy.types.Panel):
    """Copy of the native 'Area Shape' panel, restricted to AREA lamps."""
    bl_label = properties_data_lamp.DATA_PT_area.bl_label

    @classmethod
    def poll(cls, context):
        # Only visible for area lamps under a compatible render engine.
        lamp = context.lamp
        engine = context.scene.render.engine
        return (lamp and lamp.type == 'AREA') and (engine in cls.COMPAT_ENGINES)

    draw = properties_data_lamp.DATA_PT_area.draw
class LAMP_PT_POV_spot(PovLampButtonsPanel, bpy.types.Panel):
    """Copy of the native 'Spot Shape' panel, restricted to SPOT lamps."""
    bl_label = properties_data_lamp.DATA_PT_spot.bl_label

    @classmethod
    def poll(cls, context):
        # Only visible for spot lamps under a compatible render engine.
        lamp = context.lamp
        engine = context.scene.render.engine
        return (lamp and lamp.type == 'SPOT') and (engine in cls.COMPAT_ENGINES)
    draw = properties_data_lamp.DATA_PT_spot.draw
+
class LAMP_PT_POV_falloff_curve(PovLampButtonsPanel, bpy.types.Panel):
    """Copy of the native 'Falloff Curve' panel.

    Only shown for point/spot lamps using a custom falloff curve.
    """
    bl_label = properties_data_lamp.DATA_PT_falloff_curve.bl_label
    bl_options = properties_data_lamp.DATA_PT_falloff_curve.bl_options

    @classmethod
    def poll(cls, context):
        lamp = context.lamp
        engine = context.scene.render.engine

        return (lamp and lamp.type in {'POINT', 'SPOT'} and lamp.falloff_type == 'CUSTOM_CURVE') and (engine in cls.COMPAT_ENGINES)
    draw = properties_data_lamp.DATA_PT_falloff_curve.draw
+
class OBJECT_PT_povray_obj_rainbow(PovLampButtonsPanel, bpy.types.Panel):
    """Panel exposing POV-Ray rainbow parameters on the proxy lamp object.

    While locked, current values are shown read-only; unlocking exposes the
    editable properties plus an update operator to resync the 3D proxy.
    """
    bl_label = "POV-Ray Rainbow"
    COMPAT_ENGINES = {'POVRAY_RENDER'}
    #bl_options = {'HIDE_HEADER'}
    @classmethod
    def poll(cls, context):
        engine = context.scene.render.engine
        obj = context.object
        return (obj and obj.pov.object_as == 'RAINBOW' and (engine in cls.COMPAT_ENGINES))
    def draw(self, context):
        layout = self.layout

        obj = context.object

        col = layout.column()

        if obj.pov.object_as == 'RAINBOW':
            if obj.pov.unlock_parameters == False:
                col.prop(obj.pov, "unlock_parameters", text="Exported parameters below", icon='LOCKED')
                col.label(text="Rainbow projection angle: " + str(obj.data.spot_size))
                col.label(text="Rainbow width: " + str(obj.data.spot_blend))
                col.label(text="Rainbow distance: " + str(obj.data.shadow_buffer_clip_start))
                col.label(text="Rainbow arc angle: " + str(obj.pov.arc_angle))
                col.label(text="Rainbow falloff angle: " + str(obj.pov.falloff_angle))

            else:
                col.prop(obj.pov, "unlock_parameters", text="Edit exported parameters", icon='UNLOCKED')
                col.label(text="3D view proxy may get out of synch")
                col.active = obj.pov.unlock_parameters


                # NOTE(review): operator id "pov.cone_update" and icon MESH_CONE look
                # copy-pasted from the cone panel -- confirm whether a dedicated
                # rainbow update operator exists.
                layout.operator("pov.cone_update", text="Update",icon="MESH_CONE")

                #col.label(text="Parameters:")
                col.prop(obj.data, "spot_size", text="Rainbow Projection Angle")
                col.prop(obj.data, "spot_blend", text="Rainbow width")
                col.prop(obj.data, "shadow_buffer_clip_start", text="Visibility distance")
                col.prop(obj.pov, "arc_angle")
                col.prop(obj.pov, "falloff_angle")
+
+del properties_data_lamp
+###############################################################################
+
class RENDER_PT_povray_export_settings(RenderButtonsPanel, bpy.types.Panel):
bl_label = "Export Settings"
COMPAT_ENGINES = {'POVRAY_RENDER'}
@@ -267,9 +467,11 @@ class RENDER_PT_povray_antialias(RenderButtonsPanel, bpy.types.Panel):
bl_label = "Anti-Aliasing"
COMPAT_ENGINES = {'POVRAY_RENDER'}
+
def draw_header(self, context):
+ prefs = bpy.context.user_preferences.addons[__package__].preferences
scene = context.scene
- if bpy.context.user_preferences.addons[__package__].preferences.branch_feature_set_povray != 'uberpov' and scene.pov.antialias_method =='2':
+ if prefs.branch_feature_set_povray != 'uberpov' and scene.pov.antialias_method == '2':
self.layout.prop(scene.pov, "antialias_enable", text="", icon='ERROR')
elif scene.pov.antialias_enable:
self.layout.prop(scene.pov, "antialias_enable", text="", icon='ANTIALIASED')
@@ -277,16 +479,16 @@ class RENDER_PT_povray_antialias(RenderButtonsPanel, bpy.types.Panel):
self.layout.prop(scene.pov, "antialias_enable", text="", icon='ALIASED')
def draw(self, context):
+ prefs = bpy.context.user_preferences.addons[__package__].preferences
layout = self.layout
-
scene = context.scene
-
+
layout.active = scene.pov.antialias_enable
-
row = layout.row()
row.prop(scene.pov, "antialias_method", text="")
- if bpy.context.user_preferences.addons[__package__].preferences.branch_feature_set_povray != 'uberpov' and scene.pov.antialias_method =='2':
+
+ if prefs.branch_feature_set_povray != 'uberpov' and scene.pov.antialias_method == '2':
col = layout.column()
col.alignment = 'CENTER'
col.label(text="Stochastic Anti Aliasing is")
@@ -309,11 +511,11 @@ class RENDER_PT_povray_antialias(RenderButtonsPanel, bpy.types.Panel):
row = layout.row()
row.prop(scene.pov, "antialias_threshold", text="AA Threshold")
row.prop(scene.pov, "antialias_gamma", text="AA Gamma")
-
- if bpy.context.user_preferences.addons[__package__].preferences.branch_feature_set_povray == 'uberpov':
+
+ if prefs.branch_feature_set_povray == 'uberpov':
row = layout.row()
row.prop(scene.pov, "antialias_confidence", text="AA Confidence")
- if scene.pov.antialias_method =='2':
+ if scene.pov.antialias_method == '2':
row.enabled = True
else:
row.enabled = False
@@ -691,22 +893,26 @@ class TEXTURE_PT_povray_parameters(TextureButtonsPanel, bpy.types.Panel):
"f_quartic_saddle","f_sphere","f_steiners_roman",
"f_torus_gumdrop","f_umbrella"}:
func = 1
- if tex.pov.func_list in {"f_bicorn","f_bifolia","f_boy_surface","f_superellipsoid","f_torus"}:
+ if tex.pov.func_list in {"f_bicorn","f_bifolia","f_boy_surface","f_superellipsoid",
+ "f_torus"}:
func = 2
if tex.pov.func_list in {"f_ellipsoid","f_folium_surface","f_hyperbolic_torus",
"f_kampyle_of_eudoxus","f_parabolic_torus",
"f_quartic_cylinder","f_torus2"}:
func = 3
if tex.pov.func_list in {"f_blob2","f_cross_ellipsoids","f_flange_cover",
- "f_isect_ellipsoids","f_kummer_surface_v2","f_ovals_of_cassini",
- "f_rounded_box","f_spikes_2d","f_strophoid"}:
+ "f_isect_ellipsoids","f_kummer_surface_v2",
+ "f_ovals_of_cassini","f_rounded_box","f_spikes_2d",
+ "f_strophoid"}:
func = 4
- if tex.pov.func_list in {"f_algbr_cyl1","f_algbr_cyl2","f_algbr_cyl3","f_algbr_cyl4",
- "f_blob","f_mesh1","f_poly4","f_spikes"}:
+ if tex.pov.func_list in {"f_algbr_cyl1","f_algbr_cyl2","f_algbr_cyl3",
+ "f_algbr_cyl4","f_blob","f_mesh1","f_poly4","f_spikes"}:
func = 5
- if tex.pov.func_list in {"f_devils_curve_2d","f_dupin_cyclid","f_folium_surface_2d",
- "f_hetero_mf","f_kampyle_of_eudoxus_2d","f_lemniscate_of_gerono_2d",
- "f_polytubes","f_ridge","f_ridged_mf","f_spiral","f_witch_of_agnesi"}:
+ if tex.pov.func_list in {"f_devils_curve_2d","f_dupin_cyclid",
+ "f_folium_surface_2d","f_hetero_mf",
+ "f_kampyle_of_eudoxus_2d","f_lemniscate_of_gerono_2d",
+ "f_polytubes","f_ridge","f_ridged_mf","f_spiral",
+ "f_witch_of_agnesi"}:
func = 6
if tex.pov.func_list in {"f_helix1","f_helix2","f_piriform_2d","f_strophoid_2d"}:
func = 7
@@ -844,6 +1050,249 @@ class OBJECT_PT_povray_obj_importance(ObjectButtonsPanel, bpy.types.Panel):
col.prop(obj.pov, "spacing_multiplier", text="Photons Spacing Multiplier")
class OBJECT_PT_povray_obj_sphere(PovDataButtonsPanel, bpy.types.Panel):
    """Object panel for the POV-Ray sphere primitive parameters.

    Shown only for objects tagged as POV 'SPHERE' proxies. While locked,
    current values are shown read-only; unlocking exposes the editable
    radius plus an update operator to resync the 3D proxy.
    """
    bl_label = "POV-Ray Sphere"
    COMPAT_ENGINES = {'POVRAY_RENDER'}
    #bl_options = {'HIDE_HEADER'}

    @classmethod
    def poll(cls, context):
        engine = context.scene.render.engine
        obj = context.object
        return (obj and obj.pov.object_as == 'SPHERE' and (engine in cls.COMPAT_ENGINES))

    def draw(self, context):
        layout = self.layout
        obj = context.object
        col = layout.column()

        if obj.pov.object_as == 'SPHERE':
            # PEP 8: truth test instead of '== False' comparison.
            if not obj.pov.unlock_parameters:
                col.prop(obj.pov, "unlock_parameters", text="Exported parameters below", icon='LOCKED')
                col.label(text="Sphere radius: " + str(obj.pov.sphere_radius))
            else:
                col.prop(obj.pov, "unlock_parameters", text="Edit exported parameters", icon='UNLOCKED')
                col.label(text="3D view proxy may get out of synch")
                col.active = obj.pov.unlock_parameters

                layout.operator("pov.sphere_update", text="Update", icon="SOLID")

                #col.label(text="Parameters:")
                col.prop(obj.pov, "sphere_radius", text="Radius of Sphere")
+
+
class OBJECT_PT_povray_obj_cone(PovDataButtonsPanel, bpy.types.Panel):
    """Object panel for the POV-Ray cone primitive parameters.

    Shown only for objects tagged as POV 'CONE' proxies. While locked,
    current values are shown read-only; unlocking exposes the editable
    properties plus an update operator to resync the 3D proxy.
    """
    bl_label = "POV-Ray Cone"
    COMPAT_ENGINES = {'POVRAY_RENDER'}
    #bl_options = {'HIDE_HEADER'}

    @classmethod
    def poll(cls, context):
        engine = context.scene.render.engine
        obj = context.object
        return (obj and obj.pov.object_as == 'CONE' and (engine in cls.COMPAT_ENGINES))

    def draw(self, context):
        layout = self.layout
        obj = context.object
        col = layout.column()

        if obj.pov.object_as == 'CONE':
            # PEP 8: truth test instead of '== False' comparison.
            if not obj.pov.unlock_parameters:
                col.prop(obj.pov, "unlock_parameters", text="Exported parameters below", icon='LOCKED')
                col.label(text="Cone base radius: " + str(obj.pov.cone_base_radius))
                col.label(text="Cone cap radius: " + str(obj.pov.cone_cap_radius))
                col.label(text="Cone proxy segments: " + str(obj.pov.cone_segments))
                col.label(text="Cone height: " + str(obj.pov.cone_height))
            else:
                col.prop(obj.pov, "unlock_parameters", text="Edit exported parameters", icon='UNLOCKED')
                col.label(text="3D view proxy may get out of synch")
                col.active = obj.pov.unlock_parameters

                layout.operator("pov.cone_update", text="Update", icon="MESH_CONE")

                #col.label(text="Parameters:")
                col.prop(obj.pov, "cone_base_radius", text="Radius of Cone Base")
                col.prop(obj.pov, "cone_cap_radius", text="Radius of Cone Cap")
                col.prop(obj.pov, "cone_segments", text="Segmentation of Cone proxy")
                col.prop(obj.pov, "cone_height", text="Height of the cone")
+
class OBJECT_PT_povray_obj_superellipsoid(PovDataButtonsPanel, bpy.types.Panel):
    """Object panel for the POV-Ray superquadric-ellipsoid primitive parameters.

    Shown only for objects tagged as POV 'SUPERELLIPSOID' proxies. While
    locked, current values are shown read-only; unlocking exposes the
    editable properties plus an update operator to resync the 3D proxy.
    """
    bl_label = "POV-Ray Superquadric ellipsoid"
    COMPAT_ENGINES = {'POVRAY_RENDER'}
    #bl_options = {'HIDE_HEADER'}

    @classmethod
    def poll(cls, context):
        engine = context.scene.render.engine
        obj = context.object
        return (obj and obj.pov.object_as == 'SUPERELLIPSOID' and (engine in cls.COMPAT_ENGINES))

    def draw(self, context):
        layout = self.layout
        obj = context.object
        col = layout.column()

        if obj.pov.object_as == 'SUPERELLIPSOID':
            # PEP 8: truth test instead of '== False' comparison.
            if not obj.pov.unlock_parameters:
                col.prop(obj.pov, "unlock_parameters", text="Exported parameters below", icon='LOCKED')
                col.label(text="Radial segmentation: " + str(obj.pov.se_u))
                col.label(text="Lateral segmentation: " + str(obj.pov.se_v))
                col.label(text="Ring shape: " + str(obj.pov.se_n1))
                col.label(text="Cross-section shape: " + str(obj.pov.se_n2))
                col.label(text="Fill up and down: " + str(obj.pov.se_edit))
            else:
                col.prop(obj.pov, "unlock_parameters", text="Edit exported parameters", icon='UNLOCKED')
                col.label(text="3D view proxy may get out of synch")
                col.active = obj.pov.unlock_parameters

                layout.operator("pov.superellipsoid_update", text="Update", icon="MOD_SUBSURF")

                #col.label(text="Parameters:")
                col.prop(obj.pov, "se_u")
                col.prop(obj.pov, "se_v")
                col.prop(obj.pov, "se_n1")
                col.prop(obj.pov, "se_n2")
                col.prop(obj.pov, "se_edit")
+
+
class OBJECT_PT_povray_obj_torus(PovDataButtonsPanel, bpy.types.Panel):
    """Object panel for the POV-Ray torus primitive parameters.

    Shown only for objects tagged as POV 'TORUS' proxies. While locked,
    current values are shown read-only; unlocking exposes the editable
    properties plus an update operator to resync the 3D proxy.
    """
    bl_label = "POV-Ray Torus"
    COMPAT_ENGINES = {'POVRAY_RENDER'}
    #bl_options = {'HIDE_HEADER'}

    @classmethod
    def poll(cls, context):
        engine = context.scene.render.engine
        obj = context.object
        return (obj and obj.pov.object_as == 'TORUS' and (engine in cls.COMPAT_ENGINES))

    def draw(self, context):
        layout = self.layout
        obj = context.object
        col = layout.column()

        if obj.pov.object_as == 'TORUS':
            # PEP 8: truth test instead of '== False' comparison.
            if not obj.pov.unlock_parameters:
                col.prop(obj.pov, "unlock_parameters", text="Exported parameters below", icon='LOCKED')
                col.label(text="Torus major radius: " + str(obj.pov.torus_major_radius))
                col.label(text="Torus minor radius: " + str(obj.pov.torus_minor_radius))
                col.label(text="Torus major segments: " + str(obj.pov.torus_major_segments))
                col.label(text="Torus minor segments: " + str(obj.pov.torus_minor_segments))
            else:
                col.prop(obj.pov, "unlock_parameters", text="Edit exported parameters", icon='UNLOCKED')
                col.label(text="3D view proxy may get out of synch")
                col.active = obj.pov.unlock_parameters

                layout.operator("pov.torus_update", text="Update", icon="MESH_TORUS")

                #col.label(text="Parameters:")
                col.prop(obj.pov, "torus_major_radius")
                col.prop(obj.pov, "torus_minor_radius")
                col.prop(obj.pov, "torus_major_segments")
                col.prop(obj.pov, "torus_minor_segments")
+
class OBJECT_PT_povray_obj_supertorus(PovDataButtonsPanel, bpy.types.Panel):
    """Object panel for the POV-Ray supertorus primitive parameters.

    Shown only for objects tagged as POV 'SUPERTORUS' proxies.

    Fixes vs. original: the locked read-out referenced an undefined name
    ``ob`` for st_accuracy and st_max_gradient (NameError at draw time);
    both now read from ``obj`` like every other line.
    """
    bl_label = "POV-Ray SuperTorus"
    COMPAT_ENGINES = {'POVRAY_RENDER'}
    #bl_options = {'HIDE_HEADER'}

    @classmethod
    def poll(cls, context):
        engine = context.scene.render.engine
        obj = context.object
        return (obj and obj.pov.object_as == 'SUPERTORUS' and (engine in cls.COMPAT_ENGINES))

    def draw(self, context):
        layout = self.layout
        obj = context.object
        col = layout.column()

        if obj.pov.object_as == 'SUPERTORUS':
            # PEP 8: truth test instead of '== False' comparison.
            if not obj.pov.unlock_parameters:
                col.prop(obj.pov, "unlock_parameters", text="Exported parameters below", icon='LOCKED')
                col.label(text="SuperTorus major radius: " + str(obj.pov.st_major_radius))
                col.label(text="SuperTorus minor radius: " + str(obj.pov.st_minor_radius))
                col.label(text="SuperTorus major segments: " + str(obj.pov.st_u))
                col.label(text="SuperTorus minor segments: " + str(obj.pov.st_v))

                col.label(text="SuperTorus Ring Manipulator: " + str(obj.pov.st_ring))
                col.label(text="SuperTorus Cross Manipulator: " + str(obj.pov.st_cross))
                col.label(text="SuperTorus Internal And External radii: " + str(obj.pov.st_ie))

                # Bug fix: these two read from undefined 'ob' in the original.
                col.label(text="SuperTorus accuracy: " + str(obj.pov.st_accuracy))
                col.label(text="SuperTorus max gradient: " + str(obj.pov.st_max_gradient))
            else:
                col.prop(obj.pov, "unlock_parameters", text="Edit exported parameters", icon='UNLOCKED')
                col.label(text="3D view proxy may get out of synch")
                col.active = obj.pov.unlock_parameters

                layout.operator("pov.supertorus_update", text="Update", icon="MESH_TORUS")

                #col.label(text="Parameters:")
                col.prop(obj.pov, "st_major_radius")
                col.prop(obj.pov, "st_minor_radius")
                col.prop(obj.pov, "st_u")
                col.prop(obj.pov, "st_v")
                col.prop(obj.pov, "st_ring")
                col.prop(obj.pov, "st_cross")
                col.prop(obj.pov, "st_ie")
                #col.prop(obj.pov, "st_edit") #?
                col.prop(obj.pov, "st_accuracy")
                col.prop(obj.pov, "st_max_gradient")
+
class OBJECT_PT_povray_obj_parametric(PovDataButtonsPanel, bpy.types.Panel):
    """Object panel for the POV-Ray parametric surface primitive.

    Shown only for objects tagged as POV 'PARAMETRIC' proxies.

    Fixes vs. original: the locked read-out printed 'Minimum V' twice
    (reading v_min both times) instead of 'Maximum V'/v_max, showed x_eq
    for the Z function instead of z_eq, and the editable v_max property
    was mislabeled 'Minimum V'.
    """
    bl_label = "POV-Ray Parametric surface"
    COMPAT_ENGINES = {'POVRAY_RENDER'}
    #bl_options = {'HIDE_HEADER'}

    @classmethod
    def poll(cls, context):
        engine = context.scene.render.engine
        obj = context.object
        return (obj and obj.pov.object_as == 'PARAMETRIC' and (engine in cls.COMPAT_ENGINES))

    def draw(self, context):
        layout = self.layout
        obj = context.object
        col = layout.column()

        if obj.pov.object_as == 'PARAMETRIC':
            # PEP 8: truth test instead of '== False' comparison.
            if not obj.pov.unlock_parameters:
                col.prop(obj.pov, "unlock_parameters", text="Exported parameters below", icon='LOCKED')
                col.label(text="Minimum U: " + str(obj.pov.u_min))
                col.label(text="Minimum V: " + str(obj.pov.v_min))
                col.label(text="Maximum U: " + str(obj.pov.u_max))
                col.label(text="Maximum V: " + str(obj.pov.v_max))  # was a v_min duplicate
                col.label(text="X Function: " + str(obj.pov.x_eq))
                col.label(text="Y Function: " + str(obj.pov.y_eq))
                col.label(text="Z Function: " + str(obj.pov.z_eq))  # was x_eq
            else:
                col.prop(obj.pov, "unlock_parameters", text="Edit exported parameters", icon='UNLOCKED')
                col.label(text="3D view proxy may get out of synch")
                col.active = obj.pov.unlock_parameters

                layout.operator("pov.parametric_update", text="Update", icon="SCRIPTPLUGINS")

                col.prop(obj.pov, "u_min", text="Minimum U")
                col.prop(obj.pov, "v_min", text="Minimum V")
                col.prop(obj.pov, "u_max", text="Maximum U")
                col.prop(obj.pov, "v_max", text="Maximum V")  # label fixed (was 'Minimum V')
                col.prop(obj.pov, "x_eq", text="X Function")
                col.prop(obj.pov, "y_eq", text="Y Function")
                col.prop(obj.pov, "z_eq", text="Z Function")
+
+
class OBJECT_PT_povray_replacement_text(ObjectButtonsPanel, bpy.types.Panel):
bl_label = "Custom POV Code"
COMPAT_ENGINES = {'POVRAY_RENDER'}
@@ -857,7 +1306,86 @@ class OBJECT_PT_povray_replacement_text(ObjectButtonsPanel, bpy.types.Panel):
col.label(text="Replace properties with:")
col.prop(obj.pov, "replacement_text", text="")
+###############################################################################
+# Add Povray Objects
+###############################################################################
+
+class Povray_primitives_add_menu(bpy.types.Menu):
+ """Define the menu with presets"""
+ bl_idname = "Povray_primitives_add_menu"
+ bl_label = "Povray"
+ COMPAT_ENGINES = {'POVRAY_RENDER'}
+
+ @classmethod
+ def poll(cls, context):
+ engine = context.scene.render.engine
+ return (engine == 'POVRAY_RENDER')
+
+ def draw(self,context):
+ layout = self.layout
+ layout.operator_context = 'INVOKE_REGION_WIN'
+ layout.menu(BasicShapesMenu.bl_idname, text = "Primitives",icon="GROUP")
+ layout.menu(ImportMenu.bl_idname, text = "Import",icon="IMPORT")
+
+class BasicShapesMenu(bpy.types.Menu):
+ bl_idname = "Basic_shapes_calls"
+ bl_label = "Basic_shapes"
+
+ def draw(self,context):
+ pov = bpy.types.Object.pov #context.object.pov ?
+ layout = self.layout
+ layout.operator_context = 'INVOKE_REGION_WIN'
+ layout.operator("pov.addplane", text="Infinite Plane",icon = 'MESH_PLANE')
+ layout.operator("pov.addbox", text="Box",icon = 'MESH_CUBE')
+ layout.operator("pov.addsphere", text="Sphere",icon = 'SOLID')
+ layout.operator("pov.addcylinder", text="Cylinder",icon="MESH_CYLINDER")
+ layout.operator("pov.cone_add", text="Cone",icon="MESH_CONE")
+ layout.operator("pov.addtorus", text="Torus",icon = 'MESH_TORUS')
+ layout.separator()
+ layout.operator("pov.addparametric", text="Parametric",icon = 'SCRIPTPLUGINS')
+ layout.operator("pov.addrainbow", text="Rainbow",icon="COLOR")
+ layout.operator("pov.addlathe", text="Lathe",icon = 'MOD_SCREW')
+ layout.operator("pov.addprism", text="Prism",icon = 'MOD_SOLIDIFY')
+ layout.operator("pov.addsuperellipsoid", text="Superquadric Ellipsoid",icon = 'MOD_SUBSURF')
+ layout.operator("pov.addheightfield", text="Height Field",icon="RNDCURVE")
+ layout.operator("pov.addspheresweep", text="Sphere Sweep",icon = 'FORCE_CURVE')
+ layout.separator()
+ layout.operator("pov.addblobsphere", text="Blob Sphere",icon = 'META_DATA')
+ layout.separator()
+ layout.label("Isosurfaces")
+ layout.operator("pov.addisosurfacebox", text="Isosurface Box",icon="META_CUBE")
+ layout.operator("pov.addisosurfacesphere", text="Isosurface Sphere",icon="META_BALL")
+ layout.operator("pov.addsupertorus", text="Supertorus",icon="SURFACE_NTORUS")
+ layout.separator()
+ layout.label(text = "Macro based")
+ layout.operator("pov.addpolygontocircle", text="Polygon To Circle Blending",icon="RETOPO")
+ layout.operator("pov.addloft", text="Loft",icon="SURFACE_NSURFACE")
+
+class ImportMenu(bpy.types.Menu):
+ bl_idname = "Importer_calls"
+ bl_label = "Import"
+
+ def draw(self,context):
+ pov = bpy.types.Object.pov #context.object.pov ?
+ layout = self.layout
+ layout.operator_context = 'INVOKE_REGION_WIN'
+ layout.operator("import_scene.avogadro", text="Avogadro",icon="FORCE_LENNARDJONES")
+
+def menu_func_add(self, context):
+ engine = context.scene.render.engine
+ if engine == 'POVRAY_RENDER':
+ self.layout.menu("Povray_primitives_add_menu", icon="PLUGIN")
+
+def menu_func_import(self, context):
+ engine = context.scene.render.engine
+ if engine == 'POVRAY_RENDER':
+ self.layout.operator("import_scene.avogadro", text="POV-Ray Avogadro (.pov)",icon="FORCE_LENNARDJONES")
+
+
+###############################################################################
+# Camera Povray Settings
+###############################################################################
class CAMERA_PT_povray_cam_dof(CameraDataButtonsPanel, bpy.types.Panel):
bl_label = "POV-Ray Depth Of Field"
COMPAT_ENGINES = {'POVRAY_RENDER'}
@@ -911,3 +1439,4 @@ class TEXT_PT_povray_custom_code(TextButtonsPanel, bpy.types.Panel):
text = context.space_data.text
if text:
layout.prop(text.pov, "custom_code", text="Add as POV code")
+
diff --git a/render_renderfarmfi/__init__.py b/render_renderfarmfi/__init__.py
deleted file mode 100644
index 40c601be..00000000
--- a/render_renderfarmfi/__init__.py
+++ /dev/null
@@ -1,224 +0,0 @@
-# ##### BEGIN GPL LICENSE BLOCK #####
-#
-# This program is free software; you can redistribute it and/or
-# modify it under the terms of the GNU General Public License
-# as published by the Free Software Foundation; either version 2
-# of the License, or (at your option) any later version.
-#
-# This program is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License
-# along with this program; if not, write to the Free Software Foundation,
-# Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
-#
-# ##### END GPL LICENSE BLOCK #####
-
-bl_info = {
- "name": "Renderfarm.fi",
- "author": "Nathan Letwory <nathan@letworyinteractive.com>, "
- "Jesse Kaukonen <jesse.kaukonen@gmail.com>",
- "version": (23,),
- "blender": (2, 63, 0),
- "location": "Render > Engine > Renderfarm.fi",
- "description": "Send .blend as session to http://www.renderfarm.fi to render",
- "warning": "",
- "wiki_url": "http://wiki.blender.org/index.php/Extensions:2.5/Py/"
- "Scripts/Render/Renderfarm.fi",
- "category": "Render",
-}
-
-"""
-Copyright 2009-2013 Laurea University of Applied Sciences
-Authors: Nathan Letwory, Jesse Kaukonen
-"""
-
-import bpy
-import hashlib
-import http.client
-import math
-from os.path import isabs, isfile, join, exists
-import os
-import time
-
-from bpy.props import PointerProperty, StringProperty, BoolProperty, EnumProperty, IntProperty, CollectionProperty
-
-from .panels import *
-from .operators import *
-from .rpc import RffiRpc
-
-bpy.CURRENT_VERSION = bl_info["version"][0]
-bpy.found_newer_version = False
-bpy.up_to_date = False
-bpy.download_location = 'http://www.renderfarm.fi/blender'
-
-bpy.rffi_creds_found = False
-bpy.rffi_user = ''
-bpy.rffi_hash = ''
-bpy.passwordCorrect = False
-bpy.loginInserted = False
-bpy.rffi_accepting = False
-bpy.rffi_motd = ''
-
-bpy.errorMessages = {
- 'missing_desc': 'You need to enter a title, short and long description',
- 'missing_creds': 'You haven\'t entered your credentials yet'
-}
-
-bpy.statusMessage = {
- 'title': 'TRIA_RIGHT',
- 'shortdesc': 'TRIA_RIGHT',
- 'tags': 'TRIA_RIGHT',
- 'longdesc': 'TRIA_RIGHT',
- 'username': 'TRIA_RIGHT',
- 'password': 'TRIA_RIGHT'
-}
-
-bpy.errors = []
-bpy.ore_sessions = []
-bpy.ore_completed_sessions = []
-bpy.ore_active_sessions = []
-bpy.ore_rejected_sessions = []
-bpy.ore_pending_sessions = []
-bpy.ore_active_session_queue = []
-bpy.ore_complete_session_queue = []
-bpy.queue_selected = -1
-bpy.errorStartTime = -1.0
-bpy.infoError = False
-bpy.cancelError = False
-bpy.texturePackError = False
-bpy.linkedFileError = False
-bpy.uploadInProgress = False
-try:
- bpy.originalFileName = bpy.data.filepath
-except:
- bpy.originalFileName = 'untitled.blend'
-bpy.particleBakeWarning = False
-bpy.childParticleWarning = False
-bpy.simulationWarning = False
-bpy.file_format_warning = False
-bpy.ready = False
-
-
-def renderEngine(render_engine):
- bpy.utils.register_class(render_engine)
- return render_engine
-
-licenses = (
- ('1', 'CC by-nc-nd', 'Creative Commons: Attribution Non-Commercial No Derivatives'),
- ('2', 'CC by-nc-sa', 'Creative Commons: Attribution Non-Commercial Share Alike'),
- ('3', 'CC by-nd', 'Creative Commons: Attribution No Derivatives'),
- ('4', 'CC by-nc', 'Creative Commons: Attribution Non-Commercial'),
- ('5', 'CC by-sa', 'Creative Commons: Attribution Share Alike'),
- ('6', 'CC by', 'Creative Commons: Attribution'),
- ('7', 'Copyright', 'Copyright, no license specified'),
- )
-
-class ORESession(bpy.types.PropertyGroup):
- name = StringProperty(name='Name', description='Name of the session', maxlen=128, default='[session]')
-
-class ORESettings(bpy.types.PropertyGroup):
- username = StringProperty(name='E-mail', description='E-mail for Renderfarm.fi', maxlen=256, default='')
- password = StringProperty(name='Password', description='Renderfarm.fi password', maxlen=256, default='')
-
- shortdesc = StringProperty(name='Short description', description='A short description of the scene (100 characters)', maxlen=101, default='-')
- tags = StringProperty(name='Tags', description='A list of tags that best suit the animation', maxlen=102, default='')
- longdesc = StringProperty(name='Description', description='Description of the scene (2k)', maxlen=2048, default='')
- title = StringProperty(name='Title', description='Title for this session (128 characters)', maxlen=128, default='')
- url = StringProperty(name='Project URL', description='Project URL. Leave empty if not applicable', maxlen=256, default='')
- engine = StringProperty(name='Engine', description='The rendering engine that is used for rendering', maxlen=64, default='blender')
- samples = IntProperty(name='Samples', description='Number of samples that is used (Cycles only)', min=1, max=1000000, soft_min=1, soft_max=100000, default=100)
- subsamples = IntProperty(name='Subsample Frames', description='Number of subsample frames that is used (Cycles only)', min=1, max=1000000, soft_min=1, soft_max=1000, default=10)
- file_format = StringProperty(name='File format', description='File format used for the rendering', maxlen=30, default='PNG_FORMAT')
-
- parts = IntProperty(name='Parts/Frame', description='', min=1, max=1000, soft_min=1, soft_max=64, default=1)
- resox = IntProperty(name='Resolution X', description='X of render', min=1, max=10000, soft_min=1, soft_max=10000, default=1920)
- resoy = IntProperty(name='Resolution Y', description='Y of render', min=1, max=10000, soft_min=1, soft_max=10000, default=1080)
- memusage = IntProperty(name='Memory Usage', description='Estimated maximum memory usage during rendering in MB', min=1, max=6*1024, soft_min=1, soft_max=3*1024, default=256)
- start = IntProperty(name='Start Frame', description='Start Frame', default=1)
- end = IntProperty(name='End Frame', description='End Frame', default=250)
- fps = IntProperty(name='FPS', description='FPS', min=1, max=120, default=25)
-
- prepared = BoolProperty(name='Prepared', description='Set to True if preparation has been run', default=False)
- debug = BoolProperty(name='Debug', description='Verbose output in console', default=False)
- selected_session = IntProperty(name='Selected Session', description='The selected session', default=0)
- hasUnsupportedSimulation = BoolProperty(name='HasSimulation', description='Set to True if therea re unsupported simulations', default=False)
-
- inlicense = EnumProperty(items=licenses, name='Scene license', description='License speficied for the source files', default='1')
- outlicense = EnumProperty(items=licenses, name='Product license', description='License speficied for the output files', default='1')
- sessions = CollectionProperty(type=ORESession, name='Sessions', description='Sessions on Renderfarm.fi')
- completed_sessions = CollectionProperty(type=ORESession, name='Completed sessions', description='Sessions that have been already rendered')
- rejected_sessions = CollectionProperty(type=ORESession, name='Rejected sessions', description='Sessions that have been rejected')
- pending_sessions = CollectionProperty(type=ORESession, name='Pending sessions', description='Sessions that are waiting for approval')
- active_sessions = CollectionProperty(type=ORESession, name='Active sessions', description='Sessions that are currently rendering')
- all_sessions = CollectionProperty(type=ORESession, name='All sessions', description='List of all of the users sessions')
-
-# session struct
-
-
-class RENDERFARM_MT_Session(bpy.types.Menu):
- bl_label = "Show Session"
-
- def draw(self, context):
- layout = self.layout
- ore = context.scene.ore_render
-
- if (bpy.loginInserted == True):
- layout.operator('ore.completed_sessions')
- layout.operator('ore.accept_sessions')
- layout.operator('ore.active_sessions')
- layout.separator()
- layout.operator('ore.cancelled_sessions')
- else:
- row = layout.row()
- row.label(text="You must login first")
-
-
-class RenderfarmFi(bpy.types.RenderEngine):
- bl_idname = 'RENDERFARMFI_RENDER'
- bl_label = "Renderfarm.fi"
-
- def render(self, scene):
- print('Do test renders with Blender Render')
-
-def register():
- bpy.utils.register_module(__name__)
- bpy.types.Scene.ore_render = PointerProperty(type=ORESettings, name='ORE Render', description='ORE Render Settings')
-
-def unregister():
- bpy.utils.unregister_module(__name__)
-
-if __name__ == "__main__":
- register()
-
-# all panels, except render panel
-# Example of wrapping every class 'as is'
-from bl_ui import properties_scene
-for member in dir(properties_scene):
- subclass = getattr(properties_scene, member)
- try: subclass.COMPAT_ENGINES.add('RENDERFARMFI_RENDER')
- except: pass
-del properties_scene
-
-from bl_ui import properties_world
-for member in dir(properties_world):
- subclass = getattr(properties_world, member)
- try: subclass.COMPAT_ENGINES.add('RENDERFARMFI_RENDER')
- except: pass
-del properties_world
-
-from bl_ui import properties_material
-for member in dir(properties_material):
- subclass = getattr(properties_material, member)
- try: subclass.COMPAT_ENGINES.add('RENDERFARMFI_RENDER')
- except: pass
-del properties_material
-
-from bl_ui import properties_object
-for member in dir(properties_object):
- subclass = getattr(properties_object, member)
- try: subclass.COMPAT_ENGINES.add('RENDERFARMFI_RENDER')
- except: pass
-del properties_object
diff --git a/render_renderfarmfi/exceptions.py b/render_renderfarmfi/exceptions.py
deleted file mode 100644
index 4d62562c..00000000
--- a/render_renderfarmfi/exceptions.py
+++ /dev/null
@@ -1,41 +0,0 @@
-# ##### BEGIN GPL LICENSE BLOCK #####
-#
-# This program is free software; you can redistribute it and/or
-# modify it under the terms of the GNU General Public License
-# as published by the Free Software Foundation; either version 2
-# of the License, or (at your option) any later version.
-#
-# This program is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License
-# along with this program; if not, write to the Free Software Foundation,
-# Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
-#
-# ##### END GPL LICENSE BLOCK #####
-
-class RenderfarmException(Exception):
- def __init__(self, msg="no reason given"):
- self.message = msg
- def __str__(self):
- return "RenderfarmException: "+self.message
-
-class LoginFailedException(Exception):
- def __init__(self, msg="no reason given"):
- self.message = msg
- def __str__(self):
- return "Login failed: "+self.message
-
-class SessionCancelFailedException(Exception):
- def __init__(self, msg="no reason given"):
- self.message = msg
- def __str__(self):
- return "Session could not be cancelled: "+self.message
-
-class GetSessionsFailedException(Exception):
- def __init__(self, msg="no reason given"):
- self.message = msg
- def __str__(self):
- return "Session List could not be fetched: "+self.message
diff --git a/render_renderfarmfi/operators.py b/render_renderfarmfi/operators.py
deleted file mode 100644
index 2c867934..00000000
--- a/render_renderfarmfi/operators.py
+++ /dev/null
@@ -1,347 +0,0 @@
-# ##### BEGIN GPL LICENSE BLOCK #####
-#
-# This program is free software; you can redistribute it and/or
-# modify it under the terms of the GNU General Public License
-# as published by the Free Software Foundation; either version 2
-# of the License, or (at your option) any later version.
-#
-# This program is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License
-# along with this program; if not, write to the Free Software Foundation,
-# Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
-#
-# ##### END GPL LICENSE BLOCK #####
-
-import hashlib
-
-import bpy
-
-from .utils import _write_credentials, _read_credentials
-from .prepare import _prepare_scene
-from .upload import _ore_upload
-from .rpc import rffi, _do_refresh
-from .exceptions import LoginFailedException, SessionCancelFailedException
-
-class OpSwitchRenderfarm(bpy.types.Operator):
- bl_label = "Switch to Renderfarm.fi"
- bl_idname = "ore.switch_to_renderfarm_render"
-
- def execute(self, context):
- ore = bpy.context.scene.ore_render
- rd = bpy.context.scene.render
-
- ore.resox = rd.resolution_x
- ore.resoy = rd.resolution_y
- ore.fps = rd.fps
- ore.start = bpy.context.scene.frame_start
- ore.end = bpy.context.scene.frame_end
- if (rd.engine == 'CYCLES'):
- ore.samples = bpy.context.scene.cycles.samples
- ore.engine = 'cycles'
- else:
- ore.engine = 'blender'
- bpy.context.scene.render.engine = 'RENDERFARMFI_RENDER'
- return {'FINISHED'}
-
-class OpSwitchBlenderRender(bpy.types.Operator):
- bl_label = "Switch to local render"
- bl_idname = "ore.switch_to_local_render"
-
- def execute(self, context):
- rd = bpy.context.scene.render
- ore = bpy.context.scene.ore_render
- rd.resolution_x = ore.resox
- rd.resolution_y = ore.resoy
- rd.fps = ore.fps
- bpy.context.scene.frame_start = ore.start
- bpy.context.scene.frame_end = ore.end
- if (bpy.context.scene.ore_render.engine == 'cycles'):
- rd.engine = 'CYCLES'
- bpy.context.scene.cycles.samples = ore.samples
- else:
- bpy.context.scene.render.engine = 'BLENDER_RENDER'
- return {'FINISHED'}
-
-# Copies start & end frame + others from render settings to ore settings
-class OpCopySettings(bpy.types.Operator):
- bl_label = "Copy settings from current scene"
- bl_idname = "ore.copy_settings"
-
- def execute(self, context):
- sce = bpy.context.scene
- rd = sce.render
- ore = sce.ore_render
- ore.resox = rd.resolution_x
- ore.resoy = rd.resolution_y
- ore.start = sce.frame_start
- ore.end = sce.frame_end
- ore.fps = rd.fps
- return {'FINISHED'}
-
-class ORE_RefreshOp(bpy.types.Operator):
- bl_idname = 'ore.refresh_session_list'
- bl_label = 'Refresh'
-
- def execute(self, context):
- result = _do_refresh(self)
- if (result == 0):
- return {'FINISHED'}
- else:
- return {'CANCELLED'}
-
-class ORE_OpenDownloadLocation(bpy.types.Operator):
- bl_idname = 'ore.open_download_location'
- bl_label = 'Download new version for your platform'
-
- def execute(self, context):
- import webbrowser
- webbrowser.open(bpy.download_location)
- return {'FINISHED'}
-
-class ORE_CancelSession(bpy.types.Operator):
- bl_idname = 'ore.cancel_session'
- bl_label = 'Cancel Session'
-
- def execute(self, context):
- sce = context.scene
- ore = sce.ore_render
- if len(bpy.ore_complete_session_queue)>0:
- s = bpy.ore_complete_session_queue[ore.selected_session]
- try:
- rffi.cancel_session(self, s)
- except SessionCancelFailedException as scfe:
- print("sessioncancelfailedexception", scfe)
-
- return {'FINISHED'}
-
-class ORE_GetCompletedSessions(bpy.types.Operator):
- bl_idname = 'ore.completed_sessions'
- bl_label = 'Completed sessions'
-
- def execute(self, context):
- sce = context.scene
- ore = sce.ore_render
- bpy.queue_selected = 1
- bpy.ore_active_session_queue = bpy.ore_completed_sessions
- update_session_list(completed_sessions, ore)
-
- return {'FINISHED'}
-
-class ORE_GetCancelledSessions(bpy.types.Operator):
- bl_idname = 'ore.cancelled_sessions'
- bl_label = 'Cancelled sessions'
-
- def execute(self, context):
- sce = context.scene
- ore = sce.ore_render
- bpy.queue_selected = 4
- bpy.ore_active_session_queue = bpy.ore_cancelled_sessions
- update_session_list(cancelled_sessions, ore)
-
- return {'FINISHED'}
-
-class ORE_GetActiveSessions(bpy.types.Operator):
- bl_idname = 'ore.active_sessions'
- bl_label = 'Rendering sessions'
-
- def execute(self, context):
- sce = context.scene
- ore = sce.ore_render
- bpy.queue_selected = 2
- bpy.ore_active_session_queue = bpy.ore_active_sessions
- update_session_list(active_sessions, ore)
-
- return {'FINISHED'}
-
-class ORE_GetPendingSessions(bpy.types.Operator):
- bl_idname = 'ore.accept_sessions' # using ORE lingo in API. acceptQueue is session waiting for admin approval
- bl_label = 'Pending sessions'
-
- def execute(self, context):
- sce = context.scene
- ore = sce.ore_render
- bpy.queue_selected = 3
- bpy.ore_active_session_queue = bpy.ore_pending_sessions
- update_session_list(pending_sessions, ore)
-
- return {'FINISHED'}
-
-class ORE_CheckUpdate(bpy.types.Operator):
- bl_idname = 'ore.check_update'
- bl_label = 'Check for a new version'
-
- def execute(self, context):
- blenderproxy = xmlrpc.client.ServerProxy(r'http://xmlrpc.renderfarm.fi/renderfarmfi/blender', verbose=bpy.RFFI_VERBOSE)
- try:
- self.report({'INFO'}, 'Checking for newer version on Renderfarm.fi')
- dl_url = blenderproxy.blender.getCurrentVersion(bpy.CURRENT_VERSION)
- if len(dl_url['url']) > 0:
- self.report({'INFO'}, 'Found a newer version on Renderfarm.fi ' + dl_url['url'])
- bpy.download_location = dl_url['url']
- bpy.found_newer_version = True
- else:
- bpy.up_to_date = True
- self.report({'INFO'}, 'Done checking for newer version on Renderfarm.fi')
- except xmlrpc.client.Fault as f:
- print('ERROR:', f)
- self.report({'ERROR'}, 'An error occurred while checking for newer version on Renderfarm.fi: ' + f.faultString)
- except xmlrpc.client.ProtocolError as e:
- print('ERROR:', e)
- self.report({'ERROR'}, 'An HTTP error occurred while checking for newer version on Renderfarm.fi: ' + str(e.errcode) + ' ' + e.errmsg)
-
- return {'FINISHED'}
-
-class ORE_LoginOp(bpy.types.Operator):
- bl_idname = 'ore.login'
- bl_label = 'Login'
-
- def execute(self, context):
- sce = context.scene
- ore = sce.ore_render
-
- ore.password = ore.password.strip()
- ore.username = ore.username.strip()
-
- print("writing new credentials")
- _write_credentials(hashlib.md5(ore.password.encode() + ore.username.encode()).hexdigest(),ore.username)
- _read_credentials()
- ore.password = ''
- ore.username = ''
- bpy.loginInserted = False
- bpy.passwordCorrect = False
-
- try:
- _do_refresh(self, True)
-
- bpy.passwordCorrect = True
- bpy.loginInserted = True
-
- except LoginFailedException as v:
- bpy.ready = False
- bpy.loginInserted = False
- bpy.passwordCorrect = False
- ore.username = bpy.rffi_user
- _write_credentials('', '')
- _read_credentials()
- ore.hash = ''
- ore.password = ''
- self.report({'WARNING'}, "Incorrect login: " + str(v))
- print(v)
- return {'CANCELLED'}
-
- return {'FINISHED'}
-
-class ORE_ResetOp(bpy.types.Operator):
- bl_idname = "ore.reset"
- bl_label = "Reset Preparation"
-
- def execute(self, context):
- sce = context.scene
- ore = sce.ore_render
- ore.prepared = False
- bpy.loginInserted = False
- bpy.prepared = False
- ore.hash = ''
- ore.username = ''
- ore.passowrd = ''
- ore.longdesc = ''
- ore.shortdesc = '-'
- ore.tags = ''
- ore.title = ''
- ore.url = ''
-
- return {'FINISHED'}
-
-class ORE_TestRenderOp(bpy.types.Operator):
- bl_idname = "ore.test_render"
- bl_label = "Run a test render"
-
- def execute(self, context):
- rd = context.scene.render
- rd.engine = 'BLENDER_RENDER'
- rd.threads_mode = 'AUTO'
- rd.threads = 1
- bpy.ops.render.render()
- rd.threads_mode = 'FIXED'
- rd.threads = 1
- rd.engine = 'RENDERFARMFI_RENDER'
- return {'FINISHED'}
-
-class ORE_UploaderOp(bpy.types.Operator):
- bl_idname = "ore.upload"
- bl_label = "Render on Renderfarm.fi"
-
- def execute(self, context):
-
- bpy.uploadInProgress = True
- _prepare_scene()
-
- returnValue = _ore_upload(self, context)
- bpy.uploadInProgress = False
- return returnValue
-
-class ORE_UseBlenderReso(bpy.types.Operator):
- bl_idname = "ore.use_scene_settings"
- bl_label = "Use Scene settings"
-
- def execute(self, context):
- sce = context.scene
- ore = sce.ore_render
- rd = context.scene.render
-
- ore.resox = rd.resolution_x
- ore.resoy = rd.resolution_y
- ore.start = sce.frame_start
- ore.end = sce.frame_end
- ore.fps = rd.fps
-
- return {'FINISHED'}
-
-class ORE_UseCyclesRender(bpy.types.Operator):
- bl_idname = "ore.use_cycles_render"
- bl_label = "Cycles"
-
- def execute(self, context):
- context.scene.ore_render.engine = 'cycles'
- return {'FINISHED'}
-
-class ORE_UseBlenderRender(bpy.types.Operator):
- bl_idname = "ore.use_blender_render"
- bl_label = "Blender Internal"
-
- def execute(self, context):
- context.scene.ore_render.engine = 'blender'
- return {'FINISHED'}
-
-class ORE_ChangeUser(bpy.types.Operator):
- bl_idname = "ore.change_user"
- bl_label = "Change user"
-
- def execute(self, context):
- ore = context.scene.ore_render
- _write_credentials('', '')
- _read_credentials()
- ore.password = ''
- bpy.ore_sessions = []
- ore.hash = ''
- bpy.rffi_user = ''
- bpy.rffi_hash = ''
- bpy.rffi_creds_found = False
- bpy.passwordCorrect = False
- bpy.loginInserted = False
- bpy.rffi_accepts = False
- bpy.rffi_motd = ''
-
- return {'FINISHED'}
-
-class ORE_CheckStatus(bpy.types.Operator):
- bl_idname = "ore.check_status"
- bl_label = "Check Renderfarm.fi Accept status"
-
- def execute(self, context):
- rffi.check_status()
- return {'FINISHED'}
diff --git a/render_renderfarmfi/ore_session.py b/render_renderfarmfi/ore_session.py
deleted file mode 100644
index 0f8cc4ef..00000000
--- a/render_renderfarmfi/ore_session.py
+++ /dev/null
@@ -1,40 +0,0 @@
-# ##### BEGIN GPL LICENSE BLOCK #####
-#
-# This program is free software; you can redistribute it and/or
-# modify it under the terms of the GNU General Public License
-# as published by the Free Software Foundation; either version 2
-# of the License, or (at your option) any later version.
-#
-# This program is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License
-# along with this program; if not, write to the Free Software Foundation,
-# Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
-#
-# ##### END GPL LICENSE BLOCK #####
-
-from math import floor
-
-class OreSession:
-
- def __init__(self, id, title):
- self.id = id
- self.title = title
- self.frames = 0
- self.startframe = 0
- self.endframe = 0
- self.rendertime = 0
- self.percentage = 0
-
- def percentageComplete(self):
- totFrames = self.endframe - self.startframe
- done = 0
- if totFrames != 0:
- done = floor((self.frames / totFrames)*100)
-
- if done > 100:
- done = 100
- return done
diff --git a/render_renderfarmfi/panels.py b/render_renderfarmfi/panels.py
deleted file mode 100644
index 117df785..00000000
--- a/render_renderfarmfi/panels.py
+++ /dev/null
@@ -1,274 +0,0 @@
-# ##### BEGIN GPL LICENSE BLOCK #####
-#
-# This program is free software; you can redistribute it and/or
-# modify it under the terms of the GNU General Public License
-# as published by the Free Software Foundation; either version 2
-# of the License, or (at your option) any later version.
-#
-# This program is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License
-# along with this program; if not, write to the Free Software Foundation,
-# Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
-#
-# ##### END GPL LICENSE BLOCK #####
-
-import bpy
-import time
-
-from .utils import _read_credentials, check_status
-from .rpc import rffi
-from .exceptions import LoginFailedException
-
-class RenderButtonsPanel():
- bl_space_type = 'PROPERTIES'
- bl_region_type = 'WINDOW'
- bl_context = "render"
- # COMPAT_ENGINES must be defined in each subclass, external engines can add themselves here
-
-class EngineSelectPanel(RenderButtonsPanel, bpy.types.Panel):
- bl_idname = "OBJECT_PT_engineSelectPanel"
- bl_label = "Choose rendering mode"
- COMPAT_ENGINES = set(['RENDERFARMFI_RENDER'])
-
- def draw(self, context):
- layout = self.layout
- row = layout.row()
- row.operator("ore.switch_to_renderfarm_render", text="Renderfarm.fi", icon='WORLD')
- row.operator("ore.switch_to_local_render", text="Local computer", icon='BLENDER')
-
-class LOGIN_PT_RenderfarmFi(RenderButtonsPanel, bpy.types.Panel):
- bl_label = 'Login to Renderfarm.fi'
- COMPAT_ENGINES = set(['RENDERFARMFI_RENDER'])
-
- @classmethod
- def poll(cls, context):
- rd = context.scene.render
- return (rd.use_game_engine==False) and (rd.engine in cls.COMPAT_ENGINES)
-
- def draw(self, context):
-
- # login
- if not bpy.loginInserted:
- if _read_credentials():
- try:
- if rffi.login(None, True, False):
- bpy.passwordCorrect = True
- bpy.loginInserted = True
- except LoginFailedException:
- bpy.passwordCorrect = False
- bpy.loginInserted = False
-
- layout = self.layout
- ore = context.scene.ore_render
- check_status(ore)
-
- if bpy.passwordCorrect == False:
- row = layout.row()
- row.label(text="Email or password missing/incorrect", icon='ERROR')
- col = layout.column()
- col.prop(ore, 'username', icon=bpy.statusMessage['username'])
- col.prop(ore, 'password', icon=bpy.statusMessage['password'])
- layout.operator('ore.login')
- else:
- layout.label(text='Successfully logged in as:', icon='INFO')
- layout.label(text=bpy.rffi_user)
- layout.operator('ore.change_user')
-
- layout.label(text='Message from Renderfarm.fi', icon='INFO')
- layout.label(text=bpy.rffi_motd)
- if bpy.rffi_accepting:
- layout.label(text='Accepting sessions', icon='FILE_TICK')
- else:
- layout.label(text='Not accepting sessions', icon='ERROR')
- layout.operator('ore.check_status')
-
-class SESSIONS_PT_RenderfarmFi(RenderButtonsPanel, bpy.types.Panel):
- bl_label = 'My sessions'
- COMPAT_ENGINES = set(['RENDERFARMFI_RENDER'])
-
- @classmethod
- def poll(cls, context):
- rd = context.scene.render
- return (rd.use_game_engine==False) and (rd.engine in cls.COMPAT_ENGINES)
-
- def draw(self, context):
- ore = context.scene.ore_render
- if (bpy.passwordCorrect == True and bpy.loginInserted == True):
- layout = self.layout
-
- layout.template_list("UI_UL_list", "rederfarmfi_render", ore, 'all_sessions', ore, 'selected_session', rows=5)
- layout.operator('ore.cancel_session')
- if (bpy.cancelError == True):
- layout.label("This session cannot be cancelled")
- errorTime = time.time() - bpy.errorStartTime
- if (errorTime > 4):
- bpy.cancelError = False
- bpy.errorStartTime = -1
- layout.operator('ore.refresh_session_list')
- else:
- layout = self.layout
- layout.label(text="You must login first")
-
-class RENDER_PT_RenderfarmFi(RenderButtonsPanel, bpy.types.Panel):
- bl_label = "Settings"
- COMPAT_ENGINES = set(['RENDERFARMFI_RENDER'])
-
- @classmethod
- def poll(cls, context):
- rd = context.scene.render
- return (rd.use_game_engine==False) and (rd.engine in cls.COMPAT_ENGINES)
-
- def draw(self, context):
- layout = self.layout
- sce = context.scene
- ore = sce.ore_render
-
- if not bpy.rffi_accepting:
- layout.label(text="Renderfarm.fi is currently not accepting sessions.")
- return
-
- if (bpy.passwordCorrect == False or bpy.loginInserted == False):
- layout.label(text='You must login first')
- else:
- layout.prop(ore, 'title', icon=bpy.statusMessage['title'])
- layout.label(text="Example: Blue Skies project, scene 8")
- layout.row()
- layout.label(text="The description *MUST* mention some project")
- layout.label(text="The project can be a film, commercial work, portfolio or something similar")
- layout.label(text="We render only animation projects. Test renders are rejected.")
- # layout.prop(ore, 'shortdesc', icon=bpy.statusMessage['shortdesc'])
- layout.prop(ore, 'longdesc', icon=bpy.statusMessage['longdesc'])
- layout.label(text="Example: In this shot the main hero is running across a flowery field towards the castle.")
- layout.prop(ore, 'tags', icon=bpy.statusMessage['tags'])
- layout.label(text="Example: blue skies hero castle flowers grass particles")
- layout.prop(ore, 'url')
- layout.label(text="Example: www.sintel.org")
-
- #layout.label(text="Please verify your settings", icon='MODIFIER')
- row = layout.row()
- row = layout.row()
- #row.operator('ore.copy_settings')
- #row = layout.row()
-
- layout.label(text="Rendering engine")
- row = layout.row()
- if (ore.engine == 'blender'):
- row.operator('ore.use_blender_render', icon='FILE_TICK')
- row.operator('ore.use_cycles_render')
- elif (ore.engine == 'cycles' ):
- row.operator('ore.use_blender_render')
- row.operator('ore.use_cycles_render', icon='FILE_TICK')
- else:
- row.operator('ore.use_blender_render', icon='FILE_TICK')
- row.operator('ore.use_cycles_render')
-
- row = layout.row()
-
- layout.separator()
- row = layout.row()
- row.prop(ore, 'resox')
- row.prop(ore, 'resoy')
- row = layout.row()
- row.prop(ore, 'start')
- row.prop(ore, 'end')
- row = layout.row()
- row.prop(ore, 'fps')
- row = layout.row()
- if (ore.engine == 'cycles'):
- row.prop(ore, 'samples')
- row.prop(ore, 'subsamples')
- row = layout.row()
- row.prop(ore, 'memusage')
- #row.prop(ore, 'parts')
- layout.separator()
- row = layout.row()
-
- layout.label(text="Licenses", icon='FILE_REFRESH')
- row = layout.row()
- row.prop(ore, 'inlicense')
- row = layout.row()
- row.prop(ore, 'outlicense')
-
- check_status(ore)
- if (len(bpy.errors) > 0):
- bpy.ready = False
- else:
- bpy.ready = True
-
-class UPLOAD_PT_RenderfarmFi(RenderButtonsPanel, bpy.types.Panel):
- bl_label = "Upload to www.renderfarm.fi"
- COMPAT_ENGINES = set(['RENDERFARMFI_RENDER'])
-
- @classmethod
- def poll(cls, context):
- rd = context.scene.render
- return (rd.use_game_engine==False) and (rd.engine in cls.COMPAT_ENGINES)
-
- def draw(self, context):
- layout = self.layout
-
- if not bpy.rffi_accepting:
- layout.label(text="Renderfarm.fi is currently not accepting sessions.")
- return
-
- if (bpy.passwordCorrect == False or bpy.loginInserted == False):
- layout.label(text="You must login first")
- else:
- if (bpy.ready):
- layout.label(text="Policies", icon='LAMP')
- layout.label(text="- The animation must be at least 20 frames long")
- layout.label(text="- No still renders")
- layout.label(text="- No Python scripts")
- layout.label(text="- Memory usage max 4GB")
- layout.label(text="- If your render takes more than an hour / frame:")
- layout.label(text=" * No filter type composite nodes (blur, glare etc.)")
- layout.label(text=" * No SSS")
- layout.label(text=" * No Motion Blur")
-
- layout.separator()
-
- row = layout.row()
- if (bpy.uploadInProgress == True):
- layout.label(text="------------------------")
- layout.label(text="- Attempting upload... -")
- layout.label(text="------------------------")
- if (bpy.file_format_warning == True):
- layout.label(text="Your output format is HDR", icon='ERROR')
- layout.label(text="Right now we don't support this file format")
- layout.label(text="File format will be changed to PNG")
- if (bpy.texturePackError):
- layout.label(text="There was an error in packing external textures", icon='ERROR')
- layout.label(text="Make sure that all your textures exist on your computer")
- layout.label(text="The render will still work, but won't have the missing textures")
- layout.label(text="You may want to cancel your render above in \"My sessions\"")
- if (bpy.linkedFileError):
- layout.label(text="There was an error in appending linked .blend files", icon='ERROR')
- layout.label(text="Your render might not have all the external content")
- layout.label(text="You may want to cancel your render above in \"My sessions\"")
- if (bpy.particleBakeWarning):
- layout.label(text="You have a particle simulation", icon='ERROR')
- layout.label(text="All Emitter type particles must be baked")
- if (bpy.childParticleWarning):
- layout.label(text="Child particle mode changed!", icon='ERROR')
- layout.label(text="Renderfarm.fi requires that you use 'Interpolated'")
- if (bpy.simulationWarning):
- layout.label(text="There is a simulation!", icon='ERROR')
- layout.label(text="- Fluid simulations aren't supported")
- layout.label(text="- Collision simulations must be baked")
- row = layout.row()
- row.operator('ore.upload', icon='FILE_TICK')
- if (bpy.infoError == True):
- layout.label("You must fill in the scene info first", icon='ERROR')
- errorTime = time.time() - bpy.errorStartTime
- if (errorTime > 4):
- bpy.infoError = False
- bpy.errorStartTime = -1
- layout.label(text="Warning:", icon='LAMP')
- layout.label(text="Blender may seem frozen during the upload!")
- row.operator('ore.reset', icon='FILE_REFRESH')
- else:
- layout.label(text="Fill the scene information first")
diff --git a/render_renderfarmfi/prepare.py b/render_renderfarmfi/prepare.py
deleted file mode 100644
index 67770635..00000000
--- a/render_renderfarmfi/prepare.py
+++ /dev/null
@@ -1,189 +0,0 @@
-# ##### BEGIN GPL LICENSE BLOCK #####
-#
-# This program is free software; you can redistribute it and/or
-# modify it under the terms of the GNU General Public License
-# as published by the Free Software Foundation; either version 2
-# of the License, or (at your option) any later version.
-#
-# This program is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License
-# along with this program; if not, write to the Free Software Foundation,
-# Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
-#
-# ##### END GPL LICENSE BLOCK #####
-
-import bpy
-import os
-
-def hasSSSMaterial():
- for m in bpy.data.materials:
- if m.subsurface_scattering.use:
- return True
- return False
-
-def tuneParticles():
- for p in bpy.data.particles:
- if (p.type == 'EMITTER'):
- bpy.particleBakeWarning = True
- if (p.type == 'HAIR'):
- if (p.child_type == 'SIMPLE'):
- p.child_type = 'INTERPOLATED'
- bpy.childParticleWarning = True
-
-def hasParticleSystem():
- if (len(bpy.data.particles) > 0):
- print("Found particle system")
- return True
- return False
-
-def hasSimulation(t):
- for o in bpy.data.objects:
- for m in o.modifiers:
- if isinstance(m, t):
- print("Found simulation: " + str(t))
- return True
- return False
-
-def hasFluidSimulation():
- return hasSimulation(bpy.types.FluidSimulationModifier)
-
-def hasSmokeSimulation():
- return hasSimulation(bpy.types.SmokeModifier)
-
-def hasClothSimulation():
- return hasSimulation(bpy.types.ClothModifier)
-
-def hasCollisionSimulation():
- return hasSimulation(bpy.types.CollisionModifier)
-
-def hasSoftbodySimulation():
- return hasSimulation(bpy.types.SoftBodyModifier)
-
-def hasUnsupportedSimulation():
- return hasSoftbodySimulation() or hasCollisionSimulation() or hasClothSimulation() or hasSmokeSimulation() or hasFluidSimulation()
-
-def isFilterNode(node):
- t = type(node)
- return t==bpy.types.CompositorNodeBlur or t==bpy.types.CompositorNodeDBlur
-
-def changeSettings():
-
- sce = bpy.context.scene
- rd = sce.render
- ore = sce.ore_render
-
- # Necessary settings for BURP
- rd.resolution_x = ore.resox
- rd.resolution_y = ore.resoy
- sce.frame_start = ore.start
- sce.frame_end = ore.end
- rd.fps = ore.fps
-
- bpy.file_format_warning = False
- bpy.simulationWarning = False
- bpy.texturePackError = False
- bpy.particleBakeWarning = False
- bpy.childParticleWarning = False
-
- if (rd.image_settings.file_format == 'HDR'):
- rd.image_settings.file_format = 'PNG'
- bpy.file_format_warning = True
-
- # Convert between Blender's image format and BURP's formats
- if (rd.image_settings.file_format == 'PNG'):
- ore.file_format = 'PNG_FORMAT'
- elif (rd.image_settings.file_format == 'OPEN_EXR'):
- ore.file_format = 'EXR_FORMAT'
- elif (rd.image_settings.file_format == 'OPEN_EXR_MULTILAYER'):
- ore.file_format = 'EXR_MULTILAYER_FORMAT'
- elif (rd.image_settings.file_format == 'HDR'):
- ore.file_format = 'PNG_FORMAT'
- else:
- ore.file_format = 'PNG_FORMAT'
-
- if (ore.engine == 'cycles'):
- bpy.context.scene.cycles.samples = ore.samples
-
- if (ore.subsamples <= 0):
- ore.subsamples = 1
-
- if (ore.samples / ore.subsamples < 100.0):
- ore.subsamples = float(ore.samples) / 100.0
-
- # Multipart support doesn' work if SSS is used
- if ((rd.use_sss == True and hasSSSMaterial()) and ore.parts > 1):
- ore.parts = 1;
-
- if (hasParticleSystem()):
- tuneParticles()
- else:
- bpy.particleBakeWarning = False
- bpy.childParticleWarning = False
-
- if (hasUnsupportedSimulation()):
- bpy.simulationWarning = True
- else:
- bpy.simulationWarning = False
-
-def _prepare_scene():
- changeSettings()
-
- print("Packing external textures...")
- try:
- bpy.ops.file.pack_all()
- bpy.texturePackError = False
- except Exception as e:
- bpy.texturePackError = True
- print(e)
-
- linkedData = bpy.utils.blend_paths()
- if (len(linkedData) > 0):
- print("Appending linked .blend files...")
- try:
- bpy.ops.object.make_local(type='ALL')
- bpy.linkedFileError = False
- except Exception as e:
- bpy.linkedFileError = True
- print(e)
- else:
- print("No external .blends used, skipping...")
-
- # Save with a different name
- print("Saving into a new file...")
- try:
- bpy.originalFileName = bpy.data.filepath
- except:
- bpy.originalFileName = 'untitled.blend'
- print("Original path is " + bpy.originalFileName)
- try:
- # If the filename is empty, we'll make one from the path of the user's resource folder
- if (len(bpy.originalFileName) == 0):
- print("No existing file path found, saving to autosave directory")
- savePath = bpy.utils.user_resource("AUTOSAVE")
- try:
- os.mkdir(savePath)
- except Exception as ex:
- print(ex)
- try:
- savePath = savePath + "_renderfarm"
- except Exception as ex:
- print(ex)
- try:
- bpy.ops.wm.save_mainfile(filepath=savePath)
- except Exception as ex:
- print(ex)
- else:
- print("Saving to current .blend directory")
- savePath = bpy.originalFileName
- savePath = savePath + "_renderfarm.blend"
- bpy.ops.wm.save_mainfile(filepath=savePath)
- except Exception as e:
- print(e)
-
- print(".blend prepared")
-
-
diff --git a/render_renderfarmfi/rpc.py b/render_renderfarmfi/rpc.py
deleted file mode 100644
index bb5d6a45..00000000
--- a/render_renderfarmfi/rpc.py
+++ /dev/null
@@ -1,198 +0,0 @@
-# ##### BEGIN GPL LICENSE BLOCK #####
-#
-# This program is free software; you can redistribute it and/or
-# modify it under the terms of the GNU General Public License
-# as published by the Free Software Foundation; either version 2
-# of the License, or (at your option) any later version.
-#
-# This program is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License
-# along with this program; if not, write to the Free Software Foundation,
-# Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
-#
-# ##### END GPL LICENSE BLOCK #####
-
-import xmlrpc.client
-import imp
-import time
-import bpy
-
-from .exceptions import LoginFailedException, SessionCancelFailedException, \
- GetSessionsFailedException
-from .utils import _read_credentials, _xmlsessions_to_oresessions, \
- update_complete_session_list
-
-def _is_dev():
- is_dev = False
- pwfile = bpy.utils.user_resource('CONFIG', 'rffi', True)
- pwmod = None
- try:
- pwmod = imp.find_module('rffi_dev',[pwfile])
- try:
- user_creds = imp.load_module('rffi_dev', pwmod[0], pwmod[1], pwmod[2])
- if 'dev' in dir(user_creds) and user_creds.dev:
- is_dev = True
- except ImportError:
- is_dev = False
- finally:
- if pwmod and pwmod[0]: pwmod[0].close()
- except ImportError:
- is_dev = False
- finally:
- if pwmod and pwmod[0]: pwmod[0].close()
-
- return is_dev
-
-def _be_verbose():
- be_verbose = False
- pwfile = bpy.utils.user_resource('CONFIG', 'rffi', True)
- pwmod = None
- try:
- pwmod = imp.find_module('rffi_dev',[pwfile])
- try:
- user_creds = imp.load_module('rffi_dev', pwmod[0], pwmod[1], pwmod[2])
- if 'verbose' in dir(user_creds) and user_creds.verbose:
- be_verbose = True
- except ImportError:
- be_verbose = False
- finally:
- if pwmod and pwmod[0]: pwmod[0].close()
- except ImportError:
- be_verbose = False
- finally:
- if pwmod and pwmod[0]: pwmod[0].close()
-
- return be_verbose
-
-RFFI_DEV = _is_dev()
-RFFI_VERBOSE = _be_verbose()
-
-if RFFI_DEV:
- print("DEVELOPER MODE")
- rffi_xmlrpc_secure = r'http://renderfarm.server/burp/xmlrpc'
- rffi_xmlrpc = r'http://renderfarm.server/burp/xmlrpc'
- rffi_xmlrpc_upload = 'renderfarm.server'
-else:
- rffi_xmlrpc_secure = r'http://xmlrpc.renderfarm.fi/burp/xmlrpc'
- rffi_xmlrpc = r'http://xmlrpc.renderfarm.fi/burp/xmlrpc'
- rffi_xmlrpc_upload = 'xmlrpc.renderfarm.fi'
-
-
-def _get_proxy():
- proxy = xmlrpc.client.ServerProxy(rffi_xmlrpc, verbose=RFFI_VERBOSE)
- return proxy
-
-def _get_secure_proxy():
- proxy = xmlrpc.client.ServerProxy(rffi_xmlrpc_secure, verbose=RFFI_VERBOSE)
- return proxy
-
-def _do_refresh(op, rethrow=False, print_errors=True):
- sce = bpy.context.scene
- ore = sce.ore_render
-
- if _read_credentials():
- try:
- bpy.ore_sessions = []
- bpy.ore_pending_sessions = []
- bpy.ore_active_sessions = []
- bpy.ore_completed_sessions = []
- bpy.ore_cancelled_sessions = []
- update_complete_session_list(ore)
-
- res = rffi.login(op, True, print_errors)
- userid = res['userID']
-
- sessions = rffi.get_sessions(userid, 'accept', 0, 100, 'full')
- bpy.ore_sessions = _xmlsessions_to_oresessions(sessions, stage='Pending')
- bpy.ore_pending_sessions = bpy.ore_sessions
-
- sessions = rffi.get_sessions(userid, 'completed', 0, 100, 'full')
- bpy.ore_sessions = _xmlsessions_to_oresessions(sessions, stage='Completed')
- bpy.ore_completed_sessions = bpy.ore_sessions
-
- sessions = rffi.get_sessions(userid, 'cancelled', 0, 100, 'full')
- bpy.ore_sessions = _xmlsessions_to_oresessions(sessions, stage='Cancelled')
- bpy.ore_cancelled_sessions = bpy.ore_sessions
-
- sessions = rffi.get_sessions(userid, 'render', 0, 100, 'full')
- bpy.ore_sessions = _xmlsessions_to_oresessions(sessions, stage='Rendering')
- bpy.ore_active_sessions = bpy.ore_sessions
-
- update_complete_session_list(ore)
-
- return 0
- except LoginFailedException as lfe:
- if print_errors: print("_do_refresh login failed", lfe)
- if rethrow:
- raise lfe
- return 1
- else:
- return 1
-
-
-class RffiRpc(object):
- def __init__(self):
- self.proxy = _get_proxy()
- self.sproxy = _get_secure_proxy()
- self.res = None
-
- def login(self, op, rethrow=False, print_errors=True):
- self.res = None
-
- if bpy.rffi_user=='':
- raise LoginFailedException("No email address given")
-
- if bpy.rffi_hash=='':
- raise LoginFailedException("No password given")
-
- try:
- self.res = self.sproxy.auth.getSessionKey(bpy.rffi_user, bpy.rffi_hash)
- except xmlrpc.client.Error as v:
- if op: op.report({'WARNING'}, "Error at login : " + str(type(v)) + " -> " + str(v.faultCode) + ": " + v.faultString)
- if print_errors: print("Error at login: ",v)
- if rethrow:
- vstr = str(v)
- if "Failed to invoke method getSessionKey" in vstr:
- raise LoginFailedException('User '+bpy.rffi_user+' doesn\'t exist')
- raise LoginFailedException(v.faultString)
- return None
- except Exception as v:
- if op: op.report({'WARNING'}, "Non XMLRPC Error at login: " + str(v))
- if print_errors: print(v)
- if rethrow:
- raise LoginFailedException(str(v))
- return None
- return self.res
-
- def get_sessions(self, user, queue, start, end, level):
- try:
- sessions = self.proxy.session.getSessions(user, queue, start, end, level)
- except xmlrpc.client.Error as v:
- raise GetSessionsFailedException(str(v))
- return sessions
-
- def cancel_session(self, op, session):
- res = self.login(op)
- if res:
- try:
- key = res['key']
- userid = res['userId']
- res = self.proxy.session.cancelSession(userid, key, session.id)
- _do_refresh(op, True)
- op.report({'INFO'}, 'Session ' + session.title + ' with id ' + str(session.id) + ' cancelled')
- except xmlrpc.client.Error as v:
- op.report({'ERROR'}, 'Could not cancel session ' + session.title + ' with id ' + str(session.id))
- bpy.cancelError = True
- bpy.errorStartTime = time.time()
- raise SessionCancelFailedException(str(v))
-
- def check_status(self):
- res = self.proxy.service.motd()
- bpy.rffi_accepting = res['accepting']
- bpy.rffi_motd = res['motd']
-
-rffi = RffiRpc()
diff --git a/render_renderfarmfi/upload.py b/render_renderfarmfi/upload.py
deleted file mode 100644
index 5bf28bed..00000000
--- a/render_renderfarmfi/upload.py
+++ /dev/null
@@ -1,193 +0,0 @@
-# ##### BEGIN GPL LICENSE BLOCK #####
-#
-# This program is free software; you can redistribute it and/or
-# modify it under the terms of the GNU General Public License
-# as published by the Free Software Foundation; either version 2
-# of the License, or (at your option) any later version.
-#
-# This program is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License
-# along with this program; if not, write to the Free Software Foundation,
-# Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
-#
-# ##### END GPL LICENSE BLOCK #####
-
-import xmlrpc.client
-import http.client
-import hashlib
-from os.path import isabs, isfile
-import time
-
-import bpy
-
-from .utils import _read_credentials
-from .rpc import rffi, _do_refresh, rffi_xmlrpc_upload, rffi_xmlrpc, RFFI_VERBOSE
-
-def _random_string(length):
- import string
- import random
- return ''.join(random.choice(string.ascii_letters) for ii in range(length + 1))
-
-def _encode_multipart_data(data, files):
- boundary = _random_string(30)
-
- def get_content_type(filename):
- return 'application/octet-stream' # default this
-
- def encode_field(field_name):
- return ('--' + boundary,
- 'Content-Disposition: form-data; name="%s"' % field_name,
- '', str(data[field_name]))
-
- def encode_file(field_name):
- filename = files [field_name]
- fcontent = None
- print('encoding', field_name)
- try:
- fcontent = str(open(filename, 'rb').read(), encoding='iso-8859-1')
- except Exception as e:
- print('Trouble in paradise', e)
- return ('--' + boundary,
- 'Content-Disposition: form-data; name="%s"; filename="%s"' % (field_name, filename),
- 'Content-Type: %s' % get_content_type(filename),
- '', fcontent)
-
- lines = []
- for name in data:
- lines.extend(encode_field(name))
- for name in files:
- lines.extend(encode_file(name))
- lines.extend(('--%s--' % boundary, ''))
- print("joining lines into body")
- body = '\r\n'.join(lines)
-
- headers = {'content-type': 'multipart/form-data; boundary=' + boundary,
- 'content-length': str(len(body))}
-
- print("headers and body ready")
-
- return body, headers
-
-def _send_post(data, files):
- print("Forming connection for post")
- connection = http.client.HTTPConnection(rffi_xmlrpc_upload)
- print("Requesting")
- connection.request('POST', '/burp/storage', *_encode_multipart_data(data, files)) # was /file
- print("Getting response")
- response = connection.getresponse()
- print("Reading response")
- res = response.read()
- return res
-
-def _md5_for_file(filepath):
- md5hash = hashlib.md5()
- blocksize = 0x10000
- f = open(filepath, "rb")
- while True:
- data = f.read(blocksize)
- if not data:
- break
- md5hash.update(data)
- return md5hash.hexdigest()
-
-def _upload_file(key, userid, sessionid, path):
- print("Asserting absolute path")
- assert isabs(path)
- print("Asserting path is a file")
- assert isfile(path)
- data = {
- 'userId': str(userid),
- 'sessionKey': key,
- 'sessionId': sessionid,
- 'md5sum': _md5_for_file(path)
- }
- files = {
- 'blenderfile': path
- }
- r = _send_post(data, files)
-
- return r
-
-def _run_upload(key, userid, sessionid, path):
- print("Starting upload");
- r = _upload_file(key, userid, sessionid, path)
- print("Upload finished")
- o = xmlrpc.client.loads(r)
- print("Loaded xmlrpc response")
- return o[0][0]
-
-def _ore_upload(op, context):
- sce = context.scene
- ore = sce.ore_render
-
- if not bpy.ready:
- op.report({'ERROR'}, 'Your user or scene information is not complete')
- bpy.infoError = True
- bpy.errorStartTime = time.time()
- bpy.context.scene.render.engine = 'RENDERFARMFI_RENDER'
- return {'CANCELLED'}
- try:
- _read_credentials()
- res = rffi.login(op, True)
- key = res['key']
- userid = res['userId']
- print("Creating server proxy")
- proxy = xmlrpc.client.ServerProxy(rffi_xmlrpc, verbose=RFFI_VERBOSE)
- proxy._ServerProxy__transport.user_agent = 'Renderfarm.fi Uploader/%s' % (bpy.CURRENT_VERSION)
- print("Creating a new session")
- res = proxy.session.createSession(userid, key) # This may use an existing, non-rendered session. Prevents spamming in case the upload fails for some reason
- sessionid = res['sessionId']
- key = res['key']
- print("Session id is " + str(sessionid))
- res = _run_upload(key, userid, sessionid, bpy.data.filepath)
- print("Getting fileid from xmlrpc response data")
- fileid = int(res['fileId'])
- print("Sending session details for session " + str(sessionid) + " with fileid " + str(fileid))
- res = proxy.session.setTitle(userid, res['key'], sessionid, ore.title)
- res = proxy.session.setLongDescription(userid, res['key'], sessionid, ore.longdesc)
- res = proxy.session.setShortDescription(userid, res['key'], sessionid, ore.shortdesc)
- if len(ore.url)>0:
- res = proxy.session.setExternalURLs(userid, res['key'], sessionid, ore.url)
- res = proxy.session.setStartFrame(userid, res['key'], sessionid, ore.start)
- res = proxy.session.setEndFrame(userid, res['key'], sessionid, ore.end)
- res = proxy.session.setSplit(userid, res['key'], sessionid, ore.parts)
- res = proxy.session.setMemoryLimit(userid, res['key'], sessionid, ore.memusage)
- res = proxy.session.setXSize(userid, res['key'], sessionid, ore.resox)
- res = proxy.session.setYSize(userid, res['key'], sessionid, ore.resoy)
- res = proxy.session.setFrameRate(userid, res['key'], sessionid, ore.fps)
- res = proxy.session.setFrameFormat(userid, res['key'], sessionid, ore.file_format)
- res = proxy.session.setRenderer(userid, res['key'], sessionid, ore.engine)
- res = proxy.session.setSamples(userid, res['key'], sessionid, ore.samples)
- res = proxy.session.setSubSamples(userid, res['key'], sessionid, ore.subsamples)
- if (ore.engine == 'cycles'):
- res = proxy.session.setReplication(userid, res['key'], sessionid, 1)
- if ore.subsamples > 1:
- res = proxy.session.setStitcher(userid, res['key'], sessionid, 'AVERAGE')
- else:
- res = proxy.session.setReplication(userid, res['key'], sessionid, 3)
- res = proxy.session.setOutputLicense(userid, res['key'], sessionid, int(ore.outlicense))
- res = proxy.session.setInputLicense(userid, res['key'], sessionid, int(ore.inlicense))
- print("Setting primary input file")
- res = proxy.session.setPrimaryInputFile(userid, res['key'], sessionid, fileid)
- print("Submitting session")
- res = proxy.session.submit(userid, res['key'], sessionid)
- print("Session submitted")
- op.report({'INFO'}, 'Submission sent to Renderfarm.fi')
- except xmlrpc.client.Error as v:
- bpy.context.scene.render.engine = 'RENDERFARMFI_RENDER'
- print('ERROR:', v)
- op.report({'ERROR'}, 'An XMLRPC error occurred while sending submission to Renderfarm.fi')
- except Exception as e:
- bpy.context.scene.render.engine = 'RENDERFARMFI_RENDER'
- print('Unhandled error:', e)
- op.report({'ERROR'}, 'A generic error occurred while sending submission to Renderfarm.fi')
-
- bpy.context.scene.render.engine = 'RENDERFARMFI_RENDER'
- _do_refresh(op)
- return {'FINISHED'}
-
-
diff --git a/render_renderfarmfi/utils.py b/render_renderfarmfi/utils.py
deleted file mode 100644
index 8afcfd78..00000000
--- a/render_renderfarmfi/utils.py
+++ /dev/null
@@ -1,137 +0,0 @@
-# ##### BEGIN GPL LICENSE BLOCK #####
-#
-# This program is free software; you can redistribute it and/or
-# modify it under the terms of the GNU General Public License
-# as published by the Free Software Foundation; either version 2
-# of the License, or (at your option) any later version.
-#
-# This program is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License
-# along with this program; if not, write to the Free Software Foundation,
-# Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
-#
-# ##### END GPL LICENSE BLOCK #####
-
-import imp
-
-from os.path import join
-
-import bpy
-
-from .ore_session import OreSession
-
-def _write_credentials(hash, user):
- with open(join(bpy.utils.user_resource('CONFIG', 'rffi', True), 'rffi_credentials.py'), 'w') as pwfile:
- pwfile.write('hash=\''+hash+'\'\n')
- pwfile.write('user=\''+user+'\'')
-
-
-def _read_credentials():
- bpy.rffi_creds_found = False
- bpy.rffi_user = ''
- bpy.rffi_hash = ''
-
- pwfile = bpy.utils.user_resource('CONFIG', 'rffi', True)
- try:
- pwmod = imp.find_module('rffi_credentials',[pwfile])
- except ImportError:
- _write_credentials('', '')
- pwmod = imp.find_module('rffi_credentials',[pwfile])
- try:
- user_creds = imp.load_module('rffi_credentials', pwmod[0], pwmod[1], pwmod[2])
- bpy.rffi_user = user_creds.user
- bpy.rffi_hash = user_creds.hash
- bpy.rffi_creds_found = True
- except ImportError:
- # doesn't exist yet, write template
- _write_credentials('', '')
- pwfile = bpy.utils.user_resource('CONFIG', 'rffi', True)
- pwmod = imp.find_module('rffi_credentials',[pwfile])
- try:
- user_creds = imp.load_module('rffi_credentials', pwmod[0], pwmod[1], pwmod[2])
- bpy.rffi_user = user_creds.user
- bpy.rffi_hash = user_creds.hash
- bpy.rffi_creds_found = True
- except Exception as e2:
- print("Couldn't write rffi_credentials.py", e2)
- finally:
- if pwmod and pwmod[0]: pwmod[0].close()
-
- return bpy.rffi_creds_found
-
-
-def _xmlsessions_to_oresessions(sessions, stage=None):
- output = []
- for session in sessions:
- s = session['title']
- if stage:
- s = s + ' (' + stage + ')'
- sinfo = OreSession(session['sessionId'], s)
- if stage in {'Rendering', 'Completed', 'Active'}:
- sinfo.frames = session['framesRendered']
- sinfo.startframe = session['startFrame']
- sinfo.endframe = session['endFrame']
- output.append(sinfo)
- return output
-
-
-def update_session_list(session_list, ore):
- while(len(session_list) > 0):
- session_list.remove(0)
-
- for s in bpy.ore_active_session_queue:
- session_list.add()
- session = session_list[-1]
- session.name = s.title + ' [' + str(s.percentageComplete()) + '% complete]'
-
-def update_complete_session_list(ore):
- bpy.ore_active_session_queue = bpy.ore_cancelled_sessions
- update_session_list(ore.rejected_sessions, ore)
- bpy.ore_active_session_queue = bpy.ore_active_sessions
- update_session_list(ore.active_sessions, ore)
- bpy.ore_active_session_queue = bpy.ore_pending_sessions
- update_session_list(ore.pending_sessions, ore)
- bpy.ore_active_session_queue = bpy.ore_completed_sessions
- update_session_list(ore.completed_sessions, ore)
-
- bpy.ore_complete_session_queue = []
- bpy.ore_complete_session_queue.extend(bpy.ore_pending_sessions)
- bpy.ore_complete_session_queue.extend(bpy.ore_active_sessions)
- bpy.ore_complete_session_queue.extend(bpy.ore_completed_sessions)
- bpy.ore_complete_session_queue.extend(bpy.ore_cancelled_sessions)
-
- bpy.ore_active_session_queue = bpy.ore_complete_session_queue
- update_session_list(ore.all_sessions, ore)
-
-def check_status(ore):
- bpy.errors = []
-
- if bpy.rffi_creds_found == False and bpy.rffi_hash == '':
- bpy.errors.append('missing_creds')
-
- if '' in {ore.title, ore.longdesc, ore.shortdesc}:
- bpy.errors.append('missing_desc')
- bpy.infoError = True
-
- set_status('username', bpy.rffi_hash=='' and ore.username=='')
- set_status('password', bpy.rffi_hash=='' and ore.password=='')
-
- set_status('title', ore.title=='')
- set_status('longdesc', ore.longdesc=='')
- set_status('shortdesc', ore.shortdesc=='')
-
-
-def set_status(property, status):
- if status:
- bpy.statusMessage[property] = 'ERROR'
- else:
- bpy.statusMessage[property] = 'TRIA_RIGHT'
-
-def show_status(layoutform, property, message):
- if bpy.statusMessage[property] == 'ERROR':
- layoutform.label(text='', icon='ERROR')
-
diff --git a/space_view3d_copy_attributes.py b/space_view3d_copy_attributes.py
index d6edff27..91f6ea6c 100644
--- a/space_view3d_copy_attributes.py
+++ b/space_view3d_copy_attributes.py
@@ -161,6 +161,8 @@ def pVisScaExec(bone, active, context):
def pDrwExec(bone, active, context):
bone.custom_shape = active.custom_shape
+ bone.use_custom_shape_bone_size = active.use_custom_shape_bone_size
+ bone.custom_shape_scale = active.custom_shape_scale
bone.bone.show_wire = active.bone.show_wire
@@ -188,7 +190,7 @@ def pIKsExec(bone, active, context):
def pBBonesExec(bone, active, context):
object = active.id_data
generic_copy(
- object.data.bones[active.name],
+ object.data.bones[active.name],
object.data.bones[bone.name],
"bbone_")
diff --git a/ui_layer_manager.py b/ui_layer_manager.py
index b4442d99..876e20cd 100644
--- a/ui_layer_manager.py
+++ b/ui_layer_manager.py
@@ -22,7 +22,7 @@ bl_info = {
"name": "Layer Management",
"author": "Alfonso Annarumma, Bastien Montagne",
"version": (1, 5, 2),
- "blender": (2, 72, 0),
+ "blender": (2, 76, 0),
"location": "Toolshelf > Layers Tab",
"warning": "",
"description": "Display and Edit Layer Name",
@@ -372,6 +372,7 @@ class SCENE_PT_namedlayer_layers(bpy.types.Panel):
bl_label = "Layer Management"
bl_options = {'DEFAULT_CLOSED'}
bl_category = "Layers"
+ bl_context = "objectmode"
@classmethod
def poll(self, context):
@@ -519,6 +520,7 @@ class SCENE_UL_namedlayer_groups(UIList):
class SCENE_PT_namedlayer_groups(bpy.types.Panel):
bl_space_type = 'VIEW_3D'
bl_region_type = 'TOOLS'
+ bl_context = "objectmode"
bl_category = "Layers"
bl_label = "Layer Groups"
bl_options = {'DEFAULT_CLOSED'}