Welcome to the mirror list, hosted at ThFree Co, Russian Federation.

git.blender.org/blender.git - Unnamed repository; edit this file 'description' to name the repository.
summaryrefslogtreecommitdiff
diff options
context:
space:
mode:
Diffstat (limited to 'intern/python/modules')
-rw-r--r--intern/python/modules/Blender/BGL.py1
-rw-r--r--intern/python/modules/Blender/Camera.py106
-rw-r--r--intern/python/modules/Blender/Draw.py1
-rw-r--r--intern/python/modules/Blender/Image.py52
-rw-r--r--intern/python/modules/Blender/Ipo.py279
-rw-r--r--intern/python/modules/Blender/Lamp.py168
-rw-r--r--intern/python/modules/Blender/Material.py251
-rw-r--r--intern/python/modules/Blender/Mesh.py250
-rw-r--r--intern/python/modules/Blender/NMesh.py192
-rw-r--r--intern/python/modules/Blender/Object.py391
-rw-r--r--intern/python/modules/Blender/Scene.py143
-rw-r--r--intern/python/modules/Blender/Text.py57
-rw-r--r--intern/python/modules/Blender/Types.py1
-rw-r--r--intern/python/modules/Blender/Window.py65
-rw-r--r--intern/python/modules/Blender/World.py157
-rw-r--r--intern/python/modules/Blender/__init__.py23
-rw-r--r--intern/python/modules/Blender/shadow.py195
-rw-r--r--intern/python/modules/Blender/sys.py20
-rw-r--r--intern/python/modules/Converter/__init__.py4
-rw-r--r--intern/python/modules/Converter/bimporter.py34
-rw-r--r--intern/python/modules/Converter/importer/VRMLimporter.py988
-rw-r--r--intern/python/modules/Converter/importer/__init__.py17
-rw-r--r--intern/python/modules/Converter/importloader.py23
-rw-r--r--intern/python/modules/TextTools/Constants/Sets.py39
-rw-r--r--intern/python/modules/TextTools/Constants/TagTables.py348
-rw-r--r--intern/python/modules/TextTools/Constants/__init__.py1
-rw-r--r--intern/python/modules/TextTools/TextTools.py766
-rw-r--r--intern/python/modules/TextTools/__init__.py48
-rw-r--r--intern/python/modules/TextTools/mxTextTools/__init__.py17
-rw-r--r--intern/python/modules/VRMLmain.py3
-rw-r--r--intern/python/modules/beta/Objects.py167
-rw-r--r--intern/python/modules/beta/Scenegraph.py182
-rw-r--r--intern/python/modules/beta/__init__.py1
-rw-r--r--intern/python/modules/blenderos.py24
-rw-r--r--intern/python/modules/mcf/__init__.py6
-rw-r--r--intern/python/modules/mcf/utils/__init__.py6
-rw-r--r--intern/python/modules/mcf/utils/collapse.py169
-rw-r--r--intern/python/modules/mcf/utils/copy_extend.py83
-rw-r--r--intern/python/modules/mcf/utils/cpickle_extend.py190
-rw-r--r--intern/python/modules/mcf/utils/dictbool.py80
-rw-r--r--intern/python/modules/mcf/utils/dsort.py91
-rw-r--r--intern/python/modules/mcf/utils/dummy.py91
-rw-r--r--intern/python/modules/mcf/utils/err.py37
-rw-r--r--intern/python/modules/mcf/utils/extpkl.py19
-rw-r--r--intern/python/modules/mcf/utils/fileassociation.py65
-rw-r--r--intern/python/modules/mcf/utils/findourfile.py30
-rw-r--r--intern/python/modules/mcf/utils/hier_rx.py201
-rw-r--r--intern/python/modules/mcf/utils/hierdummy.py16
-rw-r--r--intern/python/modules/mcf/utils/hierobj.py133
-rw-r--r--intern/python/modules/mcf/utils/in_place_ops.py38
-rw-r--r--intern/python/modules/mcf/utils/namespace.py224
-rw-r--r--intern/python/modules/mcf/utils/quote.py78
-rw-r--r--intern/python/modules/mcf/utils/rangeval.py64
-rw-r--r--intern/python/modules/mcf/utils/regutils_ex.py158
-rw-r--r--intern/python/modules/mcf/utils/reloader.py33
-rw-r--r--intern/python/modules/mcf/utils/singletonlist.py104
-rw-r--r--intern/python/modules/mcf/utils/tempclassmodule.py251
-rw-r--r--intern/python/modules/mcf/utils/typeclasses.py50
-rw-r--r--intern/python/modules/mcf/utils/userquery.py17
-rw-r--r--intern/python/modules/mcf/utils/ver.py17
-rw-r--r--intern/python/modules/mcf/utils/walkerable.py46
-rw-r--r--intern/python/modules/simpleparse/__init__.py5
-rw-r--r--intern/python/modules/simpleparse/bootstrap.py279
-rw-r--r--intern/python/modules/simpleparse/generator.py432
-rw-r--r--intern/python/modules/util/README.txt13
-rw-r--r--intern/python/modules/util/__init__.py2
-rw-r--r--intern/python/modules/util/quat.py109
-rw-r--r--intern/python/modules/util/tree.py215
-rw-r--r--intern/python/modules/util/vect.py480
-rw-r--r--intern/python/modules/util/vectools.py142
-rw-r--r--intern/python/modules/vrml/__init__.py1
-rw-r--r--intern/python/modules/vrml/basenodes.py974
-rw-r--r--intern/python/modules/vrml/fieldcoercian.py310
-rw-r--r--intern/python/modules/vrml/loader.py97
-rw-r--r--intern/python/modules/vrml/parser.py426
-rw-r--r--intern/python/modules/vrml/scenegraph.py833
-rw-r--r--intern/python/modules/vrml/utils/__init__.py1
-rw-r--r--intern/python/modules/vrml/utils/collapse.py169
-rw-r--r--intern/python/modules/vrml/utils/err.py37
-rw-r--r--intern/python/modules/vrml/utils/namespace.py225
-rw-r--r--intern/python/modules/vrml/utils/typeclasses.py50
81 files changed, 12111 insertions, 0 deletions
diff --git a/intern/python/modules/Blender/BGL.py b/intern/python/modules/Blender/BGL.py
new file mode 100644
index 00000000000..033b3560e4f
--- /dev/null
+++ b/intern/python/modules/Blender/BGL.py
@@ -0,0 +1 @@
+from _Blender.BGL import *
diff --git a/intern/python/modules/Blender/Camera.py b/intern/python/modules/Blender/Camera.py
new file mode 100644
index 00000000000..33f615fc0da
--- /dev/null
+++ b/intern/python/modules/Blender/Camera.py
@@ -0,0 +1,106 @@
+"""The Blender Camera module
+
+This module provides access to **Camera** objects in Blender
+
+Example::
+
+ from Blender import Camera, Object, Scene
+ c = Camera.New('ortho') # create new ortho camera data
+ c.lens = 35.0 # set lens value
+ cur = Scene.getCurrent() # get current Scene
+ ob = Object.New('Camera') # make camera object
+ ob.link(c) # link camera data with this object
+ cur.link(ob) # link object into scene
+ cur.setCurrentCamera(ob) # make this camera the active
+"""
+
+import shadow
+import _Blender.Camera as _Camera
+
+
+class Camera(shadow.hasIPO):
+ """Wrapper for Camera DataBlock
+
+ Attributes
+
+ lens -- The lens value
+
+ clipStart -- The clipping start of the view frustum
+
+ clipEnd -- The end clipping plane of the view frustum
+
+ type -- The camera type:
+ 0: perspective camera,
+ 1: orthogonal camera - (see Types)
+
+ mode -- Drawing mode; see Modes
+"""
+
+ _emulation = {'Lens' : "lens",
+ 'ClSta' : "clipStart",
+ 'ClEnd' : "clipEnd",
+ }
+
+ Types = {'persp' : 0,
+ 'ortho' : 1,
+ }
+
+ Modes = {'showLimits' : 1,
+ 'showMist' : 2,
+ }
+
+ def __init__(self, object):
+ self._object = object
+
+ def getType(self):
+ """Returns camera type: "ortho" or "persp" """
+ if self.type == self.Types['ortho']:
+ return 'ortho'
+ else:
+ return 'persp'
+
+ def setType(self, type):
+ """Sets Camera type to 'type' which must be one of ["persp", "ortho"]"""
+ self._object.type = self.Types[type]
+
+ def setMode(self, *modes):
+ """Sets Camera modes *the nice way*, instead of direct access
+of the 'mode' member.
+This function takes a variable number of string arguments of the types
+listed in self.Modes.
+
+
+Example::
+
+ c = Camera.New()
+ c.setMode('showMist', 'showLimits')
+"""
+ flags = 0
+ try:
+ for a in modes:
+ flags |= self.Modes[a]
+ except:
+ raise TypeError, "mode must be one of %s" % self.Modes.keys()
+ self.mode = flags
+
+ def __repr__(self):
+ return "[Camera \"%s\"]" % self.name
+
+def New(type = 'persp'):
+ """Creates new camera Object and returns it. 'type', if specified,
+must be one of Types"""
+ cam = Camera(_Camera.New())
+ cam.setType(type)
+ return cam
+
+def get(name = None):
+ """Returns the Camera with name 'name', if given. Otherwise, a list
+of all Cameras is returned"""
+ if name:
+ return Camera(_Camera.get(name))
+ else:
+ return shadow._List(_Camera.get(), Camera)
+
+Get = get # emulation
+
+
diff --git a/intern/python/modules/Blender/Draw.py b/intern/python/modules/Blender/Draw.py
new file mode 100644
index 00000000000..6c2cb8be09f
--- /dev/null
+++ b/intern/python/modules/Blender/Draw.py
@@ -0,0 +1 @@
+from _Blender.Draw import *
diff --git a/intern/python/modules/Blender/Image.py b/intern/python/modules/Blender/Image.py
new file mode 100644
index 00000000000..c1737e1bb0d
--- /dev/null
+++ b/intern/python/modules/Blender/Image.py
@@ -0,0 +1,52 @@
+"""The Blender Image module
+
+ This module provides (yet) basic support for Blender *Image* data blocks
+
+ Example::
+
+ from Blender import Image
+ im = Image.Load('dead-parrot.jpg')
+"""
+
+import _Blender.Image as _Image
+import shadow
+
+class Image(shadow.shadow):
+ """Image DataBlock object
+
+ See above example on how to create instances of Image objects.
+
+ Attributes
+
+ xrep -- Texture image tiling factor (subdivision) in X
+
+ yrep -- Texture image tiling factor (subdivision) in Y
+
+ LATER:
+
+ * Image buffer access
+
+ * better loading / saving of images
+"""
+ pass
+
+def get(name):
+ """If 'name' given, the Image 'name' is returned if existing, 'None' otherwise.
+If no name is given, a list of all Images is returned"""
+ pass
+
+def Load(filename):
+ """Returns image from file 'filename' as Image object if found, 'None' else."""
+ pass
+
+def New(name):
+ """This function is currently not implemented"""
+ pass
+
+# override all functions again, the above classes are just made
+# for documentation
+
+get = _Image.get
+Get = get
+Load = _Image.Load
+
diff --git a/intern/python/modules/Blender/Ipo.py b/intern/python/modules/Blender/Ipo.py
new file mode 100644
index 00000000000..110f95a5d07
--- /dev/null
+++ b/intern/python/modules/Blender/Ipo.py
@@ -0,0 +1,279 @@
+"""The Blender Ipo module
+
+This module provides access to **Ipo** objects in Blender.
+
+An Ipo object is a datablock of IpoCurves which control properties of
+an object in time.
+
+Note that IpoCurves assigned to rotation values (which must be specified
+in radians) appear scaled in the IpoWindow (which is in fact true, due
+to the fact that conversion to an internal unit of 10.0 angles happens).
+
+Example::
+
+ from Blender import Ipo, Object
+
+ ipo = Ipo.New('Object', 'ObIpo') # Create object ipo with name 'ObIpo'
+ curve = ipo.addCurve('LocY') # add IpoCurve for LocY
+ curve.setInterpolation('Bezier') # set interpolation type
+ curve.setExtrapolation('CyclicLinear') # set extrapolation type
+
+ curve.addBezier((0.0, 0.0)) # add automatic handle bezier point
+ curve.addBezier((20.0, 5.0), 'Free', (10.0, 4.0)) # specify left handle, right auto handle
+ curve.addBezier((30.0, 1.0), 'Vect') # automatic split handle
+ curve.addBezier((100.0, 1.0)) # auto handle
+
+ curve.update() # recalculate curve handles
+
+ curve.eval(35.0) # evaluate curve at 35.0
+
+ ob = Object.get('Plane')
+ ob.setIpo(ipo) # assign ipo to object
+"""
+
+import _Blender.Ipo as _Ipo
+
+import shadow
+
+_RotIpoCurves = ["RotX", "RotY", "RotZ", "dRotX", "dRotY", "dRotZ"]
+
+_radian_factor = 5.72957814 # 18.0 / 3.14159255
+
+def _convertBPoint(b):
+ f = _radian_factor
+ newb = BezierPoint()
+ p = b.pt
+ q = newb.pt
+ q[0], q[1] = (p[0], f * p[1])
+ p = b.h1
+ q = newb.h1
+ q[0], q[1] = (p[0], f * p[1])
+ p = b.h2
+ q = newb.h2
+ q[0], q[1] = (p[0], f * p[1])
+ return newb
+
+
+class IpoBlock(shadow.shadowEx):
+ """Wrapper for Blender Ipo DataBlock
+
+ Attributes
+
+ curves -- list of owned IpoCurves
+"""
+ def get(self, channel = None):
+ """Returns curve with channel identifier 'channel', which is one of the properties
+listed in the Ipo Window, 'None' if not found.
+If 'channel' is not specified, all curves are returned in a list"""
+ if channel:
+ for c in self._object.curves:
+ if c.name == channel:
+ return IpoCurve(c)
+ return None
+ else:
+ return map(lambda x: IpoCurve(x), self._object.curves)
+
+ def __getitem__(self, k):
+ """Emulates dictionary syntax, e.g. ipocurve = ipo['LocX']"""
+ curve = self.get(k)
+ if not curve:
+ raise KeyError, "Ipo does not have a curve for channel %s" % k
+ return curve
+
+ def __setitem__(self, k, val):
+ """Emulates dictionary syntax, e.g. ipo['LocX'] = ipocurve"""
+ c = self.addCurve(k, val)
+
+ has_key = get # dict emulation
+
+ items = get # dict emulation
+
+ def keys(self):
+ return map(lambda x: x.name, self.get())
+
+ def addCurve(self, channel, curve = None):
+ """Adds a curve of channel type 'channel' to the Ipo Block. 'channel' must be one of
+the object properties listed in the Ipo Window. If 'curve' is not specified,
+an empty curve is created, otherwise, the existing IpoCurve 'curve' is copied and
+added to the IpoBlock 'self'.
+In any case, the added curve is returned.
+"""
+ if curve:
+ if curve.__class__.__name__ != "IpoCurve":
+ raise TypeError, "IpoCurve expected"
+ c = self._object.addCurve(channel, curve._object)
+
+ ### RotIpo conversion hack
+ if channel in _RotIpoCurves:
+ print "addCurve, converting", curve.name
+ c.points = map(_convertBPoint, curve.bezierPoints)
+ else:
+ c.points = curve.bezierPoints
+ else:
+ c = self._object.addCurve(channel)
+ return IpoCurve(c)
+
+ _getters = { 'curves' : get }
+
+class BezierPoint:
+ """BezierPoint object
+
+ Attributes
+
+ pt -- Coordinates of the Bezier point
+
+ h1 -- Left handle coordinates
+
+ h2 -- Right handle coordinates
+
+ h1t -- Left handle type (see IpoCurve.addBezier(...) )
+
+ h2t -- Right handle type
+"""
+
+BezierPoint = _Ipo.BezTriple # override
+
+class IpoCurve(shadow.shadowEx):
+ """Wrapper for Blender IpoCurve
+
+ Attributes
+
+ bezierPoints -- A list of BezierPoints (see class BezierPoint),
+ defining the curve shape
+"""
+
+ InterpolationTypes = _Ipo.InterpolationTypes
+ ExtrapolationTypes = _Ipo.ExtrapolationTypes
+
+ def __init__(self, object):
+ self._object = object
+ self.__dict__['bezierPoints'] = self._object.points
+
+ def __getitem__(self, k):
+ """Emulate a sequence of BezierPoints"""
+ print k, type(k)
+ return self.bezierPoints[k]
+
+ def __repr__(self):
+ return "[IpoCurve %s]" % self.name
+
+ def __len__(self):
+ return len(self.bezierPoints)
+
+ def eval(self, time):
+ """Returns float value of curve 'self' evaluated at time 'time' which
+must be a float."""
+ return self._object.eval(time)
+
+ def addBezier(self, p, leftType = 'Auto', left = None, rightType = None, right = None):
+ """Adds a Bezier triple to the IpoCurve.
+
+The following values are float tuples (x,y), denoting position of a control vertex:
+
+p -- The position of the Bezier point
+
+left -- The position of the leftmost handle
+
+right -- The position of the rightmost handle
+
+'leftType', 'rightType' must be one of:
+
+"Auto" -- automatic handle calculation. In this case, 'left' and 'right' don't need to be specified
+
+"Vect" -- automatic split handle calculation. 'left' and 'right' are disregarded.
+
+"Align" -- Handles are aligned automatically. In this case, 'right' does not need to be specified.
+
+"Free" -- Handles can be set freely - this requires both arguments 'left' and 'right'.
+
+"""
+
+ b = _Ipo.BezTriple()
+ b.pt[0], b.pt[1] = (p[0], p[1])
+ b.h1t = leftType
+
+ if rightType:
+ b.h2t = rightType
+ else:
+ b.h2t = leftType
+
+ if left:
+ b.h1[0], b.h1[1] = (left[0], left[1])
+
+ if right:
+ b.h2[0], b.h2[1] = (right[0], right[1])
+
+ self.__dict__['bezierPoints'].append(b)
+ return b
+
+ def update(self, noconvert = 0):
+ # This is an ugly fix for the 'broken' storage of Rotation
+ # ipo values. The angles are stored in units of 10.0 degrees,
+ # which is totally inconsistent with anything I know :-)
+ # We can't (at the moment) change the internals, so we
+ # apply a conversion kludge..
+ if self._object.name in _RotIpoCurves and not noconvert:
+ points = map(_convertBPoint, self.bezierPoints)
+ else:
+ points = self.bezierPoints
+ self._object.points = points
+ self._object.update()
+
+ def getInterpolationType(self, ipotype):
+ "Returns the Interpolation type - see also IpoCurve.InterpolationTypes"
+ return self._object.getInterpolationType()
+
+ def setInterpolationType(self, ipotype):
+ """Sets the interpolation type which must be one of IpoCurve.InterpolationTypes"""
+ try:
+ self._object.setInterpolationType(ipotype)
+ except:
+ raise TypeError, "must be one of %s" % self.InterpolationTypes.keys()
+
+ def getExtrapolationType(self, ipotype):
+ "Returns the Extrapolation type - see also IpoCurve.ExtrapolationTypes"
+ return self._object.getExtrapolationType()
+
+ def setExtrapolationType(self, ipotype):
+ """Sets the interpolation type which must be one of IpoCurve.ExtrapolationTypes"""
+ try:
+ self._object.setInterpolationType(ipotype)
+ except:
+ raise TypeError, "must be one of %s" % self.ExtrapolationTypes.keys()
+
+
+def New(blocktype, name = None):
+ """Returns a new IPO block of type 'blocktype' which must be one of:
+["Object", "Camera", "World", "Material"]
+"""
+ if name:
+ i = _Ipo.New(blocktype, name)
+ else:
+ i = _Ipo.New(blocktype)
+ return IpoBlock(i)
+
+def Eval(ipocurve, time): # emulation code
+ """This function is just there for compatibility.
+Use IpoCurve.eval(time) instead"""
+ return ipocurve.eval(time)
+
+def Recalc(ipocurve): # emulation code
+ """This function is just there for compatibility. Note that Ipos
+assigned to rotation values will *not* get converted to the proper
+unit of radians.
+In the new style API, use IpoCurve.update() instead"""
+ return ipocurve.update(1)
+
+def get(name = None):
+ """If 'name' given, the Ipo 'name' is returned if existing, 'None' otherwise.
+If no name is given, a list of all Ipos is returned"""
+ if name:
+ ipo = _Ipo.get(name)
+ if ipo:
+ return IpoBlock(ipo)
+ else:
+ return None
+ else:
+ return shadow._List(_Ipo.get(), IpoBlock)
+
+Get = get # emulation
diff --git a/intern/python/modules/Blender/Lamp.py b/intern/python/modules/Blender/Lamp.py
new file mode 100644
index 00000000000..ab7ed63592a
--- /dev/null
+++ b/intern/python/modules/Blender/Lamp.py
@@ -0,0 +1,168 @@
+"""The Blender Lamp module
+
+This module provides control over **Lamp** objects in Blender.
+
+Example::
+
+ from Blender import Lamp
+ l = Lamp.New('Spot')
+ l.setMode('square', 'shadow')
+ ob = Object.New('Lamp')
+ ob.link(l)
+"""
+
+import _Blender.Lamp as _Lamp
+import shadow
+
+_validBufferSizes = [512, 768, 1024, 1536, 2560]
+
+def _setBufferSize(self, bufsize):
+ """Set the lamp's buffersize. This function makes sure that a valid
+bufferSize value is set (unlike setting lamp.bufferSize directly)"""
+ if bufsize not in _validBufferSizes:
+ print """Buffer size should be one of:
+%s
+Setting to default 512""" % _validBufferSizes
+ bufsize = 512
+ self._object.bufferSize = bufsize
+
+class Lamp(shadow.hasIPO, shadow.hasModes):
+ """Wrapper for Blender Lamp DataBlock
+
+ Attributes
+
+ mode -- Lamp mode value - see EditButtons. Do not access directly
+ See setMode()
+
+ type -- Lamp type value - see EditButtons. No direct access, please.
+ See setType()
+
+ col -- RGB vector (R, G, B) of lamp colour
+
+ energy -- Intensity (float)
+
+ dist -- clipping distance of a spot lamp or decay range
+
+ spotSize -- float angle (in degrees) of spot cone
+ (between 0.0 and 180.0)
+
+ spotBlend -- value defining the blurriness of the spot edge
+
+ haloInt -- Halo intensity
+
+ clipStart -- shadow buffer clipping start
+
+ clipStart -- shadow buffer clipping end
+
+ bias -- The bias value for the shadowbuffer routine
+
+ softness -- The filter value for the shadow blurring
+
+ samples -- Number of samples in shadow calculation - the
+ larger, the better
+
+ bufferSize -- Size of the shadow buffer which should be one of:
+ [512, 768, 1024, 1536, 2560]
+
+ haloStep -- Number of steps in halo calculation - the smaller, the
+ the better (and slower). A value of 0 disables shadow
+ halo calculation
+ """
+
+ _emulation = {'Energ' : "energy",
+ 'SpoSi' : "spotSize",
+ 'SpoBl' : "SpotBlend",
+ 'HaInt' : "haloInt",
+ 'Dist' : "dist",
+ 'Quad1' : "quad1",
+ 'Quad2' : "quad2",
+ }
+
+ _setters = {'bufferSize' : _setBufferSize}
+
+ t = _Lamp.Types
+
+ Types = {'Lamp' : t.LOCAL,
+ 'Spot' : t.SPOT,
+ 'Sun' : t.SUN,
+ 'Hemi' : t.HEMI,
+ }
+
+ t = _Lamp.Modes
+
+ Modes = {'quad' : t.QUAD,
+ 'sphere' : t.SPHERE,
+ 'shadow' : t.SHAD,
+ 'halo' : t.HALO,
+ 'layer' : t.LAYER,
+ 'negative' : t.NEG,
+ 'onlyShadow' : t.ONLYSHADOW,
+ 'square' : t.SQUARE,
+ }
+
+ del t
+
+ def __repr__(self):
+ return "[Lamp \"%s\"]" % self.name
+
+ def setType(self, name):
+ """Set the Lamp type of Lamp 'self'. 'name' must be a string of:
+
+* 'Lamp': A standard point light source
+
+* 'Spot': A spot light
+
+* 'Sun' : A unidirectional light source, very far away (like a Sun!)
+
+* 'Hemi': A diffuse hemispherical light source (daylight without sun)"""
+
+ try:
+ self._object.type = self.Types[name]
+ except:
+ raise TypeError, "type must be one of %s" % self.Types.keys()
+
+ def getType(self):
+ """Returns the lamp's type as string. See setType()"""
+ for k in self.Types.keys():
+ if self.Types[k] == self.type:
+ return k
+
+ def getMode(self):
+ """Returns the Lamp modes as a list of strings"""
+ return shadow._getModeBits(self.Modes, self._object.mode)
+
+ def setMode(self, *args):
+ """Set the Lamp mode of Lamp 'self'. This function takes a variable number
+of string arguments of the types listed in self.Modes.
+
+ Example::
+
+ l = Lamp.New()
+ l.setMode('quad', 'shadow')
+"""
+ print args
+ self._object.mode = shadow._setModeBits(self.Modes, args)
+
+ def getBufferSize(self):
+ return self.bufferSize
+
+def New(type = "Lamp", name = "Lamp"):
+ """Returns a new Lamp datablock of type 'type' and optional name 'name'
+"""
+ t = Lamp.Types[type]
+ rawlamp = _Lamp.New()
+ rawlamp.type = t
+ rawlamp.name = name
+ return Lamp(rawlamp)
+
+
+def get(name = None):
+ """If 'name' given, the Lamp 'name' is returned if existing, 'None' otherwise.
+If no name is given, a list of all Lamps is returned"""
+
+ if name:
+ return Lamp(_Lamp.get(name))
+ else:
+ return shadow._List(_Lamp.get(), Lamp)
+
+Types = _Lamp.Types
diff --git a/intern/python/modules/Blender/Material.py b/intern/python/modules/Blender/Material.py
new file mode 100644
index 00000000000..f24541f0f03
--- /dev/null
+++ b/intern/python/modules/Blender/Material.py
@@ -0,0 +1,251 @@
+"""The Blender Material module
+
+ This module provides access to *Material* datablocks
+
+ Example::
+
+ from Blender import Material, NMesh, Object, Scene
+ m = Material.New() # create free Material datablock
+ m.rgbCol = (1.0, 0.0, 0.3) # assign RGB values
+ mesh = NMesh.GetRaw() # get new mesh
+ mesh.addMaterial(m) # add material to mesh
+ object = Object.New('Mesh') # create new object
+ object.link(mesh) # link mesh data to object
+ Scene.getCurrent().link(ob) # link object to current scene
+"""
+
+import _Blender.Material as _Material
+import shadow
+#import Blender.Curve as Curve
+
+# These are getters and setters needed for emulation
+
+def _getRGB(obj):
+ return (obj.R, obj.G, obj.B)
+
+def _getSpec(obj):
+ return (obj.specR, obj.specG, obj.specB)
+
+def _getMir(obj):
+ return (obj.mirR, obj.mirG, obj.mirB)
+
+def _setRGB(obj, rgb):
+ obj.R, obj.G, obj.B = rgb
+
+def _setSpec(obj, rgb):
+ obj.specR, obj.specG, obj.specB = rgb
+
+def _setMir(obj, rgb):
+ obj.mirR, obj.mirG, obj.mirB = rgb
+
+
+
+class Material(shadow.hasIPO, shadow.hasModes):
+ """Material DataBlock object
+
+ See example in the Material module documentation on how to create
+ an instance of a Material object.
+
+ Attributes
+
+ The following attributes are colour vectors (r, g, b)
+
+ rgbCol -- The color vector (R, G, B).
+ The RGB values can be accessed individually as .R, .G and .B
+
+ specCol -- Specularity color vector (specR, specG, specG)
+
+ mirCol -- Mirror color vector (mirR, mirG, mirB)
+
+ The following are float values:
+
+ alpha -- The transparency
+
+ ref -- Reflectivity float value
+
+ emit -- Emit intensity value
+
+ amb -- Ambient intensity value
+
+ spec -- specularity value
+
+ specTransp -- Specular transpareny
+
+ haloSize -- Halo size
+
+ mode -- The material mode bit vector - see Material.ModeFlags
+
+ hard -- The hardness value
+
+"""
+
+ _emulation = {'Mode' : "mode",
+ 'Ref' : "ref",
+ 'HaSize' : "haloSize",
+ 'SpTra' : "specTransp",
+ 'Alpha' : "alpha",
+ 'Spec' : "spec",
+ 'Emit' : "emit",
+ 'Hard' : "hard",
+ 'Amb' : "amb",
+ }
+
+ _getters = {'rgbCol' : _getRGB,
+ 'specCol' : _getSpec,
+ 'mirCol' : _getMir,
+ }
+
+ _setters = {'rgbCol' : _setRGB,
+ 'specCol' : _setSpec,
+ 'mirCol' : _setMir,
+ }
+
+ t = _Material.Modes
+
+ Modes = {'traceable' : t.TRACEABLE,
+ 'shadow' : t.SHADOW,
+ 'shadeless' : t.SHADELESS,
+ 'wire' : t.WIRE,
+ 'vcolLight' : t.VCOL_LIGHT,
+ 'vcolPaint' : t.VCOL_PAINT,
+ 'zTransp' : t.ZTRANSP,
+ 'zInvert' : t.ZINVERT,
+ 'onlyShadow': t.ONLYSHADOW,
+ 'star' : t.STAR,
+ 'texFace' : t.TEXFACE,
+ 'noMist' : t.NOMIST,
+ }
+
+ t = _Material.HaloModes
+
+ HaloModes = { "rings" : t.RINGS,
+ "lines" : t.LINES,
+ "tex" : t.TEX,
+ "haloPuno": t.PUNO,
+ "shade" : t.SHADE,
+ "flare" : t.FLARE,
+ }
+
+
+ del t
+
+ def setMode(self, *args):
+ """Set the mode of 'self'. This function takes a variable number
+of string arguments of the types listed in self.Modes.
+
+ Example::
+
+ m = Material.New()
+ m.setMode('shadow', 'wire')
+"""
+ flags = 0
+ try:
+ for a in args:
+ flags |= self.Modes[a]
+ except:
+ raise TypeError, "mode must be one of" % self.Modes.keys()
+ self._object.mode = flags
+
+ def setHaloMode(self, *args):
+ """Sets the material to Halo mode.
+This function takes a variable number of string arguments of the types
+listed in self.HaloModes"""
+ flags = _Material.Modes.HALO
+
+ try:
+ for a in args:
+ flags |= self.HaloModes[a]
+ except:
+ raise TypeError, "mode must be one of" % self.HaloModes.keys()
+ self._object.mode = flags
+
+
+class ModeFlags:
+ """Readonly dictionary
+
+...containing Material mode bitvectors:
+
+|------------------------------------------|
+| Name | Description |
+|==========================================|
+| TRACEABLE | visible for shadow lamps |
+|------------------------------------------|
+| SHADOW | cast shadow |
+|------------------------------------------|
+| SHADELESS | do not shade |
+|------------------------------------------|
+| WIRE | draw in wireframe |
+|------------------------------------------|
+| VCOL_LIGHT | use vertex colors |
+| | with lighting |
+|------------------------------------------|
+| VCOL_PAINT | vertex colours |
+|------------------------------------------|
+| HALO | Halo material |
+|------------------------------------------|
+| ZTRANSP | Z transparency |
+|------------------------------------------|
+| ZINVERT | invert Z |
+|------------------------------------------|
+| ONLYSHADOW | only shadow, but |
+| | don't render |
+|------------------------------------------|
+| STAR | ? |
+|------------------------------------------|
+| TEXFACE | textured faces |
+|------------------------------------------|
+| NOMIST | disable mist |
+|------------------------------------------|
+
+These mode flags directly represent the buttons in the Material parameters
+window (EditButtons)
+
+Example::
+
+ # be 'm' a material
+ from Blender.Material.Modes import *
+ m.mode |= (TRACEABLE + WIRE) # Set 'wire' and 'traceable' flagsd
+ m.mode &= ~SHADELESS # clear 'shadeless' flag
+"""
+
+ t = _Material.Modes
+ TRACEABLE = t.TRACEABLE
+ SHADOW = t.SHADOW
+ SHADELESS = t.SHADELESS
+ WIRE = t.WIRE
+ VCOL_LIGHT = t.VCOL_LIGHT
+ VCOL_PAINT = t.VCOL_PAINT
+ HALO = t.HALO
+ ZTRANSP = t.ZTRANSP
+ ZINVERT = t.ZINVERT
+ ONLYSHADOW = t.ONLYSHADOW
+ STAR = t.STAR
+ TEXFACE = t.TEXFACE
+ NOMIST = t.NOMIST
+ del t
+
+# override:
+ModeFlags = _Material.Modes
+
+def get(name = None):
+ """If 'name' given, the Material 'name' is returned if existing, 'None' otherwise.
+If no name is given, a list of all Materials is returned"""
+ if name:
+ return Material(_Material.get(name))
+ else:
+ return shadow._List(_Material.get(), Material)
+
+Get = get # emulation
+
+def New(name = None):
+ """Creates a new, empty Material and returns it.
+
+Example::
+
+ from Blender import Material
+ mat = Material.New()
+"""
+ mat = Material(_Material.New())
+ if name:
+ mat.name = name
+ return mat
diff --git a/intern/python/modules/Blender/Mesh.py b/intern/python/modules/Blender/Mesh.py
new file mode 100644
index 00000000000..dd8103919f8
--- /dev/null
+++ b/intern/python/modules/Blender/Mesh.py
@@ -0,0 +1,250 @@
+"""The Blender Mesh module
+
+ This module provides routines for more extensive mesh manipulation.
+ Later, this Mesh type will also allow interactive access (like in
+ EditMode).
+ In the Publisher, Ngons will also be supported (and converted to
+ triangles on mesh.update(). The following code demonstrates
+ creation of an Ngon.
+
+ Example::
+
+ from Blender import Mesh, Object, Scene
+
+ m = Mesh.New() # new empty mesh
+ vlist = []
+ vlist.append(m.addVert((-0.0, -1.0, 0.0)))
+ vlist.append(m.addVert((1.0, 0.0, 0.0)))
+ vlist.append(m.addVert((1.0, 1.0, 0.0)))
+ vlist.append(m.addVert((0.0, 3.0, 0.0)))
+ vlist.append(m.addVert((-1.0, 2.0, 0.0)))
+ vlist.append(m.addVert((-3.0, 1.0, 0.0)))
+ vlist.append(m.addVert((-3.0, 3.0, 0.0)))
+ vlist.append(m.addVert((-4.0, 3.0, 0.0)))
+ vlist.append(m.addVert((-4.0, 0.0, 0.0)))
+
+ f = m.addFace(vlist)
+
+ # do some calculations: top project vertex coordinates to
+ # UV coordinates and normalize them to the square [0.0, 1.0]*[0.0, 1.0]
+
+ uvlist = map(lambda x: (x.co[0], x.co[1]), vlist)
+ maxx = max(map(lambda x: x[0], uvlist))
+ maxy = max(map(lambda x: x[1], uvlist))
+ minx = min(map(lambda x: x[0], uvlist))
+ miny = min(map(lambda x: x[1], uvlist))
+
+ len = max((maxx - minx), (maxy - miny))
+ offx = -minx / len
+ offy = -miny / len
+
+ f.uv = map(lambda x: (x[0]/len + offx, x[1]/len + offy), uvlist) # assign UV coordinates by 'top' projection
+
+ m.update() # update and triangulate mesh
+
+ ob = Object.New('Mesh') # create new Object
+ ob.link(m) # link mesh data
+ sc = Scene.getCurrent() # get current Scene
+ sc.link(ob) # link Object to scene
+"""
+
+from Blender.Types import NMFaceType
+import Blender.Material as Material
+
+from _Blender import NMesh as _NMesh
+
+FACEFLAGS = _NMesh.Const
+DEFAULTFLAGS = FACEFLAGS.LIGHT + FACEFLAGS.DYNAMIC
+
+import shadow
+
+def makeFace(f):
+ face = _NMesh.Face()
+ for v in f:
+ face.v.append(v)
+ face.uv.append((v.uvco[0], v.uvco[1]))
+ return face
+
+def toTriangles(ngon):
+ from utils import tesselation
+ # This should be a Publisher only feature...once the tesselation
+ # is improved. The GLU tesselator of Mesa < 4.0 is crappy...
+ if len(ngon.uv) == len(ngon.v):
+ i = 0
+ for v in ngon.v:
+ v.uvco = ngon.uv[i]
+ i += 1
+
+ return tesselation.NgonAsTriangles(ngon, makeFace) # return triangles
+
+def Color(r, g, b, a = 1.0):
+ return _NMesh.Col(255 * r, 255 * g, 255 * b, 255 * a)
+
+class Vert: #shadow NMVert class for the tesselator
+ """Vertex wrapper class
+This class emulates a float coordinate vector triple
+"""
+ def __init__(self):
+ self.vert = None
+ self.uv = []
+ def __len__(self):
+ return 3
+ def __setitem__(self, i, val):
+ self.vert[i] = val
+ def __getitem__(self, i):
+ return self.vert.co[i]
+
+class Face:
+ """Face wrapper class
+This class emulates a list of vertex references
+"""
+ def __init__(self, vlist):
+ self.v= vlist
+ self.uv = []
+
+ def __len__(self):
+ return len(self.v)
+
+ def __setitem__(self, i, val):
+ self.v[i] = val
+
+ def __getitem__(self, i):
+ return self.v[i]
+
+# override:
+
+Vert = _NMesh.Vert
+Face = _NMesh.Face
+
+class rawMesh:
+ """Wrapper for raw Mesh data"""
+ def __init__(self, object = None):
+ if object:
+ self._object = object
+ else:
+ self._object = _NMesh.GetRaw()
+
+ self.flags = DEFAULTFLAGS
+ self.smooth = 0
+ self.recalc_normals = 1
+ self.faces = self._object.faces[:]
+
+ def __getattr__(self, name):
+ if name == 'vertices':
+ return self._object.verts
+ elif name == 'has_col':
+ return self._object.hasVertexColours()
+ elif name == 'has_uv':
+ return self._object.hasFaceUV()
+ else:
+ return getattr(self._object, name)
+
+ def __repr__(self):
+ return "Mesh: %d faces, %d vertices" % (len(self.faces), len(self.verts))
+
+ def hasFaceUV(self, true = None):
+ """Sets the per-face UV texture flag, if 'true' specified (either
+ 0 or 1). Returns the texture flag in any case."""
+ if true == None:
+ return self._object.hasFaceUV()
+ return self._object.hasFaceUV(true)
+
+ def hasVertexUV(self, true = None):
+ """Sets the per-vertex UV texture flag, if 'true' specified (either
+ 0 or 1). Returns the texture flag in any case."""
+ if true == None:
+ return self._object.hasVertexUV()
+ return self._object.hasVertexUV(true)
+
+ def hasVertexColours(self, true = None):
+ """Sets the per-face UV texture flag, if 'true' specified (either
+ 0 or 1). Returns the texture flag in any case."""
+ if true == None:
+ return self._object.hasVertexColours()
+ return self._object.hasVertexColours(true)
+
+ def addVert(self, v):
+ """Adds a vertex to the mesh and returns a reference to it. 'v' can
+be a float triple or any data type emulating a sequence, containing the
+coordinates of the vertex. Note that the returned value references an
+*owned* vertex"""
+ vert = _NMesh.Vert(v[0], v[1], v[2])
+ self._object.verts.append(vert)
+ return vert
+
+ def addFace(self, vlist, flags = None, makedefaultUV = 0):
+ """Adds a face to the mesh and returns a reference to it. 'vlist'
+must be a list of vertex references returned by addVert().
+Note that the returned value references an *owned* face"""
+ if type(vlist) == NMFaceType:
+ face = vlist
+ else:
+ n = len(vlist)
+ face = _NMesh.Face(vlist)
+ if makedefaultUV:
+ face.uv = defaultUV[:n]
+
+ self.faces.append(face)
+ # turn on default flags:
+ if not flags:
+ face.mode = self.flags
+ else:
+ face.mode = flags
+ return face
+
+ def update(self):
+ """Updates the mesh datablock in Blender"""
+ o = self._object
+ o = self._object
+ o.faces = []
+ smooth = self.smooth
+ for f in self.faces:
+ if len(f) > 4: #it's a NGON
+ faces = toTriangles(f)
+ for nf in faces:
+ nf.smooth = smooth
+ o.faces.append(nf)
+ else:
+ o.faces.append(f)
+ o.update()
+
+ def link(self, material):
+ """Link material 'material' with the mesh. Note that a mesh can
+currently have up to 16 materials, which are referenced by
+Face().materialIndex"""
+ mats = self._object.materials
+ if material in mats:
+ print "material already assigned to mesh"
+ return
+ mats.append(material._object)
+
+ def unlink(self, material):
+ """Unlink (remove) material 'material' from the mesh. Note
+that the material indices per face need to be updated."""
+ self._object.materials.remove(material._object)
+
+ def setMaterials(self, materials = []):
+ """Sets materials. 'materials' must be a list of valid material objects
+Note that a mesh can currently have up to 16 materials, which are referenced
+by Face().materialIndex"""
+
+ self._object.materials = (map(lambda x: x._object, materials))
+
+ def getMaterials(self, materials = []):
+ """Returns materials assigned to the mesh"""
+ return shadow._List(self._object.materials, Material.Material)
+
+def New():
+ return rawMesh()
+
+def get(name = None):
+ """If 'name' given, the Mesh 'name' is returned if existing, 'None' otherwise."""
+ if name:
+ ob = _NMesh.GetRaw(name)
+ if ob:
+ return rawMesh(ob)
+ else:
+ return None
+ else:
+ raise SystemError, "get() for Meshes is not yet supported"
+
diff --git a/intern/python/modules/Blender/NMesh.py b/intern/python/modules/Blender/NMesh.py
new file mode 100644
index 00000000000..3e6c60bab21
--- /dev/null
+++ b/intern/python/modules/Blender/NMesh.py
@@ -0,0 +1,192 @@
+"""The Blender NMesh module
+
+ This module provides access to the raw **Mesh** data block.
+
+ Examples will not be given, as the life time of this module will be
+ most probably limited. Use the 'Mesh' module instead.
+"""
+
+import _Blender.NMesh as _NMesh
+import shadow
+
+class Mesh(shadow.shadow):
+ """The NMesh object
+
+ This contains a copy of the raw mesh object data.
+
+ Attributes
+
+ verts -- A list of vertices of type 'Vert'
+
+ faces -- List of faces of type 'Face'
+"""
+ def update(self):
+ """updates the mesh object in Blender with the modified mesh data"""
+ self._object.update()
+
+class Vert:
+ """Vertex object
+
+ Attributes
+
+ co -- The vertex coordinates (x, y, z)
+
+ no -- Vertex normal vector (nx, ny, nz)
+
+ uvco -- Vertex texture ("sticky") coordinates
+
+ index -- The vertex index, if owned by a mesh
+"""
+
+class Face:
+ """Face object
+
+ Attributes
+
+ mode -- Display mode, see NMesh.FaceModes
+
+ flag -- flag bit vector, specifying selection flags.
+ see NMesh.FaceFlags
+
+ transp -- transparency mode bit vector; see NMesh.FaceTranspModes
+
+ v -- List of Face vertices
+
+ col -- List of Vertex colours
+
+ materialIndex -- Material index (referring to one of the Materials in
+ the Meshes material list, see Mesh documentation
+
+ smooth -- Flag whether smooth normals should be calculated (1 = yes)
+
+ image -- Reference to texture image object
+
+ uv -- A list of per-face UV coordinates:
+ [(u0, v0), (u1, v1), (u2, v2), .. ]
+"""
+
+class Col:
+ """Colour object
+
+ See NMesh module documentation for an example.
+
+ Attributes
+
+ r, g, b, a -- The RGBA components of the colour
+ A component must lie in the range of [0, 255]
+"""
+
+
+class FaceModes:
+ """Face mode bit flags
+
+ BILLBOARD -- always orient after camera
+
+ DYNAMIC -- respond to collisions
+
+ INVISIBLE -- invisible face
+
+ HALO -- halo face, always point to camera
+
+ LIGHT -- dynamic lighting
+
+ OBCOL -- use object colour instead of vertex colours
+
+ SHADOW -- shadow type
+
+ SHAREDCOL -- shared vertex colors (per vertex)
+
+ TEX -- has texture image
+
+ TILES -- uses tiled image
+
+ TWOSIDE -- twosided face
+"""
+ t = _NMesh.Const
+ BILLBOARD = t.BILLBOARD
+ DYNAMIC = t.DYNAMIC
+ INVISIBLE = t.INVISIBLE
+ HALO = t.HALO
+ LIGHT = t.LIGHT
+ OBCOL = t.OBCOL
+ SHADOW = t.SHADOW
+ SHAREDCOL = t.SHAREDCOL
+ TEX = t.TEX
+ TILES = t.TILES
+ TWOSIDE = t.TWOSIDE
+ del t
+
+
+class FaceTranspModes:
+ """Readonly dictionary
+
+...containing Face transparency draw modes. They are of type 'enum', i.e.
+can not be combined like a bit vector.
+
+ SOLID -- draw solid
+
+ ADD -- add to background(halo)
+
+ ALPHA -- draw with transparency
+
+ SUB -- subtract from background
+"""
+ t = _NMesh.Const
+ SOLID = t.SOLID
+ ADD = t.ADD
+ ALPHA = t.ALPHA
+ SUB = t.SUB
+ del t
+
+class FaceFlags:
+ """Readonly dictionary
+
+...containing Face flags bitvectors:
+
+ SELECT -- selected
+
+ HIDE -- hidden
+
+ ACTIVE -- the active face
+"""
+ t = _NMesh.Const
+ SELECT = t.SELECT
+ HIDE = t.HIDE
+ ACTIVE = t.ACTIVE
+ del t
+
+
+def New(name = None):
+ """Creates a new NMesh mesh object and returns it"""
+ pass
+
+def GetRaw(name = None):
+ """If 'name' specified, the Mesh object with 'name' is returned, 'None'
+if not existant. Otherwise, a new empty Mesh is initialized and returned."""
+ pass
+
+def PutRaw(mesh, name = "Mesh"):
+ """Creates a Mesh Object instance in Blender, i.e. a Mesh Object in the
+current Scene and returns a reference to it. If 'name' specified, the Mesh
+'name' is overwritten. In this case, no Object reference is returned."""
+ pass
+
+def GetRawFromObject(name):
+ """This returns the mesh as used by the object, which
+means it contains all deformations and modifications."""
+ pass
+
+# override all these functions again, because we only used them for
+# documentation -- NMesh will be no longer supported in future
+
+New = _NMesh.New
+GetRaw = _NMesh.GetRaw
+PutRaw = _NMesh.PutRaw
+GetRawFromObject = _NMesh.GetRawFromObject
+Const = _NMesh.Const
+Vert = _NMesh.Vert
+Face = _NMesh.Face
+Col = _NMesh.Col
+
+def NMesh(data):
+ return data
diff --git a/intern/python/modules/Blender/Object.py b/intern/python/modules/Blender/Object.py
new file mode 100644
index 00000000000..7fefedf4725
--- /dev/null
+++ b/intern/python/modules/Blender/Object.py
@@ -0,0 +1,391 @@
+##
+## Blender API mid level layer 01/2002 // strubi@blender.nl
+##
+## $Id$
+##
+
+"""The Blender Object module
+
+ This module provides **Object** manipulation routines.
+
+ Example::
+
+ from Blender import Object
+ ob = Object.get('Plane')
+ actobj = Object.getSelected()[0] # get active Object
+ print actobj.loc # print position
+ ob.makeParent([actobj]) # make ob the parent of actobj
+"""
+
+import _Blender.Object as _Object
+
+import shadow
+reload(shadow) # XXX
+
+class _C:
+ pass
+
+InstanceType = type(_C())
+del _C # don't export this
+
+
+def _Empty_nodata(obj):
+ return None
+
+class Object(shadow.hasIPO):
+ """Blender Object
+
+ A Blender Object (note the capital O) is the instance of a 3D structure,
+ or rather, the Object that is (normally) visible in your Blender Scene.
+
+ An instance of a Blender Object object is created by::
+
+ from Blender import Object
+ ob = Object.New(type) # type must be a valid type string,
+ # see Object.Types
+
+ ...
+
+ Attributes
+
+ Note that it is in general not recommended to access the Object's
+ attributes directly. Please rather use the get-/set- functions instead.
+
+ loc -- position vector (LocX, LocY, LocZ)
+
+ dloc -- delta position vector (dLocX, dLocY, dLocZ)
+
+ rot -- euler rotation vector (RotX, RotY, RotZ).
+ Warning: this may change in future.
+
+ drot -- delta rotation euler vector (dRotX, dRotY, dRotZ)
+ Warning: this may change in future.
+
+ size -- scale vector (SizeX, SizeY, SizeZ)
+
+ dsize -- delta scale vector (dSizeX, dSizeY, dSizeZ)
+
+ layer -- layer bitvector (20 bit), defining what layers the object is
+ visible in
+
+
+ The following items are listed here only for compatibility to older
+ scripts and are READ-ONLY! **USE the get- functions instead!**
+
+ data -- reference to the data object (e.g. Mesh, Camera, Lamp, etc.)
+
+ parent -- reference to the parent object, if existing, 'None' otherwise.
+
+ track -- reference to the tracked object, if existing, 'None' otherwise.
+
+ This bit mask can be read and written:
+
+ colbits -- the Material usage mask. A set bit #n means:
+ The Material #n in the *Object's* material list is used.
+ Otherwise, the Material #n of the Objects *Data* material list
+ is displayed.
+"""
+
+ def __init__(self, object = None):
+ """Returns an empty shadow Object"""
+ self._object = object
+
+ def __repr__(self):
+ return "[Object \"%s\"]" % self.name
+
+ def link(self, data):
+ """Links Object 'self' with data 'data'. The data type must match
+the Object's type, so you cannot link a Lamp to a mesh type Object.
+'data' can also be an Ipo object (IpoBlock)
+"""
+ from _Blender import Types
+ # special case for NMesh:
+ if type(data) == Types.NMeshType:
+ return self._object.link(data)
+ elif type(data) == InstanceType:
+ if data.__class__.__name__ == "rawMesh":
+ data.update() # update mesh
+ elif data.__class__.__name__ == "IpoBlock":
+ self.setIpo(data)
+
+ return shadow._link(self, data)
+
+ def copy(self):
+ """Returns a copy of 'self'.
+This is a true, linked copy, i.e. the copy shares the same data as the
+original. The returned object is *free*, meaning, not linked to any scene."""
+ return Object(self._object.copy())
+
+ #def clone(self):
+ #"""Makes a clone of the specified object in the current scene and
+##returns its reference"""
+ #return Object(self._object.clone())
+
+ def shareFrom(self, object):
+ """Link data of 'self' with data of 'object'. This works only if
+'object' has the same type as 'self'."""
+ return Object(self._object.shareFrom(object._object))
+
+ def getMatrix(self):
+ """Returns the object matrix"""
+ return self._object.getMatrix()
+
+ def getInverseMatrix(self):
+ """Returns the object's inverse matrix"""
+ return self._object.getInverseMatrix()
+
+ def getData(self):
+ "Returns the Datablock object containing the object's data, e.g. Mesh"
+ t = self._object.getType()
+ data = self._object.data
+ try:
+ return self._dataWrappers[t][1](data)
+ except:
+ raise TypeError, "getData() not yet supported for this object type"
+
+ def getDeformData(self):
+ """Returns the Datablock object containing the object's deformed data.
+Currently, this is only supported for a Mesh"""
+ import _Blender.NMesh as _NMesh
+ t = self._object.getType()
+ if t == self.Types['Mesh']:
+ data = _NMesh.GetRawFromObject(self.name)
+ return self._dataWrappers[t][1](data)
+ else:
+ raise TypeError, "getDeformData() not yet supported for this object type"
+
+ def getType(self):
+ "Returns type string of Object, which is one of Object.Types.keys()"
+ t = self._object.getType()
+ try:
+ return self._dataWrappers[t][0]
+ except:
+ return "<unsupported>"
+
+ def getParent(self):
+ "Returns object's parent object"
+ if self._object.parent:
+ return Object(self._object.parent)
+ return None
+
+ def getTracked(self):
+ "Returns object's tracked object"
+ if self._object.track:
+ return Object(self._object.track)
+ return None
+
+# FUTURE FEATURE :-) :
+# def getLocation():
+# """Returns the object's location (x, y, z).
+#By default, the location vector is always relative to the object's parent.
+#If the location of another coordinate system is wanted, specify 'origin' by
+#the object whose coordinate system the location should be calculated in.
+
+#If world coordinates are wanted, set 'relative' = "World"."""
+
+ def getLocation(self, relative = None):
+ """Returns the object's location (x, y, z). For the moment,
+'relative' has no effect."""
+ l = self._object.loc
+ return (l[0], l[1], l[2])
+
+ def setLocation(self, location, relative = None):
+ """Sets the object's location. 'location' must be a vector triple.
+See 'getLocation()' about relative coordinate systems."""
+ l = self._object.loc # make sure this is copied
+ l[0], l[1], l[2] = location
+
+ def getDeltaLocation(self):
+ """Returns the object's delta location (x, y, z)"""
+ l = self._object.dloc
+ return (l[0], l[1], l[2])
+
+ def setDeltaLocation(self, delta_location):
+ """Sets the object's delta location which must be a vector triple"""
+ l = self._object.dloc # make sure this is copied
+ l[0], l[1], l[2] = delta_location
+
+ def getEuler(self):
+ """Returns the object's rotation as Euler rotation vector
+(rotX, rotY, rotZ)"""
+ e = self._object.rot
+ return (e[0], e[1], e[2])
+
+ def setEuler(self, euler = (0.0, 0.0, 0.0)):
+ """Sets the object's rotation according to the specified Euler angles.
+'euler' must be a vector triple"""
+ e = self._object.rot
+ e[0], e[1], e[2] = euler
+
+ def makeParent(self, objlist, mode = 0, fast = 0):
+ """Makes 'self' the parent of the objects in 'objlist' which must be
+a list of valid Objects.
+If specified:
+
+ mode -- 0: make parent with inverse
+
+ 1: without inverse
+
+ fast -- 0: update scene hierarchy automatically
+
+ 1: don't update scene hierarchy (faster). In this case, you
+ must explicitely update the Scene hierarchy, see:
+ 'Blender.Scene.getCurrent().update()'"""
+ list = map(lambda x: x._object, objlist)
+ return Object(self._object.makeParent(list, mode, fast))
+
+ def clrParent(self, mode = 0, fast = 0):
+ """Clears parent object.
+If specified:
+
+ mode -- 2: keep object transform
+
+ fast > 0 -- don't update scene hierarchy (faster)"""
+ return Object(self._object.clrParent(mode, fast))
+
+ def getMaterials(self):
+ """Returns list of materials assigned to the object"""
+ from Blender import Material
+ return shadow._List(self._object.getMaterials(), Material.Material)
+
+ def setMaterials(self, materials = []):
+ """Sets materials. 'materials' must be a list of valid material objects"""
+ o = self._object
+ old_mask = o.colbits
+ o.colbits = -1 # set material->object linking
+ o.setMaterials(map(lambda x: x._object, materials))
+ o.colbits = old_mask
+
+ def materialUsage(self, flag):
+ """Determines the way the material is used and returns status.
+
+'flag' = 'Data' : Materials assigned to the object's data are shown. (default)
+
+'flag' = 'Object' : Materials assigned to the object are shown.
+
+The second case is desired when the object's data wants to be shared among
+objects, but not the Materials assigned to their data. See also 'colbits'
+attribute for more (and no future compatible) control."""
+ if flag == "Object":
+ self._object.colbits = -1
+ elif flag == "Data":
+ self._object.colbits = 0
+		else:
+			raise TypeError, "unknown mode %s" % flag
+		return self._object.colbits
+
+ _getters = {}
+
+ from Blender import Mesh, Camera, Lamp
+
+ t = _Object.Types
+ Types = {"Camera" : t.CAMERA,
+ "Empty" : t.EMPTY,
+ "Lamp" : t.LAMP,
+ "Mesh" : t.MESH,
+ }
+
+ # create lookup table for data wrappers
+ _dataWrappers = range(max(Types.values()) + 1)
+ _dataWrappers[t.MESH] = ("Mesh", Mesh.rawMesh)
+ _dataWrappers[t.CAMERA] = ("Camera", Camera.Camera)
+ _dataWrappers[t.LAMP] = ("Lamp", Lamp.Lamp)
+ _dataWrappers[t.EMPTY] = ("Empty", _Empty_nodata)
+
+ t = _Object.DrawTypes
+ DrawTypes = {"Bounds" : t.BOUNDBOX,
+ "Wire" : t.WIRE,
+ "Solid" : t.SOLID,
+ "Shaded" : t.SHADED,
+ }
+
+ t = _Object.DrawModes
+ DrawModes = {"axis" : t.AXIS,
+ "boundbox" : t.BOUNDBOX,
+ "texspace" : t.TEXSPACE,
+ "name" : t.NAME,
+ }
+
+
+ del t
+ del Mesh, Camera, Lamp
+
+ def getDrawMode(self):
+ """Returns the Object draw modes as a list of strings"""
+ return shadow._getModeBits(self.DrawModes, self._object.drawMode)
+
+ def setDrawMode(self, *args):
+ """Sets the Object's drawing modes as a list of strings"""
+ self._object.drawMode = shadow._setModeBits(self.DrawModes, args)
+
+ def getDrawType(self):
+ """Returns the Object draw type"""
+ for k in self.DrawTypes.keys():
+			if self.DrawTypes[k] == self._object.drawType:
+ return k
+
+ def setDrawType(self, name):
+ """Sets the Object draw type. 'name' must be one of:
+
+* 'Bounds' : Draw bounding box only
+
+* 'Wire' : Draw in wireframe mode
+
+* 'Solid' : Draw solid
+
+* 'Shaded' : Draw solid, shaded and textures
+"""
+ try:
+ self._object.drawType = self.DrawTypes[name]
+ except:
+ raise TypeError, "type must be one of %s" % self.DrawTypes.keys()
+
+
+##################
+# MODULE FUNCTIONS
+
+def New(objtype, name = None):
+ """Creates a new, empty object and returns it.
+'objtype' is a string and must be one of::
+
+ Camera
+ Empty
+ Mesh
+ Lamp
+
+More object types will be supported in future.
+
+Example::
+
+ ob = Object.New('Camera')
+"""
+
+ if type(objtype) == type(0):
+ obj = Object(_Object.New(objtype)) # emulate old syntax
+ else:
+ t = Object.Types[objtype]
+ obj = Object(_Object.New(t))
+ return obj
+
+def get(name = None):
+ """If 'name' given, the Object 'name' is returned if existing, 'None' otherwise.
+If no name is given, a list of all Objects is returned"""
+ if name:
+ ob = _Object.get(name)
+ if ob:
+ return Object(ob)
+ else:
+ return None
+ else:
+ return shadow._List(_Object.get(), Object)
+
+Get = get # emulation
+
+def getSelected():
+ """Returns a list of selected Objects in the active layer(s).
+The active object is the first in the list, if visible"""
+ return shadow._List(_Object.getSelected(), Object)
+
+GetSelected = getSelected # emulation
+
+Types = _Object.Types # for compatibility
diff --git a/intern/python/modules/Blender/Scene.py b/intern/python/modules/Blender/Scene.py
new file mode 100644
index 00000000000..a6deaeb5a46
--- /dev/null
+++ b/intern/python/modules/Blender/Scene.py
@@ -0,0 +1,143 @@
+"""The Blender Scene module
+
+ This module provides *Scene* manipulation routines.
+
+ Example::
+
+ from Blender import Scene
+
+ curscene = Scene.getCurrent()
+ ob = curscene.getChildren()[0] # first object
+ newscene = Scene.New('testscene')
+ cam = curscene.getCurrentCamera() # get current camera object
+ newscene.link(ob) # link 'ob' to Scene
+ newscene.link(cam)
+ newscene.makeCurrent() # make current Scene
+"""
+import _Blender.Scene as _Scene
+
+from Object import Object
+import shadow
+
+class Scene(shadow.shadowEx):
+ """Wrapper for Scene DataBlock
+"""
+ def link(self, object):
+ """Links Object 'object' into Scene 'self'."""
+ # This is a strange workaround; Python does not release
+ # 'self' (and thus self._object) when an exception in the C API occurs.
+ # Therefore, we catch that exception and do it ourselves..
+ # Maybe Python 2.2 is able to resolve this reference dependency ?
+ try:
+ return self._object.link(object._object)
+ except:
+ del self._object
+ raise
+
+ def unlink(self, object):
+ """Unlinks (deletes) Object 'object' from Scene."""
+ ret = self._object.unlink(object._object)
+ return ret
+
+ def copy(self, duplicate_objects = 1):
+ """Returns a copy of itself.
+
+The optional argument defines, how the Scene's children objects are
+duplicated::
+
+ 0: Link Objects
+ 1: Link Object data
+ 2: Full Copy"""
+ return Scene(self._object.copy(duplicate_objects))
+
+ def update(self):
+ """Updates scene 'self'.
+ This function explicitely resorts the base list of a newly created object
+ hierarchy."""
+ return self._object.update()
+
+ def makeCurrent(self):
+ """Makes 'self' the current Scene"""
+ return self._object.makeCurrent()
+
+ def frameSettings(self, start = None, end = None, current = None):
+ """Sets or retrieves the Scene's frame settings.
+If the frame arguments are specified, they are set.
+A tuple (start, end, current) is returned in any case."""
+ if start and end and current:
+ return self._object.frameSettings(start, end, current)
+ else:
+ return self._object.frameSettings()
+
+ def currentFrame(self, frame = None):
+ """If 'frame' is given, the current frame is set and returned in any case"""
+ if frame:
+ return self._object.frameSettings(-1, -1, frame)
+ return self._object.frameSettings()[2]
+
+ def startFrame(self, frame = None):
+ """If 'frame' is given, the start frame is set and returned in any case"""
+ if frame:
+ return self._object.frameSettings(frame, -1, -1)
+ return self._object.frameSettings()[0]
+
+ def endFrame(self, frame = None):
+ """If 'frame' is given, the end frame is set and returned in any case"""
+ if frame:
+ return self._object.frameSettings(-1, frame, -1)
+ return self._object.frameSettings()[1]
+
+ def getChildren(self):
+ """Returns a list of the Scene's children Objects"""
+ return shadow._List(self._object.getChildren(), Object)
+
+ def getCurrentCamera(self):
+ """Returns current active camera Object"""
+ cam = self._object.getCurrentCamera()
+ if cam:
+ return Object(cam)
+
+ def setCurrentCamera(self, object):
+ """Sets the current active camera Object 'object'"""
+ return self._object.setCurrentCamera(object._object)
+
+ def getRenderdir(self):
+ """Returns directory where rendered images are saved to"""
+		return self._object.getRenderdir()
+
+ def getBackbufdir(self):
+ """Returns the Backbuffer images location"""
+		return self._object.getBackbufdir()
+
+# Module methods
+
+def New(name = 'Scene'):
+ """Creates and returns new Scene with (optionally given) name"""
+ return Scene(_Scene.New(name))
+
+def get(name = None):
+ """Returns a Scene object with name 'name' if given, None if not existing,
+or a list of all Scenes otherwise."""
+ if name:
+ ob = _Scene.get(name)
+ if ob:
+ return Scene(ob)
+ else:
+ return None
+ else:
+ return shadow._List(_Scene.get(), Scene)
+
+Get = get # emulation
+
+def getCurrent():
+ """Returns the currently active Scene"""
+ sc = Scene(_Scene.getCurrent())
+ return sc
+
+def unlink(scene):
+ """Removes the Scene 'scene' from Blender"""
+ if scene._object.name == _Scene.getCurrent().name:
+ raise SystemError, "current Scene can not be removed!"
+ for ob in scene.getChildren():
+ scene.unlink(ob)
+ return _Scene.unlink(scene._object)
diff --git a/intern/python/modules/Blender/Text.py b/intern/python/modules/Blender/Text.py
new file mode 100644
index 00000000000..0d5f615f190
--- /dev/null
+++ b/intern/python/modules/Blender/Text.py
@@ -0,0 +1,57 @@
+"""The Blender Text module
+
+ This module lets you manipulate the Text buffers inside Blender.
+ Text objects are currently owned by the Text editor in Blender.
+
+ Example::
+
+ from Blender import Text
+ text = Text.New('Text') # create new text buffer
+ text.write('hello') # write string
+ Text.unlink(text) # delete
+"""
+
+import _Blender.Text as _Text
+
+class Text:
+ """Wrapper for Text DataBlock"""
+
+ def clear(self):
+ """Clears the Text objects text buffer"""
+ pass
+
+ def write(self, string):
+ """Appends 'string' to the text buffer"""
+ pass
+
+ def asLines(self):
+ """Returns the text buffer as a list of lines (strings)"""
+ pass
+
+ def set(self, attr, val):
+ """Set the Text attribute of name 'name' to value 'val'.
+
+Currently supported::
+
+ follow_cursor : 1: Text output follows the cursor"""
+
+# Module methods
+
+def New(name = None):
+ """Creates new empty Text with (optionally given) name and returns it"""
+ pass
+
+def get(name = None):
+ """Returns a Text object with name 'name' if given, 'None' if not existing,
+or a list of all Text objects in Blender otherwise."""
+ pass
+
+def unlink(text):
+ """Removes the Text 'text' from the Blender text window"""
+ pass
+
+
+# override:
+New = _Text.New
+get = _Text.get
+unlink = _Text.unlink
diff --git a/intern/python/modules/Blender/Types.py b/intern/python/modules/Blender/Types.py
new file mode 100644
index 00000000000..d49d9c35407
--- /dev/null
+++ b/intern/python/modules/Blender/Types.py
@@ -0,0 +1 @@
+from _Blender.Types import *
diff --git a/intern/python/modules/Blender/Window.py b/intern/python/modules/Blender/Window.py
new file mode 100644
index 00000000000..e51ab894dfa
--- /dev/null
+++ b/intern/python/modules/Blender/Window.py
@@ -0,0 +1,65 @@
+"""The Blender Window module
+
+This module currently only supports redrawing commands of windows.
+Later on, it will allow screen manipulations and access to Window
+properties"""
+
+import _Blender.Window as _Window
+
+t = _Window.Types
+Const = t # emulation
+
+Types = { 'View' : t.VIEW3D,
+ 'Ipo' : t.IPO,
+ 'Oops' : t.OOPS,
+ 'Button' : t.BUTS,
+ 'File' : t.FILE,
+ 'Image' : t.IMAGE,
+ 'Text' : t.TEXT,
+ 'Action' : t.ACTION,
+ }
+
+del t
+
+def Redraw(t= 'View'):
+ """Redraws all windows of the type 't' which must be one of:
+
+* "View" - The 3D view
+
+* "Ipo" - The Ipo Window
+
+* "Oops" - The OOPS (scenegraph) window
+
+* "Button" - The Button Window
+
+* "File" - The File Window
+
+* "Image" - The Image Window (UV editor)
+
+* "Text" - The Text editor
+
+* "Action" - The Action Window"""
+
+ if type(t) == type(1):
+ return _Window.Redraw(t)
+ try:
+ _Window.Redraw(Types[t])
+ except:
+ raise TypeError, "type must be one of %s" % Types.keys()
+
+def RedrawAll():
+ """Redraws the whole screen"""
+ _Window.RedrawAll()
+
+def drawProgressBar(val, text):
+ """Draws a progress bar behind the Blender version information.
+'val' is a float value <= 1.0, 'text' contains info about what is currently
+being done.
+This function must be called with 'val' = 0.0 at start and end of the executed
+(and probably time consuming) action.
+The user may cancel the progress with the 'Esc' key, in this case, 0 is returned,
+1 else."""
+ return _Window.draw_progressbar(val, text)
+
+draw_progressbar = _Window.draw_progressbar # emulation
+QRedrawAll = _Window.QRedrawAll
diff --git a/intern/python/modules/Blender/World.py b/intern/python/modules/Blender/World.py
new file mode 100644
index 00000000000..e0c42d33f16
--- /dev/null
+++ b/intern/python/modules/Blender/World.py
@@ -0,0 +1,157 @@
+import _Blender.World as _World
+
+import shadow
+
+def _getAmbCol(obj):
+ return obj.ambR, obj.ambG, obj.ambB
+
+def _setAmbCol(obj, rgb):
+ obj.ambR, obj.ambG, obj.ambB = rgb
+
+def _getZenCol(obj):
+ return obj.zenR, obj.zenG, obj.zenB
+
+def _setZenCol(obj, rgb):
+ obj.zenR, obj.zenG, obj.zenB = rgb
+
+def _getHorCol(obj):
+ return obj.horR, obj.horG, obj.horB
+
+def _setHorCol(obj, rgb):
+ obj.horR, obj.horG, obj.horB = rgb
+
+def _setMist(obj, mist):
+ obj.mistStart = mist.start
+ obj.mistDepth = mist.depth
+ obj.mistHeight = mist.height
+ obj.mistType = mist.type
+
+def _getMist(obj):
+ mist = Mist()
+ mist.start = obj.mistStart
+ mist.depth = obj.mistDepth
+ mist.height = obj.mistHeight
+ mist.type = obj.mistType
+ return mist
+
+class World(shadow.hasIPO, shadow.hasModes):
+ """Wrapper for Blender World DataBlock
+
+ Attributes
+
+ horCol -- horizon colour triple '(r, g, b)' where r, g, b must lie
+ in the range of [0.0, 1.0]
+
+ zenCol -- zenith colour triple
+
+ ambCol -- ambient colour triple
+
+ exposure -- exposure value
+
+ mist -- mist structure, see class Mist
+
+ starDensity -- star density (the higher, the more stars)
+
+ starMinDist -- the minimum distance to the camera
+
+ starSize -- size of the stars
+
+ starColNoise -- star colour noise
+
+ gravity -- The gravity constant (9.81 for earth gravity)
+"""
+
+ SkyTypes = {'blend' : 1,
+ 'real' : 2,
+ 'paper' : 4,
+ }
+
+ Modes = {'mist' : 1,
+ 'stars' : 2,
+ }
+
+ _emulation = {'Expos' : "exposure",
+ 'HorR' : "horR",
+ 'HorG' : "horG",
+ 'HorB' : "horB",
+ 'ZenR' : "zenR",
+ 'ZenG' : "zenG",
+ 'ZenB' : "zenB",
+ 'StarDi' : "starDensity",
+ 'StarSi' : "starSize",
+ 'MisSta' : "mistStart",
+ 'MisDi' : "mistDepth",
+ 'MisHi' : "mistHeight",
+ }
+
+	_getters = {'horCol' : _getHorCol,
+ 'zenCol' : _getZenCol,
+ 'ambCol' : _getAmbCol,
+ 'mist' : _getMist,
+ }
+
+ _setters = {'horCol' : _setHorCol,
+ 'zenCol' : _setZenCol,
+ 'ambCol' : _setAmbCol,
+ 'mist' : _setMist,
+ }
+
+ def getSkyType(self):
+ """Returns a list of the set Sky properties, see setSkyType()"""
+ list = []
+ for k in self.SkyTypes.keys():
+ i = self.SkyTypes[k]
+ if self._object.skyType & i:
+ list.append(k)
+ return list
+
+ def setSkyType(self, *args):
+ """Set the sky type. This function takes a variable number
+of string arguments of ['blend', 'real', 'paper']"""
+ flags = 0
+ try:
+ for a in args:
+ flags |= self.SkyTypes[a]
+ except:
+			raise TypeError, "mode must be one of %s" % self.SkyTypes.keys()
+ self._object.skyType = flags
+
+
+class Mist:
+ """Mist structure
+
+ Attributes
+
+ start -- start of the mist
+
+ depth -- depth of the "mist wall"
+
+ height -- height of the mist layer
+"""
+
+ Types = { 'quadratic' : 0,
+ 'linear' : 1,
+ 'sqrt' : 2,
+ }
+
+ def __init__(self):
+ self.start = 0.0
+ self.depth = 0.0
+ self.height = 0.0
+ self.type = 0
+
+ def setType(self, name):
+ """Set the Mist type (one of ['quadratic', 'linear', 'sqrt'])"""
+ try:
+ t = self.Types[name]
+		except KeyError:
+ raise TypeError, "type must be one of %s" % self.Types.keys()
+ self.type = t
+
+ def getType(self):
+ """Returns the Mist type as string. See setType()"""
+ for k in self.Types.keys():
+ if self.Types[k] == self.type:
+ return k
+
+
diff --git a/intern/python/modules/Blender/__init__.py b/intern/python/modules/Blender/__init__.py
new file mode 100644
index 00000000000..7356d9ddf9f
--- /dev/null
+++ b/intern/python/modules/Blender/__init__.py
@@ -0,0 +1,23 @@
+#
+# The Blender main module wrapper
+# (c) 06/2001, NaN // strubi@blender.nl
+
+__all__ = ["Object", "Image", "NMesh", "Window", "Mesh", "Tools", "sys",
+           "Lamp", "Scene", "Draw", "Camera", "Material", "Types", "Ipo",
+           "BGL", "Text"]
+
+import _Blender
+
+Get = _Blender.Get
+Redraw = _Blender.Redraw
+link = _Blender.link
+bylink = _Blender.bylink
+
+import Object, Image, Mesh, Window, Tools, sys, Lamp, Scene, Draw, Camera
+import Material, NMesh, BGL, Types, Ipo, Text
+
+deg = lambda x: 0.0174532925199 * x # conversion from degrees to radians
+
+import __builtin__
+__builtin__.deg = deg
+
diff --git a/intern/python/modules/Blender/shadow.py b/intern/python/modules/Blender/shadow.py
new file mode 100644
index 00000000000..15c5de88f66
--- /dev/null
+++ b/intern/python/modules/Blender/shadow.py
@@ -0,0 +1,195 @@
+#
+# Blender mid level modules
+# author: strubi@blender.nl
+#
+#
+
+"""Shadow class module
+
+ These classes shadow the internal Blender objects
+
+ There is no need for you to use the shadow module really - it is
+ just there for documentation. Blender object classes with a common
+ subset of function members derive from these sub classes.
+"""
+
+
+def _List(list, Wrapper):
+ """This function returns list of wrappers, taking a list of raw objects
+and the wrapper method"""
+ return map(Wrapper, list)
+
+def _getModeBits(dict, attr):
+ list = []
+ for k in dict.keys():
+ i = dict[k]
+ if attr & i:
+ list.append(k)
+ return list
+
+def _setModeBits(dict, args):
+ flags = 0
+ try:
+ for a in args:
+ flags |= dict[a]
+ except:
+ raise TypeError, "mode must be one of %s" % dict.keys()
+ return flags
+
+
+def _link(self, data):
+ """Links Object 'self' with data 'data'. The data type must match
+the Object's type, so you cannot link a Lamp to a mesh type Object"""
+ try:
+ self._object.link(data._object)
+ except:
+ print "Users:", self._object.users
+
+class shadow:
+ """This is the shadow base class"""
+ _getters = {}
+ _setters = {}
+ _emulation = {}
+
+ def __init__(self, object):
+ self._object = object
+
+ def __getattr__(self, a):
+ try:
+ return getattr(self._object, a)
+ except:
+ if self._emulation.has_key(a):
+ return getattr(self._object, self._emulation[a])
+ elif self._getters.has_key(a):
+ return self._getters[a](self)
+ else:
+ raise AttributeError, a
+
+ def __setattr__(self, a, val):
+ if a == "_object":
+ self.__dict__['_object'] = val
+ return
+
+ try:
+ setattr(self.__dict__['_object'], a, val)
+ except:
+ if self._emulation.has_key(a):
+ setattr(self.__dict__['_object'], self._emulation[a], val)
+ elif self._setters.has_key(a):
+ self._setters[a](self, val)
+ else:
+ raise AttributeError, a
+ link = _link
+
+ def rename(self, name):
+ """Tries to set the name of the object to 'name'. If the name already
+exists, a unique name is created by appending a version number (e.g. '.001')
+to 'name'. The effective name is returned."""
+ self._object.name = name
+ return self._object.name
+
+def _getattrEx(self, a):
+ if self._emulation.has_key(a):
+ return getattr(self._object, self._emulation[a])
+ elif self._getters.has_key(a):
+ return self._getters[a](self)
+ else:
+ return getattr(self._object, a)
+
+class shadowEx:
+ """This is the shadow base class with a minor change; check for
+emulation attributes happens before access to the raw object's attributes"""
+ _getters = {}
+ _setters = {}
+ _emulation = {}
+
+ def __del__(self):
+ self.__dict__.clear()
+
+ def __init__(self, object):
+ self._object = object
+
+ def __getattr__(self, a):
+ return _getattrEx(self, a)
+
+ def __setattr__(self, a, val):
+ if a == "_object":
+ self.__dict__['_object'] = val
+ return
+
+ if self._emulation.has_key(a):
+ setattr(self.__dict__['_object'], self._emulation[a], val)
+ elif self._setters.has_key(a):
+ self._setters[a](self, val)
+ else:
+ setattr(self.__dict__['_object'], a, val)
+
+ def __repr__(self):
+ return repr(self._object)
+
+ def rename(self, name):
+ """Tries to set the name of the object to 'name'. If the name already
+exists, a unique name is created by appending a version number (e.g. '.001')
+to 'name'. The effective name is returned."""
+ self._object.name = name
+ return self._object.name
+
+ link = _link
+
+class hasIPO(shadowEx):
+ """Object class which has Ipo curves assigned"""
+
+ def getIpo(self):
+ "Returns the Ipo assigned to 'self'"
+ import Ipo
+ return Ipo.IpoBlock(self._object.ipo)
+
+ def setIpo(self, ipo):
+ "Assigns the IpoBlock 'ipo' to 'self'"
+ return self._object.assignIpo(ipo._object)
+
+ def __getattr__(self, a):
+ if a == "ipo":
+ print "ipo member access deprecated, use self.getIpo() instead!"
+ return self.getIpo()
+ else:
+ return _getattrEx(self, a)
+
+class hasModes(shadowEx):
+ """Object class which has different Modes"""
+ def getMode(self):
+ """Returns a list of the modes which are set for 'self'"""
+ list = []
+ for k in self.Modes.keys():
+ i = self.Modes[k]
+ if self._object.mode & i:
+ list.append(k)
+ return list
+
+ def setMode(self, *args):
+ """Set the mode of 'self'. This function takes a variable number
+of string arguments of the types listed in self.Modes"""
+ flags = 0
+ try:
+ for a in args:
+ flags |= self.Modes[a]
+ except:
+			raise TypeError, "mode must be one of %s" % self.Modes.keys()
+ self._object.mode = flags
+
+class dict:
+ """readonly dictionary shadow"""
+ _emulation = {}
+
+ def __init__(self, dict):
+ self._dict = dict
+
+ def __getitem__(self, key):
+ try:
+ return self._dict[key]
+ except:
+			key = self._emulation[key]
+ return self._dict[key]
+
+ def __repr__(self):
+ return repr(self._dict)
diff --git a/intern/python/modules/Blender/sys.py b/intern/python/modules/Blender/sys.py
new file mode 100644
index 00000000000..f76c0074c1f
--- /dev/null
+++ b/intern/python/modules/Blender/sys.py
@@ -0,0 +1,20 @@
+from _Blender.sys import *
+
+sep = dirsep # path separator ('/' or '\')
+
+class Path:
+	"""Minimal os.path-like helper built on _Blender.sys primitives."""
+	def dirname(self, name):
+		# delegates to _Blender.sys.dirname (pulled in by the star import)
+		return dirname(name)
+	def join(self, a, *p):
+		"""Join path components with Blender's 'dirsep'.
+
+Mirrors os.path.join: a component that starts with dirsep restarts
+the result (the absolute part wins)."""
+		path = a
+		for b in p:
+			if b[:1] == dirsep:
+				path = b
+			elif path == '' or path[-1:] == dirsep:
+				path = path + b
+			else:
+				path = path + dirsep + b
+		return path
+
+path = Path()
+
diff --git a/intern/python/modules/Converter/__init__.py b/intern/python/modules/Converter/__init__.py
new file mode 100644
index 00000000000..81db84a66f6
--- /dev/null
+++ b/intern/python/modules/Converter/__init__.py
@@ -0,0 +1,4 @@
+__all__ = ["importer", "importloader"]
+
+import importloader
+
diff --git a/intern/python/modules/Converter/bimporter.py b/intern/python/modules/Converter/bimporter.py
new file mode 100644
index 00000000000..d788d4bccce
--- /dev/null
+++ b/intern/python/modules/Converter/bimporter.py
@@ -0,0 +1,34 @@
+class importer:
+	"""Skeleton file importer: reads a file into memory and exposes
+overridable 'checkmagic'/'parse' hooks for concrete formats."""
+	def __init__(self,writer=None):
+		self.writer = writer
+		self.filename = None   # set once a file was successfully read
+		self.file = None
+		self.ext = ""
+	def readfile(self, name):
+		file = open(name, "r")
+		# NOTE(review): open() raises IOError on failure instead of
+		# returning a false value, so this branch never triggers.
+		if not file:
+			return 0
+		self.file = file
+		self.filename = name
+		self.lines = file.readlines()
+	def close(self):
+		# only close if readfile() ever succeeded
+		if self.filename:
+			self.file.close()
+	def checkmagic(self, name):
+		# return 1 if magic true (format verified), 0 else
+		return 0
+	def parse(self, data):
+		# parse and convert the data here
+		pass
+
+class writer:
+	"""Skeleton writer counterpart: concrete writers override 'mesh'
+to receive each converted mesh and its name."""
+	def __init__(self, args = None):
+		pass
+	def mesh(self, me, name):
+		pass
+
+_inst = importer()
+readfile = _inst.readfile
+close = _inst.close
+checkmagic = _inst.checkmagic
+parse = _inst.parse
diff --git a/intern/python/modules/Converter/importer/VRMLimporter.py b/intern/python/modules/Converter/importer/VRMLimporter.py
new file mode 100644
index 00000000000..e2bcea6a51e
--- /dev/null
+++ b/intern/python/modules/Converter/importer/VRMLimporter.py
@@ -0,0 +1,988 @@
+# VRML import prototype
+#
+# strubi@blender.nl
+#
+
+"""VRML import module
+
+ This is a prototype for VRML97 file import
+
+ Supported:
+
+ - Object hierarchies, transform collapsing (optional)
+
+ - Meshes (IndexedFaceSet, no Basic primitives yet)
+
+ - Materials
+
+ - Textures (jpg, tga), conversion option from alien formats
+
+"""
+
+import Blender.sys as os # Blender os emulation
+from beta import Scenegraph
+
+Transform = Scenegraph.Transform
+
+import beta.Objects
+
+_b = beta.Objects
+
+#from Blender import Mesh
+Color = _b.Color
+DEFAULTFLAGS = _b.DEFAULTFLAGS
+FACEFLAGS = _b.FACEFLAGS
+shadowNMesh = _b.shadowNMesh
+
+quat = Scenegraph.quat # quaternion math
+vect = quat.vect # vector math module
+from vrml import loader
+
+#### GLOBALS
+
+OB = Scenegraph.Object.Types # CONST values
+LA = Scenegraph.Lamp.Types
+
+g_level = 1
+g_supported_fileformats = ["jpg", "jpeg", "tga"]
+
+#### OPTIONS
+
+OPTIONS = {'cylres' : 16, # resolution of cylinder
+ 'flipnormals' : 0, # flip normals (force)
+ 'mat_as_vcol' : 0, # material as vertex color - warning, this increases mem usage drastically on big files
+ 'notextures' : 0, # no textures - saves some memory
+ 'collapseDEFs' : 0, # collapse DEF nodes
+ 'collapseTF' : 0, # collapse Transforms (as far as possible,
+ # i.e. currently to Object transform level)
+ }
+
+#### CONSTANTS
+
+LAYER_EMPTY = (1 << 2)
+LAYER_LAMP = (1 << 4)
+LAYER_CAMERA = 1 + (1 << 4)
+
+CREASE_ANGLE_THRESHOLD = 0.45 # radians
+
+PARSE_TIME = (loader.parser.IMPORT_PARSE_TIME )
+PROCESS_TIME = (1.0 - PARSE_TIME )
+PROGRESS_DEPTH = loader.parser.PROGRESS_DEPTH
+VERBOSE_DEPTH = PROGRESS_DEPTH
+
+#### DEBUG
+
+def warn(text):
+	# always-visible warning channel
+	print "###", text
+
+def debug2(text):
+	# debug output indented by the walker's current recursion depth
+	print (g_level - 1) * 4 * " " + text
+
+def verbose(text):
+	print text
+
+def quiet(text):
+	# no-op sink used to silence the debug channel
+	pass
+
+# default: debugging off; rebind to debug2/verbose to enable
+debug = quiet
+
+#### ERROR message filtering:
+
+g_error = {} # dictionary for non-fatal errors to mark whether an error
+ # was already reported
+
+def clrError():
+	"""Reset the per-import non-fatal error flags (call before a run)."""
+	global g_error
+	g_error['toomanyfaces'] = 0
+
+def isError(name):
+	# true if the named non-fatal error was already reported this run
+	return g_error[name]
+
+def setError(name):
+	"""Mark the named non-fatal error as reported (suppresses repeats)."""
+	global g_error
+	g_error[name] = 1
+
+#### ERROR handling
+
+class baseError:
+	"""Base class for importer exceptions; stringifies to repr(value)."""
+	def __init__(self, value):
+		self.value = value
+	def __str__(self):
+		return `self.value`
+
+class MeshError(baseError):
+	pass
+
+# re-export the parser's cancel/abort exception under a local name
+UnfinishedError = loader.parser.UnfinishedError
+
+##########################################################
+# HELPER ROUTINES
+
+def assignImage(f, img):
+	# attach a Blender image to face 'f' (texface)
+	f.image = img
+
+def assignUV(f, uv):
+	"""Assign UV coordinates 'uv' to face 'f', truncating silently if
+more UVs than face vertices were supplied."""
+	if len(uv) != len(f.v):
+		uv = uv[:len(f.v)]
+		#raise MeshError, "Number of UV coordinates does not match number of vertices in face"
+	f.uv = []
+	for u in uv:
+		f.uv.append((u[0], u[1])) # make sure it's a tuple
+
+
+#### VRML STUFF
+
+# this is used for transform collapsing
+# this is used for transform collapsing
+class TransformStack:
+	"""Simple stack of Transform objects; starts with one identity
+transform so last() is always valid."""
+	def __init__(self):
+		self.stack = [Transform()]
+	def push(self, t):
+		self.stack.append(t)
+	def pop(self):
+		return self.stack.pop()
+	def last(self):
+		# top of stack without popping
+		return self.stack[-1]
+
+def fromVRMLTransform(tfnode):
+	"""Convert a VRML Transform node into a Scenegraph Transform
+(scale, axis/angle rotation and translation; center/scaleOrientation
+are handled by the caller)."""
+	t = Transform()
+	s = tfnode.scale
+	t.scale = (s[0], s[1], s[2])
+	r = tfnode.rotation
+	# a zero rotation axis is invalid; substitute the identity rotation
+	if r[0] == 0.0 and r[1] == 0.0 and r[2] == 0.0:
+		rotaxis = (0.0, 0.0, 1.0)
+		ang = 0.0
+	else:
+		rotaxis = vect.norm3(r[:3])
+		ang = r[3]
+
+	#t.rotation = (rotaxis, ang)
+	t.calcRotfromAxis((rotaxis, ang))
+	tr = tfnode.translation
+	t.translation = (tr[0], tr[1], tr[2])
+	# XXX more to come..
+	return t
+
+
+### TODO: enable material later on
+#class dummyMaterial:
+ #def setMode(self, *args):
+ #pass
+
+def fromVRMLMaterial(mat):
+ name = mat.DEF
+ from Blender import Material
+ m = Material.New(name)
+
+ m.rgbCol = mat.diffuseColor
+ m.alpha = 1.0 - mat.transparency
+ m.emit = vect.len3(mat.emissiveColor)
+ if m.Emit > 0.01:
+ if vect.cross(mat.diffuseColor, mat.emissiveColor) > 0.01 * m.Emit:
+ m.rgbCol = mat.emissiveColor
+
+ m.ref = 1.0
+ m.spec = mat.shininess
+ m.specCol = mat.specularColor
+ m.amb = mat.ambientIntensity
+ return m
+
+# override:
+#def fromVRMLMaterial(mat):
+# return dummyMaterial()
+
+def buildVRMLTextureMatrix(tr):
+	"""Build a 3x3 homogeneous 2D texture matrix from a VRML
+TextureTransform node (scale/rotation about 'center', then translation)."""
+	from math import sin, cos
+	newMat = vect.Matrix
+	newVec = vect.Vector
+	# rotmatrix
+	s = tr.scale
+	t = tr.translation
+	c = tr.center
+
+	phi = tr.rotation
+
+	SR = newMat()
+	C = newMat()
+	# C translates the rotation center into the origin (3rd row =
+	# homogeneous translation part)
+	C[2] = newVec(c[0], c[1], 1.0)
+
+	# skip the trig for (near) zero rotation angles
+	if abs(phi) > 0.00001:
+		SR[0] = newVec(s[0] * cos(phi), s[1] * sin(phi), 0.0)
+		SR[1] = newVec(-s[0] * sin(phi), s[1] * cos(phi), 0.0)
+	else:
+		SR[0] = newVec(s[0], 0.0, 0.0)
+		SR[1] = newVec(0.0, s[1], 0.0)
+
+	SR = C * SR * C.inverse() # rotate & scale about rotation center
+
+	T = newMat()
+	T[2] = newVec(t[0], t[1], 1.0)
+	return SR * T # texture transform matrix
+
+def imageConvert(fromfile, tofile):
+	"""This should convert from a image file to another file, type is determined
+automatically (on extension). It's currently just a stub - users can override
+this function to implement their own converters"""
+	return 0 # we just fail in general
+
+def addImage(path, filename):
+	"returns a possibly existing image which is imported by Blender"
+	from Blender import Image
+	img = None
+	# a filename without an extension cannot be classified -- give up
+	try:
+		r = filename.rindex('.')
+	except:
+		return None
+
+	naked = filename[:r]    # filename without extension
+	ext = filename[r+1:].lower()
+
+	if path:
+		name = os.sep.join([path, filename])
+		file = os.sep.join([path, naked])
+	else:
+		name = filename
+		file = naked
+
+	if not ext in g_supported_fileformats:
+		# unsupported format: look for an already-converted sibling
+		# (same basename, .tga/.jpg), then try the user conversion hook
+		tgafile = file + '.tga'
+		jpgfile = file + '.jpg'
+		for f in tgafile, jpgfile: # look for jpg, tga
+			try:
+				img = Image.Load(f)
+				if img:
+					verbose("couldn't load %s (unsupported).\nFound %s instead" % (name, f))
+					return img
+			except IOError, msg:
+				pass
+		# probe whether the original file exists at all before converting
+		try:
+			imgfile = open(name, "rb")
+			imgfile.close()
+		except IOError, msg:
+			warn("Image %s not found" % name)
+			return None
+
+		verbose("Format unsupported, trying to convert to %s" % tgafile)
+		if not imageConvert(name, tgafile):
+			warn("image conversion failed")
+			return None
+		else:
+			return Image.Load(tgafile)
+		# NOTE(review): unreachable -- both branches above return
+		return None # failed
+	# supported extension: load directly
+	try:
+		img = Image.Load(name)
+	except IOError, msg:
+		warn("Image %s not found" % name)
+	return img
+	# ok, is supported
+
+def callMethod(_class, method, vnode, newnode, warn = 1):
+	"""Dispatch VRML node 'vnode' to the handler named 'method' on the
+processor instance '_class'; returns the handler's (children, bnode)
+pair, or (None, None) if no handler exists.
+
+NOTE(review): the 'warn' parameter shadows the module-level warn()
+function inside this scope; also, if 'meth' is falsy the function
+falls off the end and returns a bare None instead of a 2-tuple."""
+	meth = None
+	try:
+		meth = getattr(_class, method)
+	except AttributeError:
+		if warn:
+			unknownType(method)
+		return None, None
+	if meth:
+		return meth(vnode, parent = newnode)
+
+def unknownType(type):
+	# report a VRML node type with no matching handler method
+	warn("unsupported:" + repr(type))
+
+def getChildren(vnode):
+	"""Return vnode's children, or None for leaf nodes (nodes without
+a 'children' attribute)."""
+	try:
+		children = vnode.children
+	except:
+		children = None
+	return children
+
+def getNodeType(vnode):
+	# the VRML node's type name is stored in its __gi__ attribute
+	return vnode.__gi__
+
+GroupingNodeTypes = ["Group", "Collision", "Anchor", "Billboard", "Inline",
+ "LOD", "Switch", "Transform"]
+
+################################################################################
+#
+#### PROCESSING CLASSES
+
+
+class NullProcessor:
+ def __init__(self, tstack = TransformStack()):
+ self.stack = tstack
+ self.walker = None
+ self.mesh = None
+ self.ObjectNode = Scenegraph.NodefromData # may be altered...
+ self.MaterialCache = {}
+ self.ImageCache = {}
+
+# This is currently not used XXX
+class DEFcollapser(NullProcessor):
+ """This is for collapsing DEF Transform nodes into a single object"""
+ def __init__(self):
+ self.collapsedNodes = []
+
+ def Transform(self, curnode, parent, **kw):
+ name = curnode.DEF
+ if not name: # node is a DEF node
+ return None, None
+
+ return children, None
+
+
+class Processor(NullProcessor):
+ """The processor class defines the handler for a VRML Scenegraph node.
+Definition of a handler method simply happens by use of the VRML Scenegraph
+entity name.
+
+A handler usually creates a new Scenegraph node in the target scenegraph,
+converting the data from the given VRML node.
+
+A handler takes the arguments:
+
+ curnode: the currently visited VRML node
+ parent: the previously generated target scenegraph parent node
+ **kw: additional keywords
+
+It MUST return: (children, newBnode) where:
+ children: the children of the current VRML node. These will be further
+ processed by the processor. If this is not wanted (because they
+ might have been processed by the handler), None must be returned.
+ newBnode: the newly created target node or None.
+ """
+
+	def _handleProto(self, curnode, parent, **kw):
+		"""Fallback for PROTO-defined node types; unsupported protos are
+reported. NOTE(review): falls through (implicit None) when the proto
+has a sceneGraph -- callers expecting a 2-tuple should beware."""
+		p = curnode.PROTO
+		if not p.sceneGraph:
+			print curnode.__gi__, "unsupported"
+			return None, None
+
+	def _dummy(self, curnode, parent, **kw):
+		# debugging aid: dump the node's scenegraph, process nothing
+		print curnode.sceneGraph
+		return None, None
+
+	#def __getattr__(self, name):
+		#"""If method is not statically defined, look up prototypes"""
+		#return self._handleProto
+
+	def _currentTransform(self):
+		# top of the (possibly collapsed) transform stack
+		return self.stack.last()
+
+	def _parent(self, curnode, parent, trans):
+		"""Insert curnode's children under 'parent', creating an extra
+Empty object to carry 'trans' when more than one object (or any
+nested transform/group) is present; otherwise the single child gets
+the transform directly."""
+		name = curnode.DEF
+		children = getChildren(curnode)
+		debug("children: %s" % children)
+		objects = []
+		transforms = []
+		groups = []
+		isempty = 0
+		# sort children into transforms, grouping nodes and handled leaves
+		for c in children:
+			type = getNodeType(c)
+			if type == 'Transform':
+				transforms.append(c)
+			elif type in GroupingNodeTypes:
+				groups.append(c)
+			#else:
+			elif hasattr(self, type):
+				objects.append(c)
+		if transforms or groups or len(objects) != 1:
+			# it's an empty
+			if not name:
+				name = 'EMPTY'
+			Bnode = self.ObjectNode(None, OB.EMPTY, name) # empty Blender Object node
+			# 'options' is the module-global dict set up by importfile()
+			if options['layers']:
+				Bnode.object.Layer = LAYER_EMPTY
+			Bnode.transform = trans
+			Bnode.update()
+			isempty = 1
+			parent.insert(Bnode)
+		else: # don't insert extra empty if only one object has children
+			Bnode = parent
+
+		for node in objects:
+			c, new = self.walker.walk(node, Bnode)
+			if not isempty: # only apply transform if no extra transform empty in hierarchy
+				new.transform = trans
+			Bnode.insert(new)
+		for node in transforms:
+			self.walker.walk(node, Bnode)
+		for node in groups:
+			self.walker.walk(node, Bnode)
+
+		return None, None
+
+	def sceneGraph(self, curnode, parent, **kw):
+		# root of the VRML file: mark target root and recurse into all children
+		parent.type = 'ROOT'
+		return curnode.children, None
+
+	def Transform(self, curnode, parent, **kw):
+		# we support 'center' and 'scaleOrientation' by inserting
+		# another Empty in between the Transforms
+
+		t = fromVRMLTransform(curnode)
+		cur = self._currentTransform()
+
+		chainable = 0
+
+		if OPTIONS['collapseTF']:
+			# try to fold this transform into the running one; if the
+			# Transform class can't multiply these, fall back to nesting
+			try:
+				cur = cur * t # chain transforms
+			except:
+				cur = self._currentTransform()
+				chainable = 1
+
+		self.stack.push(cur)
+
+		# here comes the tricky hacky transformation conversion
+
+		# TODO: SR not supported yet
+
+		if chainable == 1: # collapse, but not chainable
+			# insert extra transform:
+			Bnode = self.ObjectNode(None, OB.EMPTY, 'Transform') # Empty
+			Bnode.transform = cur
+			parent.insert(Bnode)
+			parent = Bnode
+
+		c = curnode.center
+		if c != [0.0, 0.0, 0.0]:
+			# non-zero rotation center: emulate it with an extra Empty
+			# carrying the center offset
+			chainable = 1
+			trans = Transform()
+			trans.translation = (-c[0], -c[1], -c[2])
+			tr = t.translation
+			t.translation = (tr[0] + c[0], tr[1] + c[1], tr[2] + c[2])
+
+			Bnode = self.ObjectNode(None, OB.EMPTY, 'C') # Empty
+			Bnode.transform = t
+			parent.insert(Bnode)
+			parent = Bnode
+		else:
+			trans = t
+
+		# NOTE(review): 'chainable' is only ever set to 0 or 1 above, so
+		# this branch looks dead -- confirm before relying on it.
+		if chainable == 2: # collapse and is chainable
+			# don't parent, insert into root node:
+			for c in getChildren(curnode):
+				dummy, node = self.walker.walk(c, parent) # skip transform node, insert into parent
+				if node: # a valid Blender node
+					node.transform = cur
+		else:
+			self._parent(curnode, parent, trans)
+
+
+		self.stack.pop()
+		return None, None
+
+	def Switch(self, curnode, parent, **kw):
+		# Switch nodes (choice of children) are not converted at all
+		return None, None
+
+	def Group(self, curnode, parent, **kw):
+		"""Process a Group node: with transform collapsing the children
+are walked directly into 'parent' carrying the current collapsed
+transform, otherwise they are parented with an identity transform."""
+		if OPTIONS['collapseTF']:
+			cur = self._currentTransform()
+			# don't parent, insert into root node:
+			children = getChildren(curnode)
+			for c in children:
+				dummy, node = self.walker.walk(c, parent) # skip transform node, insert into parent
+				if node: # a valid Blender node
+					node.transform = cur
+		else:
+			t = Transform()
+			self._parent(curnode, parent, t)
+		return None, None
+
+	def Collision(self, curnode, parent, **kw):
+		# collision semantics are irrelevant for import; treat as a Group
+		return self.Group(curnode, parent)
+
+# def LOD(self, curnode, parent, **kw):
+# c, node = self.walker.walk(curnode.level[0], parent)
+# parent.insert(node)
+# return None, None
+
+	def Appearance(self, curnode, parent, **kw):
+		"""Convert and cache material/texture state for the current Shape.
+
+Results are stored on self.curColor / self.curmaterial / self.curImage /
+self.curtexmatrix for the geometry handlers that run afterwards."""
+		# material colors:
+		mat = curnode.material
+		self.curColor = mat.diffuseColor
+
+		# NOTE(review): the two cache branches below are identical except
+		# for the key source (material DEF vs. appearance DEF) -- a
+		# candidate for collapsing into one lookup helper.
+		name = mat.DEF
+		if name:
+			if self.MaterialCache.has_key(name):
+				self.curmaterial = self.MaterialCache[name]
+			else:
+				m = fromVRMLMaterial(mat)
+				self.MaterialCache[name] = m
+				self.curmaterial = m
+		else:
+			if curnode.DEF:
+				name = curnode.DEF
+				if self.MaterialCache.has_key(name):
+					self.curmaterial = self.MaterialCache[name]
+				else:
+					m = fromVRMLMaterial(mat)
+					self.MaterialCache[name] = m
+					self.curmaterial = m
+			else:
+				self.curmaterial = fromVRMLMaterial(mat)
+
+		# texture image: only url[0] is used; a missing texture node or
+		# empty url list is tolerated via the broad except
+		try:
+			name = curnode.texture.url[0]
+		except:
+			name = None
+		if name:
+			if self.ImageCache.has_key(name):
+				self.curImage = self.ImageCache[name]
+			else:
+				self.ImageCache[name] = self.curImage = addImage(self.curpath, name)
+		else:
+			self.curImage = None
+
+		tr = curnode.textureTransform
+		if tr:
+			self.curtexmatrix = buildVRMLTextureMatrix(tr)
+		else:
+			self.curtexmatrix = None
+		return None, None
+
+	def Shape(self, curnode, parent, **kw):
+		"""Convert a Shape node: process its appearance (material/texture
+state), then its geometry into self.mesh, and wrap the written mesh
+in a Scenegraph object node. Named shapes are marked processed so
+USE references can be linked instead of re-converted."""
+		name = curnode.DEF
+		debug(name)
+		#self.mesh = Mesh.rawMesh()
+		self.mesh = shadowNMesh()
+		self.mesh.name = name
+
+		# don't mess with the order of these..
+		if curnode.appearance:
+			self.walker.preprocess(curnode.appearance, self.walker.preprocessor)
+		else:
+			# no appearance, get colors from shape (vertex colors)
+			self.curColor = None
+			self.curImage = None
+		self.walker.preprocess(curnode.geometry, self.walker.preprocessor)
+
+		if hasattr(self, 'curmaterial'):
+			self.mesh.assignMaterial(self.curmaterial)
+
+		meshobj = self.mesh.write() # write mesh
+		del self.mesh
+		bnode = Scenegraph.ObjectNode(meshobj, OB.MESH, name)
+		if name:
+			curnode.setTargetnode(bnode) # mark as already processed
+		return None, bnode
+
+	def Box(self, curnode, parent, **kw):
+		"""Generate an axis-aligned box mesh of curnode.size into the
+current shape mesh, vertex-colored with the current material color."""
+		col = apply(Color, self.curColor)
+
+		faces = []
+		x, y, z = curnode.size
+		# VRML 'size' is the full extent; vertices sit at half extents
+		x *= 0.5; y *= 0.5; z *= 0.5
+		name = curnode.DEF
+		m = self.mesh
+		v0 = m.addVert((-x, -y, -z))
+		v1 = m.addVert(( x, -y, -z))
+		v2 = m.addVert(( x, y, -z))
+		v3 = m.addVert((-x, y, -z))
+		v4 = m.addVert((-x, -y, z))
+		v5 = m.addVert(( x, -y, z))
+		v6 = m.addVert(( x, y, z))
+		v7 = m.addVert((-x, y, z))
+
+		flags = DEFAULTFLAGS
+		# without a texture image, use vertex colors (uvflag) instead
+		if not self.curImage:
+			uvflag = 1
+		else:
+			uvflag = 0
+
+		# six quads, outward-facing winding
+		m.addFace([v3, v2, v1, v0], flags, uvflag)
+		m.addFace([v0, v1, v5, v4], flags, uvflag)
+		m.addFace([v1, v2, v6, v5], flags, uvflag)
+		m.addFace([v2, v3, v7, v6], flags, uvflag)
+		m.addFace([v3, v0, v4, v7], flags, uvflag)
+		m.addFace([v4, v5, v6, v7], flags, uvflag)
+
+		for f in m.faces:
+			f.col = [col, col, col, col]
+		return None, None
+
+	def Viewpoint(self, curnode, parent, **kw):
+		"""Convert a Viewpoint node into a Blender camera object placed
+at the viewpoint's position/orientation."""
+		t = Transform()
+		r = curnode.orientation
+		name = 'View_' + curnode.description
+		t.calcRotfromAxis((r[:3], r[3]))
+		t.translation = curnode.position
+		Bnode = self.ObjectNode(None, OB.CAMERA, name) # Empty
+		Bnode.object.Layer = LAYER_CAMERA
+		Bnode.transform = t
+		return None, Bnode
+
+	def DirectionalLight(self, curnode, parent, **kw):
+		# direction is ignored; a Sun lamp is placed at a fixed location
+		loc = (0.0, 10.0, 0.0)
+		l = self._lamp(curnode, loc)
+		l.object.data.type = LA.SUN
+		return None, l
+
+	def PointLight(self, curnode, parent, **kw):
+		l = self._lamp(curnode, curnode.location)
+		l.object.data.type = LA.LOCAL
+		return None, l
+
+	def _lamp(self, curnode, location):
+		"""Create a Blender lamp object node at 'location' with energy
+scaled from the VRML light's intensity; the caller sets the lamp type."""
+		t = Transform()
+		name = curnode.DEF
+		energy = curnode.intensity
+		t.translation = location
+		Bnode = self.ObjectNode(None, OB.LAMP, "Lamp")
+		# VRML intensity is 0..1; scale up to a useful Blender energy
+		Bnode.object.data.energy = energy * 5.0
+		if options['layers']:
+			Bnode.object.Layer = LAYER_LAMP
+		Bnode.transform = t
+		return Bnode
+
+	def IndexedFaceSet(self, curnode, **kw):
+		"""Convert an IndexedFaceSet into the current shape mesh:
+vertices (with optional per-vertex normals), -1 terminated face index
+runs (with per-vertex or per-face colors), then UV coordinates.
+Note: unlike the other handlers this one takes no 'parent' positional
+argument -- callMethod passes it as a keyword, absorbed by **kw."""
+		matxvec = vect.matxvec  # hoisted for the UV transform loop
+		mesh = self.mesh
+		debug("IFS, read mesh")
+
+		texcoo = curnode.texCoord
+		uvflag = 0
+
+		if curnode.color:
+			colors = curnode.color.color
+			if curnode.colorIndex: # we have color indices
+				colindex = curnode.colorIndex
+			else:
+				colindex = curnode.coordIndex
+			if not texcoo:
+				uvflag = 1
+			else:
+				colors = None
+
+		faceflags = DEFAULTFLAGS
+
+		# material-as-vertex-color option: bake the diffuse color into
+		# the faces instead of using a texture
+		if not texcoo and OPTIONS['mat_as_vcol'] and self.curColor:
+			uvflag = 1
+			col = apply(Color, self.curColor)
+		elif self.curImage:
+			faceflags += FACEFLAGS.TEX
+
+# MAKE VERTICES
+
+		coo = curnode.coord
+		ncoo = len(coo.point)
+
+		if curnode.normal: # normals defined
+			normals = curnode.normal.vector
+			# only usable if there is exactly one normal per vertex
+			if curnode.normalPerVertex and len(coo.point) == len(normals):
+				self.mesh.recalc_normals = 0
+				normindex = curnode.normalIndex
+				i = 0
+				for v in coo.point:
+					newv = mesh.addVert(v)
+					n = newv.no
+					n[0], n[1], n[2] = normals[normindex[i]]
+					i += 1
+			else:
+				for v in coo.point:
+					mesh.addVert(v)
+		else:
+			for v in coo.point:
+				mesh.addVert(v)
+			# no explicit normals: derive smoothing from the crease angle
+			if curnode.creaseAngle < CREASE_ANGLE_THRESHOLD:
+				self.mesh.smooth = 1
+
+		nvertices = len(mesh.vertices)
+		if nvertices != ncoo:
+			print "todo: %d, done: %d" % (ncoo, nvertices)
+			raise RuntimeError, "FATAL: could not create all vertices"
+
+# MAKE FACES
+
+		index = curnode.coordIndex
+		vlist = []
+
+		flip = OPTIONS['flipnormals']
+		facecount = 0
+		vertcount = 0
+
+		cols = []
+		if curnode.colorPerVertex: # per vertex colors
+			# coordIndex is a flat list of vertex indices; -1 terminates
+			# one face and flushes the collected vertex list
+			for i in index:
+				if i == -1:
+					if flip or (curnode.ccw == 0 and not flip): # counterclockwise face def
+						vlist.reverse()
+					f = mesh.addFace(vlist, faceflags, uvflag)
+					if uvflag or colors:
+						f.col = cols
+					cols = []
+					vlist = []
+				else:
+					if colors:
+						col = apply(Color, colors[colindex[vertcount]])
+						cols.append(col)
+					vertcount += 1
+					v = mesh.vertices[i]
+					vlist.append(v)
+		else: # per face colors
+			for i in index:
+				if i == -1:
+					if flip or (curnode.ccw == 0 and not flip): # counterclockwise face def
+						vlist.reverse()
+					f = mesh.addFace(vlist, faceflags, uvflag)
+					facecount += 1
+
+					if colors:
+						col = apply(Color, colors[colindex[facecount]])
+						cols = len(f.v) * [col]
+
+					if uvflag or colors:
+						f.col = cols
+					vlist = []
+				else:
+					v = mesh.vertices[i]
+					vlist.append(v)
+
+# TEXTURE COORDINATES
+
+		if not texcoo:
+			return None, None
+
+		self.curmaterial.setMode("traceable", "shadow", "texFace")
+		m = self.curtexmatrix
+		if m: # texture transform exists:
+			# apply the 2D texture matrix to every UV point in place
+			for uv in texcoo.point:
+				v = (uv[0], uv[1], 1.0)
+				v1 = matxvec(m, v)
+				uv[0], uv[1] = v1[0], v1[1]
+
+		UVindex = curnode.texCoordIndex
+		if not UVindex:
+			UVindex = curnode.coordIndex
+		# go assign UVs
+		self.mesh.hasFaceUV(1)
+		j = 0
+		uv = []
+		# same -1-terminated run structure as the face loop; 'f' is the
+		# face fetched at the start of each run
+		for i in UVindex:
+			if i == -1: # flush
+				if not curnode.ccw:
+					uv.reverse()
+				assignUV(f, uv)
+				assignImage(f, self.curImage)
+				uv = []
+				j +=1
+			else:
+				f = mesh.faces[j]
+				uv.append(texcoo.point[i])
+		return None, None
+
+class PostProcessor(NullProcessor):
+	"""Post-visit hooks: nothing to clean up for Shapes or Transforms."""
+	def Shape(self, curnode, **kw):
+		pass
+		return None, None
+	def Transform(self, curnode, **kw):
+		return None, None
+
+class Walker:
+ """The node visitor (walker) class for VRML nodes"""
+ def __init__(self, pre, post = NullProcessor(), progress = None):
+ self.scene = Scenegraph.BScene()
+ self.preprocessor = pre
+ self.postprocessor = post
+ pre.walker = self # processor knows about walker
+ post.walker = self
+ self.nodes = 1
+ self.depth = 0
+ self.progress = progress
+ self.processednodes = 0
+
+ def walk(self, vnode, parent):
+ """Essential walker routine. It walks along the scenegraph nodes and
+processes them according to its pre/post processor methods.
+
+The preprocessor methods return the children of the node remaining
+to be processed or None. Also, a new created target node is returned.
+If the target node is == None, the current node will be skipped in the
+target scenegraph generation. If it is a valid node, the walker routine
+inserts it into the 'parent' node of the target scenegraph, which
+must be a valid root node on first call, leading us to the example usage:
+
+ p = Processor()
+ w = Walker(p, PostProcessor())
+ root = Scenegraph.RootNode()
+ w.walk(SG, root) # SG is a VRML scenegraph
+ """
+ global g_level #XXX
+ self.depth += 1
+ g_level = self.depth
+ if self.depth < PROGRESS_DEPTH:
+ self.processednodes += 1
+ if self.progress:
+ ret = self.progress(PARSE_TIME + PROCESS_TIME * float(self.processednodes) / self.nodes)
+ if not ret:
+ progress(1.0)
+ raise UnfinishedError, "User cancelled conversion"
+
+ # if vnode has already been processed, call Linker method, Processor method otherwise
+ id = vnode.DEF # get name
+ if not id:
+ id = 'Object'
+
+ processed = vnode.getTargetnode()
+ if processed: # has been processed ?
+ debug("linked obj: %s" % id)
+ children, bnode = self.link(processed, parent)
+ else:
+ children, bnode = self.preprocess(vnode, parent)
+
+ if not bnode:
+ bnode = parent # pass on
+ else:
+ parent.insert(bnode) # insert into SG
+
+ if children:
+ for c in children:
+ self.walk(c, bnode)
+ if not processed:
+ self.postprocess(vnode, bnode)
+
+ self.depth -= 1
+
+ return children, bnode
+
+ def link(self, bnode, parent):
+ """Link already processed data"""
+ # link data:
+ new = bnode.clone()
+ if not new:
+ raise RuntimeError, "couldn't clone object"
+ return None, new
+
+ def preprocess(self, vnode, newnode = None):
+ """Processes a VRML node 'vnode' and returns a custom node. The processor must
+be specified in 'p'.
+Optionally, a custom parent node (previously created) is passed as 'newnode'."""
+
+ pre = "pre"
+
+ nodetype = vnode.__gi__
+
+ debug(pre + "process:" + repr(nodetype) + " " + vnode.DEF)
+ return callMethod(self.preprocessor, nodetype, vnode, newnode)
+
+ def postprocess(self, vnode, newnode = None):
+ """Postprocessing of a VRML node, see Walker.preprocess()"""
+
+ nodetype = vnode.__gi__
+ pre = "post"
+
+ debug(pre + "process:" + repr(nodetype) + " " + vnode.DEF)
+ return callMethod(self.postprocessor, nodetype, vnode, newnode, 0)
+
+testfile2 = '/home/strubi/exotic/wrl/BrownTrout1.wrl'
+testfile = '/home/strubi/exotic/wrl/examples/VRML_Model_HSL.wrl'
+
+def fix_VRMLaxes(root, scale):
+	"""Reparent the whole imported hierarchy under one Empty that rotates
+the coordinate system (VRML is y-up, Blender is z-up) and optionally
+scales everything down by 1/100."""
+	from Blender import Object, Scene
+	# 90 degrees (1.57079 rad) about the x axis converts y-up to z-up
+	q = quat.fromRotAxis((1.0, 0.0, 0.0), 1.57079)
+	empty = Object.New(OB.EMPTY)
+	empty.layer = LAYER_EMPTY
+	Scene.getCurrent().link(empty)
+	node = Scenegraph.ObjectNode(empty, None, "VRMLscene")
+	node.transform.rotation = q
+	if scale:
+		node.transform.scale = (0.01, 0.01, 0.01)
+	for c in root.children:
+		node.insert(c)
+	node.update()
+	root.children = [node]
+
+#################################################################
+# these are the routines that must be provided for the importer
+# interface in blender
+
+def checkmagic(name):
+	"check for file magic"
+	f = open(name, "r")
+	magic = loader.getFileType(f)
+	f.close()
+	if magic == 'vrml':
+		return 1
+	elif magic == 'gzip':
+		# gzipped VRML is accepted too, if the gzip module is available
+		verbose("gzipped file detected")
+		try:
+			import gzip
+		except ImportError, value:
+			warn("Importing gzip module: %s" % value)
+			return 0
+
+		f = gzip.open(name, 'rb')
+		header = f.readline()
+		f.close()
+		# a VRML97 file must start with this header line
+		if header[:10] == "#VRML V2.0":
+			return 1
+		else:
+			return 0
+	print "unknown file"
+	return 0
+
+g_infotxt = ""
+
+def progress(done):
+	"""Update Blender's progress bar to fraction 'done' with the current
+g_infotxt label; returns the window's continue flag (false = cancel)."""
+	from Blender import Window
+	ret = Window.draw_progressbar(done, g_infotxt)
+	return ret
+
+class Counter:
+	"""Count scenegraph nodes down to PROGRESS_DEPTH levels (for the
+progress display).
+
+NOTE(review): _count accumulates across calls, so each Counter
+instance is effectively single-use."""
+	def __init__(self):
+		self._count = 0
+		self.depth = 0
+	def count(self, node):
+		# nodes below the progress depth are not counted (nor descended)
+		if self.depth >= PROGRESS_DEPTH:
+			return 0
+
+		self.depth += 1
+		self._count += 1
+		if not getChildren(node):
+			self.depth -= 1
+			return 0
+		else:
+			for c in node.children:
+				self.count(c)
+			self.depth -= 1
+		return self._count
+
+################################################################################
+# MAIN ROUTINE
+
+def importfile(name):
+	"""Import the VRML file 'name' into the current Blender scene and
+return the root node of the generated target scenegraph.
+
+Reads the user's 'vrmloptions' (twoside, layers, autoscale) from
+Blender, parses the file, walks the VRML scenegraph through the
+Processor, then rotates the result into Blender's z-up convention."""
+
+	global g_infotxt
+	global options
+	global DEFAULTFLAGS
+
+	from Blender import Get # XXX
+	options = Get('vrmloptions')
+	DEFAULTFLAGS = FACEFLAGS.LIGHT + FACEFLAGS.DYNAMIC
+	if options['twoside']:
+		print "TWOSIDE"
+		DEFAULTFLAGS |= FACEFLAGS.TWOSIDE
+	clrError()
+	g_infotxt = "load & parse file..."
+	progress(0.0)
+	root = Scenegraph.RootNode()
+	# a user cancel raises UnfinishedError; partial results are kept
+	try:
+		l = loader.Loader(name, progress)
+		SG = l.load()
+		p = Processor()
+		w = Walker(p, PostProcessor(), progress)
+		g_infotxt = "convert data..."
+		p.curpath = os.path.dirname(name)
+		print "counting nodes...",
+		c = Counter()
+		nodes = c.count(SG)
+		print "done."
+		w.nodes = nodes # let walker know about number of nodes parsed # XXX
+		w.walk(SG, root)
+	except UnfinishedError, msg:
+		print msg
+
+	progress(1.0)
+	fix_VRMLaxes(root, options['autoscale']) # rotate coordinate system: in VRML, y is up!
+	root.update() # update baselist for proper display
+	return root
diff --git a/intern/python/modules/Converter/importer/__init__.py b/intern/python/modules/Converter/importer/__init__.py
new file mode 100644
index 00000000000..003e2733b17
--- /dev/null
+++ b/intern/python/modules/Converter/importer/__init__.py
@@ -0,0 +1,17 @@
+"""This module contains a list of valid importers in 'importers'. At runtime,
+importer modules can be registered by the 'register' function."""
+
+__all__ = ["VRMLimporter"]
+
+importers = __all__
+
+import VRMLimporter
+
+def register(importer):
+	"""Register a file importer.
+
+NOTE(review): 'importers' is initialized with module *names* (strings)
+which importloader.process() resolves via getattr on this package, but
+this function appends the importer object itself -- the two styles
+look incompatible; verify against importloader before relying on it."""
+	methods = ["checkmagic", "importfile"]
+	for m in methods:
+		if not hasattr(importer, m):
+			raise TypeError, "This is not an importer"
+	importers.append(importer)
+
diff --git a/intern/python/modules/Converter/importloader.py b/intern/python/modules/Converter/importloader.py
new file mode 100644
index 00000000000..245ab108d1f
--- /dev/null
+++ b/intern/python/modules/Converter/importloader.py
@@ -0,0 +1,23 @@
+# this is the importloader which blender calls on unknown
+# file types
+
+import importer
+
+supported= {'wrl': importer.VRMLimporter}
+
+def process(name):
+	"""Try each registered importer's magic check on 'name'; import the
+file with the first importer that recognizes it. Returns 1 on
+success, 0 if no importer claimed the file."""
+	# run through importerlist and check for magic
+	m = None
+	for modname in importer.importers:
+		mod = getattr(importer, modname)
+		if mod.checkmagic(name):
+			m = mod
+			break
+	if not m:
+		return 0
+	m.importfile(name)
+	#except:
+		#import sys
+		#print "Import failed", sys.exc_value
+	return 1
+
diff --git a/intern/python/modules/TextTools/Constants/Sets.py b/intern/python/modules/TextTools/Constants/Sets.py
new file mode 100644
index 00000000000..bf260aa3e0c
--- /dev/null
+++ b/intern/python/modules/TextTools/Constants/Sets.py
@@ -0,0 +1,39 @@
+""" Constants for sets (of characters)
+
+ (c) Copyright Marc-Andre Lemburg; All Rights Reserved.
+ See the documentation for further information on copyrights,
+ or contact the author (mal@lemburg.com).
+"""
+import string
+
+# Simple character strings
+
+a2z = 'abcdefghijklmnopqrstuvwxyz'
+A2Z = 'ABCDEFGHIJKLMNOPQRSTUVWXYZ'
+umlaute = 'äöüß'
+Umlaute = 'ÄÖÜ'
+alpha = A2Z + a2z
+german_alpha = A2Z + a2z + umlaute + Umlaute
+number = '0123456789'
+alphanumeric = alpha + number
+white = ' \t\v'
+newline = '\r\n'
+formfeed = '\f'
+whitespace = white + newline + formfeed
+any = '\000\001\002\003\004\005\006\007\010\011\012\013\014\015\016\017\020\021\022\023\024\025\026\027\030\031\032\033\034\035\036\037 !"#$%&\'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~\177\200\201\202\203\204\205\206\207\210\211\212\213\214\215\216\217\220\221\222\223\224\225\226\227\230\231\232\233\234\235\236\237\240\241\242\243\244\245\246\247\250\251\252\253\254\255\256\257\260\261\262\263\264\265\266\267\270\271\272\273\274\275\276\277\300\301\302\303\304\305\306\307\310\311\312\313\314\315\316\317\320\321\322\323\324\325\326\327\330\331\332\333\334\335\336\337\340\341\342\343\344\345\346\347\350\351\352\353\354\355\356\357\360\361\362\363\364\365\366\367\370\371\372\373\374\375\376\377'
+
+# Precompiled as sets, e.g. a2z_set = set(a2z)
+a2z_set = '\000\000\000\000\000\000\000\000\000\000\000\000\376\377\377\007\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000'
+A2Z_set = '\000\000\000\000\000\000\000\000\376\377\377\007\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000'
+alpha_set = '\000\000\000\000\000\000\000\000\376\377\377\007\376\377\377\007\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000'
+german_alpha_set = '\000\000\000\000\000\000\000\000\376\377\377\007\376\377\377\007\000\000\000\000\000\000\000\000\020\000@\220\020\000@\020'
+number_set = '\000\000\000\000\000\000\377\003\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000'
+alphanumeric_set = '\000\000\000\000\000\000\377\003\376\377\377\007\376\377\377\007\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000'
+white_set = '\000\002\000\000\001\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000'
+newline_set = '\000$\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000'
+whitespace_set = '\000&\000\000\001\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000'
+nonwhitespace_set = '\377\301\377\377\376\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377'
+any_set = '\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377'
+
+# Clean up
+del string
diff --git a/intern/python/modules/TextTools/Constants/TagTables.py b/intern/python/modules/TextTools/Constants/TagTables.py
new file mode 100644
index 00000000000..315d825b94e
--- /dev/null
+++ b/intern/python/modules/TextTools/Constants/TagTables.py
@@ -0,0 +1,348 @@
+""" Constants for writing tag tables
+
+ The documentation in this file is obsoleted by the HTML docs in
+ the Doc/ subdirectory of the package. Constants defined here must
+ match those in mxTextTools/mxte.h.
+
+ (c) Copyright Marc-Andre Lemburg; All Rights Reserved.
+ See the documentation for further information on copyrights,
+ or contact the author (mal@lemburg.com).
+"""
+#########################################################################
+# This file contains the definitions and constants used by the tagging
+# engine:
+#
+# 1. Matching Tables
+# 2. Commands & Constants
+# 3. Matching Functions
+# 4. Callable tagobjects
+# 5. Calling the engine & Taglists
+#
+
+#########################################################################
+# 1. Matching Tables:
+#
+# these are tuples of tuples, each entry having the following meaning:
+#
+# syntax: (tag, cmd, chars|table|fct [,jne] [,je=1])
+# tag = object used to mark this section, if it matches
+# cmd = command (see below)
+# chars = match one or more of these characters
+# table = table to use for matching characters
+# fct = function to call (see below)
+# jne = if the current character doesn't match, jump this
+# many table entries relative to the current entry
+# je = if we have a match make a relative jump of this length
+#
+# * a table matches a string iff the end of the table is reached
+# (that is: an index is requested that is beyond the end-of-table)
+# * a table is not matched if a tag is not matched and no jne is given;
+# if it is matched then processing simply moves on to the next entry
+# * marking is done by adding the matching slice in the string
+# together with the marking object to the tag list; if the object is
+# None, then it will not be appended to the taglist list
+# * if the flag CallTag is set in cmd, then instead of appending
+# matches to the taglist, the tagobj will be called (see below)
+#
+# TIP: if you are getting an error 'call of a non-function' while
+# writing a table definition, you probably have a missing ','
+# somewhere in the tuple !
+#
+# For examples see the tag*.py - files that came with this engine.
+#
+
+#########################################################################
+# 2. Commands & Constants
+#
+#
+
+#
+# some useful constants for writing matching tables
+#
+
+To = None # good for cmd=Jump
+Here = None # good for cmd=Fail and EOF
+MatchOk = 20000 # somewhere beyond the end of the tag table...
+MatchFail = -20000 # somewhere beyond the start of the tag table...
+ToEOF = -1 # good for cmd=Move
+
+ThisTable = 999 # to recursively match using the current table;
+ # can be passed as argument to Table and SubTable
+ # instead of a tuple
+
+#
+# commands and flags passed in cmd (see below)
+#
+# note: I might add some further commands to this list, if needed
+# (the numbers will then probably change, but not the
+# names)
+#
+# convention: a command "matches", if and only if it moves the
+# current position at least one character; a command "reads"
+# the characters, if they match ok
+#
+# notations:
+#
+# x refers to the current position in the string
+# len refers to the string length or what the function tag() is told to
+# believe it to be (i.e. the engine only looks at the slice text[x:len])
+# text refers to the text string
+# jne is the optional relative jump distance in case the command
+# did not match, i.e. x before and after applying the command
+# are the same (if not given the current table is considered
+# not to match)
+# je is the optional relative jump distance in case the command
+# did match (it defaults to +1)
+#
+
+# commands
+Fail = 0 # this will always fail (position remains unchanged)
+Jump = 0 # jump to jne (position remains unchanged)
+
+# match & read chars
+AllIn = 11 # all chars in match (at least one)
+AllNotIn = 12 # all chars not in match (at least one)
+Is = 13 # current char must be == match (matches one char)
+IsIn = 14 # current char must be in match (matches one char)
+IsNot = 15 # current char must be != match (matches one char)
+IsNotIn = 15 # current char must not be in match (matches one char)
+
+AllInSet = 31
+IsInSet = 32
+
+# match & read for whole words
+Word = 21 # the next chars must be those in match
+WordStart = 22 # all chars up to the first occ. of match (at least one)
+WordEnd = 23 # same as WordStart, except that the text pointer
+ # is moved behind the match
+NoWord = WordStart # all chars up to the first occ. of match (at least one)
+
+
+# match using search objects BMS or FS
+sWordStart = 111 # all chars up to the first occ. of match (may be 0 chars)
+sWordEnd = 112 # same as sWordStart, except that the text pointer
+ # is moved behind the match
+sFindWord = 113 # find match and process the found slice only (ignoring
+ # the chars that lead up to the match); positions
+ # the text pointer right after the match like WordEnd
+
+# functions & tables
+Call = 201 # call match(text,x,len) as function (see above)
+CallArg = 202 # match has to be a 2-tuple (fct,arg), then
+ # fct(text,x,len,arg) is called; the return value is taken
+ # as new x; it is considered matching if the new x is
+ # different than the x before the call -- like always
+ # (note: arg has to be *one* object, e.g. a tuple)
+Table = 203 # match using table (given in match)
+SubTable = 207 # match using sub table (given in match); the sub table
+ # uses the same taglist as the calling table
+TableInList = 204 # same as Table, but match is a tuple (list,index)
+ # and the table list[index] is used as matching
+ # table
+SubTableInList = 208
+ # same as TableInList, but the sub table
+ # uses the same taglist as the calling table
+
+# specials
+EOF = 1 # current position must be EOF, e.g. >= len(string)
+Skip = 2 # skip match (must be an integer) chars; note: this cmd
+ # always matches ok, so jne doesn't have any meaning in
+ # this context
+Move = 3 # move the current text position to match (if negative,
+ # the text length + 1 (!) is added, thus -1 moves to the
+ # EOF, -2 to the last char and so on); note: this cmd
+ # always matches ok, so jne doesn't have any meaning in
+ # this context
+
+# loops
+Loop = 205 # loop-construct
+ #
+ # (tagobj,Loop,Count,jne,je) - sets/decrements the
+ # loop variable for current table according to the
+ # following rules:
+ # 1. the first time the engine passes this entry
+ # sets the loop variable to Count and continues
+ # without reading any character, but saving the
+ # current position in text
+ # 2. the next time, it decrements the loop variable
+ # and checks if it is < 0:
+ # (a) if it is, then the tagobj is added to the
+ # taglist with the slice (saved position, current
+ # position) and processing continues at entry
+ # current + jne
+ # (b) else, processing continues at entry current + je
+ # Note: if you jump out of the loop while the loop
+ # variable is still > 0, then you *must*
+ # reset the loop mechanism with
+ # (None,LoopControl,Reset)
+ # Note: you can skip the remaining loops by calling
+ # (None,LoopControl,Break) and jumping back
+ # to the Loop-entry; this sets the loop
+ # variable to 0
+ # Note: tables cannot have nested loops within their
+ # context; you can have nested loops in nested
+ # tables though (there is one loop var per
+ # tag()-call which takes place every time
+ # a table match is done)
+ #
+LoopControl = 206 # controls the loop variable (always succeeds, i.e.
+ # jne has no meaning);
+ # match may be one of:
+Break = 0 # * sets the loop variable to 0, thereby allowing
+ # to skip the remaining loops
+Reset = -1 # * resets the loop mechanism (see note above)
+ #
+ # See tagLoop.py for some examples.
+
+##########################################################################
+#
+# Flags (to be '+'ed with the above command code)
+#
+CallTag = 256 # call tagobj(taglist,text,l,r,subtags)
+ # upon successfully matching the slice [l:r] in text
+ # * taglist is the current list tags found (may be None)
+ # * subtags is a sub-list, passed when a subtable was used
+ # to do the matching -- it is None otherwise !)
+#
+# example entry with CallTag-flag set:
+#
+# (found_a_tag,CallTag+Table,tagtable)
+# -- if tagtable matches the current text position,
+# found_a_tag(taglist,text,l,r,newtaglist) is called and
+# the match is *not* appended to the taglist by the tagging
+# engine (the function would have to do this, in case it is needed)
+
+AppendToTagobj = 512 # this appends the slice found to the tagobj, assuming
+ # that it is a Python list:
+ # does a tagobj.append((None,l,r,subtags)) call
+# Alias for b/w comp.
+AppendToTag = AppendToTagobj
+
+AppendTagobj = 1024 # don't append (tagobj,l,r,subtags) to the taglist,
+ # but only tagobj itself; the information in l,r,subtags
+ # is lost, yet this can be used to write tag tables
+ # whose output can be used directly by tag.join()
+
+AppendMatch = 2048 # append the match to the taglist instead of
+ # the tag object; this produces non-standard
+ # taglists !
+
+#########################################################################
+# 3. Matching Functions
+#
+# syntax:
+#
+# fct(s,x,len_s)
+# where s = string we are working on
+# x = current index in s where we want to match something
+# len_s = 'length' of s, this is how far the search may be
+# conducted in s, not necessarily the true length of s
+#
+# * the function has to return the index of the char right after
+# matched string, e.g.
+#
+# 'xyzabc' ---> 'xyz' matches ---> return x+3
+#
+# * if the string doesn't match simply return x; in other words:
+# the function has to return the matching slice's right index
+# * you can use this to match e.g. 10 characters of a certain kind,
+# or any word out of a given list, etc.
+# * note: you cannot give the function additional parameters from within
+# the matching table, so it has to know everything it needs to
+# know a priori; use dynamic programming !
+#
+# some examples (not needed, since all are implemented by commands)
+#
+#
+#def matchword(x):
+# s = """
+#def a(s,x,len_text):
+# y = x+%i
+# if s[x:y] == %s: return y
+# return x
+#"""
+# exec s % (len(x),repr(x))
+# return a
+#
+#def rejectword(x):
+# s = """
+#def a(s,x,len_text):
+# while x < len(s) and s[x:x+%i] != %s:
+# x = x + 1
+# return x
+#"""
+# exec s % (len(x),repr(x))
+# return a
+#
+#def HTML_Comment(s,x,len_text):
+# while x < len_text and s[x:x+3] != '-->':
+# x = x + 1
+# return x
+#
+#
+
+#########################################################################
+# 4. Callable tagobjects
+#
+# a sample callable tagobj:
+#
+#
+#def test(taglist,text,l,r,newtaglist):
+#
+# print 'found',repr(text[l:r])[:40],(l,r)
+#
+#
+
+#########################################################################
+# 5. Calling the engine & Taglists
+#
+# The function
+# tag(text,table,start=0,len_text=len(text),taglistinit=[])
+# found in mxTextTools:
+#
+# This function does all the matching according to the above rules.
+# You give it a text string and a tag table and it will
+# start processing the string starting from 'start' (which defaults to 0)
+# and continue working until it reaches the 'EOF', i.e. len_text (which
+# defaults to the text length). It thus tags the slice text[start:len_text].
+#
+# The function will create a list of found tags in the following
+# format (which I call taglist):
+#
+# (tagobj,l,r,subtaglist)
+#
+# where: tagobj = specified tag object taken from the table
+# [l:r] = slice that matched the tag in text
+# subtaglist = if matching was done using a subtable
+# this is the taglist it produced; in all other
+# cases this will be None
+#
+# * if you pass None as taglistinit, then no taglist will be created,
+# i.e. only CallTag commands will have any effect. (This saves
+# temporary memory for big files)
+# * the function returns a tuple:
+# (success, taglist, nextindex)
+# where: success = 0/1
+# taglist = the produced list or None
+# nextindex = the index+1 of the last char that matched
+# (in case of failure, this points to the beginning
+# of the substring that caused the problem)
+#
+
+### Module init.
+
+def _module_init():
+
+ global id2cmd
+
+ import types
+ id2cmd = {}
+ IntType = types.IntType
+ for cmd,value in globals().items():
+ if type(value) == IntType:
+ if value == 0:
+ id2cmd[0] = 'Fail/Jump'
+ else:
+ id2cmd[value] = cmd
+
+_module_init()
diff --git a/intern/python/modules/TextTools/Constants/__init__.py b/intern/python/modules/TextTools/Constants/__init__.py
new file mode 100644
index 00000000000..0519ecba6ea
--- /dev/null
+++ b/intern/python/modules/TextTools/Constants/__init__.py
@@ -0,0 +1 @@
+ \ No newline at end of file
diff --git a/intern/python/modules/TextTools/TextTools.py b/intern/python/modules/TextTools/TextTools.py
new file mode 100644
index 00000000000..7eae2bcfc39
--- /dev/null
+++ b/intern/python/modules/TextTools/TextTools.py
@@ -0,0 +1,766 @@
+""" mxTextTools - A tools package for fast text processing.
+
+ (c) Copyright Marc-Andre Lemburg; All Rights Reserved.
+ See the documentation for further information on copyrights,
+ or contact the author (mal@lemburg.com).
+"""
+import string,types
+
+#
+# import the C module and the version number
+#
+from mxTextTools import *
+from mxTextTools import __version__
+
+#
+# import the symbols needed to write tag tables
+#
+from Constants.TagTables import *
+
+#
+# import the some handy character sets
+#
+from Constants.Sets import *
+
+#
+# format and print tables, taglists and joinlists:
+#
+def format_entry(table,i,
+
+ TupleType=types.TupleType):
+
+ """ Returns a pp-formatted tag table entry as string
+ """
+ e = table[i]
+ jne = 0
+ je = 1
+ t,c,m = e[:3]
+ if len(e)>3: jne = e[3]
+ if len(e)>4: je = e[4]
+ flags,cmd = divmod(c,256)
+ c = id2cmd[cmd]
+ if type(m) == TupleType and c in ('Table','SubTable'):
+ m = '<table>'
+ elif m == None:
+ m = 'Here/To'
+ else:
+ m = repr(m)
+ if len(m) > 17:
+ m = m[:17]+'...'
+ return '%-15.15s : %-30s : jne=%+i : je=%+i' % \
+ (repr(t),'%-.15s : %s'%(c,m),jne,je)
+
+def format_table(table,i=-1):
+
+ """ Returns a pp-formatted version of the tag table as string """
+
+ l = []
+ for j in range(len(table)):
+ if i == j:
+ l.append('--> '+format_entry(table,j))
+ else:
+ l.append(' '+format_entry(table,j))
+ return string.join(l,'\n')+'\n'
+
+def print_tagtable(table):
+
+ """ Print the tag table
+ """
+ print format_table(table)
+
+def print_tags(text,tags,indent=0):
+
+ """ Print the taglist tags for text using the given indent level
+ """
+ for tag,l,r,subtags in tags:
+ tagname = repr(tag)
+ if len(tagname) > 20:
+ tagname = tagname[:20] + '...'
+ target = repr(text[l:r])
+ if len(target) > 60:
+ target = target[:60] + '...'
+ if subtags == None:
+ print ' '+indent*' |',tagname,': ',target,(l,r)
+ else:
+ print ' '+indent*' |',tagname,': ',target,(l,r)
+ print_tags(text,subtags,indent+1)
+
+def print_joinlist(joins,indent=0,
+
+ StringType=types.StringType):
+
+ """ Print the joinlist joins using the given indent level
+ """
+ for j in joins:
+ if type(j) == StringType:
+ text = repr(j)
+ if len(text) > 40:
+ text = text[:40] + '...'
+ print ' '+indent*' |',text,' (len = %i)' % len(j)
+ else:
+ text = j[0]
+ l,r = j[1:3]
+ text = repr(text[l:r])
+ if len(text) > 40:
+ text = text[:40] + '...'
+ print ' '+indent*' |',text,' (len = %i)' % (r-l),(l,r)
+
+def normlist(jlist,
+
+ StringType=types.StringType):
+
+ """ Return a normalized joinlist.
+
+ All tuples in the joinlist are turned into real strings. The
+ resulting list is an equivalent copy of the joinlist only
+ consisting of strings.
+
+ """
+ l = [''] * len(jlist)
+ for i in range(len(jlist)):
+ entry = jlist[i]
+ if type(entry) == StringType:
+ l[i] = entry
+ else:
+ l[i] = entry[0][entry[1]:entry[2]]
+ return l
+
+#
+# aid for matching from a list of words
+#
+def _lookup_dict(l,index=0):
+
+ d = {}
+ for w in l:
+ c = w[index]
+ if d.has_key(c):
+ d[c].append(w)
+ else:
+ d[c] = [w]
+ return d
+
+def word_in_list(l):
+
+ """ Creates a lookup table that matches the words in l
+ """
+ t = []
+ d = _lookup_dict(l)
+ keys = d.keys()
+ if len(keys) < 18: # somewhat arbitrary bound
+ # fast hint for small sets
+ t.append((None,IsIn,string.join(d.keys(),'')))
+ t.append((None,Skip,-1))
+ # test groups
+ for c, group in d.items():
+ t.append(None) # hint will be filled in later
+ i = len(t)-1
+ for w in group:
+ t.append((None,Word,w[1:],+1,MatchOk))
+ t.append((None,Fail,Here))
+ # add hint
+ t[i] = (None,Is,c,len(t)-i)
+ t.append((None,Fail,Here))
+ return tuple(t)
+
+#
+# Extra stuff useful in combination with the C functions
+#
+
+def replace(text,what,with,start=0,stop=None,
+
+ SearchObject=BMS,join=join,joinlist=joinlist,tag=tag,
+ string_replace=string.replace,type=type,
+ StringType=types.StringType):
+
+ """A fast replacement for string.replace.
+
+ what can be given as string or search object.
+
+ This function is a good example for the AppendTagobj-flag usage
+ (the taglist can be used directly as joinlist).
+
+ """
+ if type(what) == StringType:
+ so = SearchObject(what)
+ else:
+ so = what
+ what = so.match
+ if stop is None:
+ if start == 0 and len(what) < 2:
+ return string_replace(text,what,with)
+ stop = len(text)
+ t = ((text,sWordStart,so,+2),
+ # Found something, replace and continue searching
+ (with,Skip+AppendTagobj,len(what),-1,-1),
+ # Rest of text
+ (text,Move,ToEOF)
+ )
+ found,taglist,last = tag(text,t,start,stop)
+ if not found:
+ return text
+ return join(taglist)
+
+# Alternative (usually slower) versions using different techniques:
+
+def _replace2(text,what,with,start=0,stop=None,
+
+ join=join,joinlist=joinlist,tag=tag,
+ StringType=types.StringType,BMS=BMS):
+
+ """Analogous to string.replace; returns a string with all occurrences
+ of what in text[start:stop] replaced by with
+ - uses a one entry tag-table and a Boyer-Moore-Search-object
+ - what can be a string or a BMS/FS search object
+ - it's faster than string.replace in those cases, where
+ the what-string gets long and/or many replacements are found;
+ faster meaning from a few percent up to many times as fast
+ - start and stop define the slice of text to work in
+ - stop defaults to len(text)
+ """
+ if stop is None:
+ stop = len(text)
+ if type(what) == StringType:
+ what=BMS(what)
+ t = ((with,sFindWord,what,+1,+0),)
+ found,taglist,last = tag(text,t,start,stop)
+ if not found:
+ return text
+ return join(joinlist(text,taglist))
+
+def _replace3(text,what,with,
+
+ join=string.join,FS=FS,
+ StringType=types.StringType):
+
+ if type(what) == StringType:
+ what=FS(what)
+ slices = what.findall(text)
+ if not slices:
+ return text
+ l = []
+ x = 0
+ for left,right in slices:
+ l.append(text[x:left] + with)
+ x = right
+ l.append(text[x:])
+ return join(l,'')
+
+def _replace4(text,what,with,
+
+ join=join,joinlist=joinlist,tag=tag,FS=FS,
+ StringType=types.StringType):
+
+ if type(what) == StringType:
+ what=FS(what)
+ slices = what.findall(text)
+ if not slices:
+ return text
+ repl = [None]*len(slices)
+ for i in range(len(slices)):
+ repl[i] = (with,)+slices[i]
+ return join(joinlist(text,repl))
+
+
+def find(text,what,start=0,stop=None,
+
+ SearchObject=FS):
+
+ """ A faster replacement for string.find().
+
+ Uses a search object for the task. Returns the position of the
+ first occurrence of what in text[start:stop]. stop defaults to
+ len(text). Returns -1 in case no occurrence was found.
+
+ """
+ if stop:
+ return SearchObject(what).find(text,start,stop)
+ else:
+ return SearchObject(what).find(text,start)
+
+def findall(text,what,start=0,stop=None,
+
+ SearchObject=FS):
+
+ """ Find all occurances of what in text.
+
+ Uses a search object for the task. Returns a list of slice
+ tuples (l,r) marking all occurrences in
+ text[start:stop]. stop defaults to len(text). Returns an
+ empty list in case no occurrence was found.
+
+ """
+ if stop:
+ return SearchObject(what).findall(text,start,stop)
+ else:
+ return SearchObject(what).findall(text,start)
+
+def split(text,sep,start=0,stop=None,translate=None,
+
+ SearchObject=FS):
+
+ """ A faster replacement for string.split().
+
+ Uses a search object for the task. Returns the result of
+ cutting the text[start:stop] string into snippets at every sep
+ occurrence in form of a list of substrings. translate is passed
+ to the search object as translation string.
+
+ XXX convert to a C function... or even better, add as method
+ to search objects.
+
+ """
+ if translate:
+ so = SearchObject(sep,translate)
+ else:
+ so = SearchObject(sep)
+ if stop:
+ cuts = so.findall(text,start,stop)
+ else:
+ cuts = so.findall(text,start)
+ l = 0
+ list = []
+ append = list.append
+ for left,right in cuts:
+ append(text[l:left])
+ l = right
+ append(text[l:])
+ return list
+
+# helper for tagdict
+def _tagdict(text,dict,prefix,taglist):
+
+ for o,l,r,s in taglist:
+ pfx = prefix + str(o)
+ dict[pfx] = text[l:r]
+ if s:
+ _tagdict(text,dict,pfx+'.',s)
+
+def tagdict(text,*args):
+
+ """ Tag a text just like the function tag() and then convert
+ its output into a dictionary where the tagobjects reference
+ their respective strings
+ - this function emulates the interface of tag()
+ - in contrast to tag() this function *does* make copies
+ of the found strings
+ - returns a tuple (rc,tagdict,next) with the same meaning
+ of rc and next as tag(); tagdict is the new dictionary -
+ None in case rc is 0
+ """
+ rc,taglist,next = apply(tag,(text,)+args)
+ if not rc:
+ return (rc,None,next)
+ d = {}
+ tagdict = _tagdict
+ for o,l,r,s in taglist:
+ pfx = str(o)
+ d[pfx] = text[l:r]
+ if s:
+ tagdict(text,dict,pfx+'.',s)
+ return (rc,d,next)
+
+def invset(chars):
+
+ """ Return a set with all characters *except* the ones in chars.
+ """
+ return set(chars,0)
+
+def is_whitespace(text,start=0,stop=None,
+
+ nonwhitespace=nonwhitespace_set,setfind=setfind):
+
+ """ Return 1 iff text[start:stop] only contains whitespace
+ characters (as defined in Constants/Sets.py), 0 otherwise.
+ """
+ if stop is None:
+ stop = len(text)
+ i = setfind(text,nonwhitespace,start,stop)
+ return (i < 0)
+
+def collapse(text,seperator=' ',
+
+ join=join,setsplit=setsplit,collapse_set=set(newline+whitespace)):
+
+ """ Eliminates newline characters and compresses whitespace
+ characters into one space.
+
+ The result is a one line text string. Tim Peters will like
+ this function called with '-' seperator ;-)
+
+ """
+ return join(setsplit(text,collapse_set),seperator)
+
+_linesplit_table = (
+ (None,Is,'\r',+1),
+ (None,Is,'\n',+1),
+ ('line',AllInSet+AppendMatch,set('\r\n',0),+1,-2),
+ (None,EOF,Here,+1,MatchOk),
+ ('empty line',Skip+AppendMatch,0,0,-4),
+ )
+
+def splitlines(text,
+
+ tag=tag,linesplit_table=_linesplit_table):
+
+ """ Split text into a list of single lines.
+
+ The following combinations are considered to be line-ends:
+ '\r', '\r\n', '\n'; they may be used in any combination. The
+ line-end indicators are removed from the strings prior to
+ adding them to the list.
+
+ This function allows dealing with text files from Macs, PCs
+ and Unix origins in a portable way.
+
+ """
+ return tag(text,linesplit_table)[1]
+
+_linecount_table = (
+ (None,Is,'\r',+1),
+ (None,Is,'\n',+1),
+ ('line',AllInSet+AppendTagobj,set('\r\n',0),+1,-2),
+ (None,EOF,Here,+1,MatchOk),
+ ('empty line',Skip+AppendTagobj,0,0,-4),
+ )
+
+def countlines(text,
+
+ linecount_table=_linecount_table):
+
+ """ Returns the number of lines in text.
+
+ Line ends are treated just like for splitlines() in a
+ portable way.
+ """
+ return len(tag(text,linecount_table)[1])
+
+_wordsplit_table = (
+ (None,AllInSet,whitespace_set,+1),
+ ('word',AllInSet+AppendMatch,nonwhitespace_set,+1,-1),
+ (None,EOF,Here,+1,MatchOk),
+ )
+
+def splitwords(text,
+
+ setsplit=setsplit,whitespace_set=whitespace_set):
+
+ """ Split text into a list of single words.
+
+ Words are separated by whitespace. The whitespace is stripped
+ before adding the words to the list.
+
+ """
+ return setsplit(text,whitespace_set)
+
+#
+# Testing and benchmarking
+#
+
+# Taken from my hack.py module:
+import time
+class _timer:
+
+ """ timer class with a quite obvious interface
+ - .start() starts a fairly accurate CPU-time timer plus an
+ absolute timer
+ - .stop() stops the timer and returns a tuple: the CPU-time in seconds
+ and the absolute time elapsed since .start() was called
+ """
+
+ utime = 0
+ atime = 0
+
+ def start(self,
+ clock=time.clock,time=time.time):
+ self.atime = time()
+ self.utime = clock()
+
+ def stop(self,
+ clock=time.clock,time=time.time):
+ self.utime = clock() - self.utime
+ self.atime = time() - self.atime
+ return self.utime,self.atime
+
+ def usertime(self,
+ clock=time.clock,time=time.time):
+ self.utime = clock() - self.utime
+ self.atime = time() - self.atime
+ return self.utime
+
+ def abstime(self,
+ clock=time.clock,time=time.time):
+ self.utime = clock() - self.utime
+ self.atime = time() - self.atime
+ return self.utime
+
+ def __str__(self):
+
+ return '%0.2fu %0.2fa sec.' % (self.utime,self.atime)
+
+def _bench(file='mxTextTools/mxTextTools.c'):
+
+ def mismatch(orig,new):
+ print
+ for i in range(len(orig)):
+ if orig[i] != new[i]:
+ break
+ else:
+ print 'Length mismatch: orig=%i new=%i' % (len(orig),len(new))
+ if len(orig) > len(new):
+ print 'Missing chars:'+repr(orig[len(new):])
+ else:
+ print 'Excess chars:'+repr(new[len(orig):])
+ print
+ return
+ print 'Mismatch at offset %i:' % i
+ print (orig[i-100:i]
+ + '<- %s != %s ->' % (repr(orig[i]),repr(new[i]))
+ + orig[i+1:i+100])
+ print
+
+ text = open(file).read()
+ import string
+
+ t = _timer()
+ print 'Working on a %i byte string' % len(text)
+
+ if 0:
+ print
+ print 'Replacing strings'
+ print '-'*72
+ print
+ for what,with in (('m','M'),('mx','MX'),('mxText','MXTEXT'),
+ ('hmm','HMM'),('hmmm','HMM'),('hmhmm','HMM')):
+ print 'Replace "%s" with "%s"' % (what,with)
+ t.start()
+ for i in range(100):
+ rtext = string.replace(text,what,with)
+ print 'with string.replace:',t.stop(),'sec.'
+ t.start()
+ for i in range(100):
+ ttext = replace(text,what,with)
+ print 'with tag.replace:',t.stop(),'sec.'
+ if ttext != rtext:
+ print 'results are NOT ok !'
+ print '-'*72
+ mismatch(rtext,ttext)
+ t.start()
+ for i in range(100):
+ ttext = _replace2(text,what,with)
+ print 'with tag._replace2:',t.stop(),'sec.'
+ if ttext != rtext:
+ print 'results are NOT ok !'
+ print '-'*72
+ print rtext
+ t.start()
+ for i in range(100):
+ ttext = _replace3(text,what,with)
+ print 'with tag._replace3:',t.stop(),'sec.'
+ if ttext != rtext:
+ print 'results are NOT ok !'
+ print '-'*72
+ print rtext
+ t.start()
+ for i in range(100):
+ ttext = _replace4(text,what,with)
+ print 'with tag._replace4:',t.stop(),'sec.'
+ if ttext != rtext:
+ print 'results are NOT ok !'
+ print '-'*72
+ print rtext
+ print
+
+ if 0:
+ print
+ print 'String lower/upper'
+ print '-'*72
+ print
+
+ op = string.lower
+ t.start()
+ for i in range(1000):
+ op(text)
+ t.stop()
+ print ' string.lower:',t
+
+ op = string.upper
+ t.start()
+ for i in range(1000):
+ op(text)
+ t.stop()
+ print ' string.upper:',t
+
+ op = upper
+ t.start()
+ for i in range(1000):
+ op(text)
+ t.stop()
+ print ' TextTools.upper:',t
+
+ op = lower
+ t.start()
+ for i in range(1000):
+ op(text)
+ t.stop()
+ print ' TextTools.lower:',t
+
+ print 'Testing...',
+ ltext = string.lower(text)
+ assert ltext == lower(text)
+ utext = string.upper(text)
+ assert utext == upper(text)
+ print 'ok.'
+
+ if 0:
+ print
+ print 'Joining lists'
+ print '-'*72
+ print
+
+ l = setsplit(text,whitespace_set)
+
+ op = string.join
+ t.start()
+ for i in range(1000):
+ op(l)
+ t.stop()
+ print ' string.join:',t
+
+ op = join
+ t.start()
+ for i in range(1000):
+ op(l)
+ t.stop()
+ print ' TextTools.join:',t
+
+ op = string.join
+ t.start()
+ for i in range(1000):
+ op(l,' ')
+ t.stop()
+ print ' string.join with seperator:',t
+
+ op = join
+ t.start()
+ for i in range(1000):
+ op(l,' ')
+ t.stop()
+ print ' TextTools.join with seperator:',t
+
+ if 0:
+ print
+ print 'Creating join lists'
+ print '-'*72
+ print
+
+ repl = []
+ for i in range(0,len(text),10):
+ repl.append(str(i),i,i+1)
+
+ op = joinlist
+ t.start()
+ for i in range(1000):
+ op(text,repl)
+ t.stop()
+ print ' TextTools.joinlist:',t
+
+ if 0:
+ print
+ print 'Splitting text'
+ print '-'*72
+ print
+
+ op = string.split
+ t.start()
+ for i in range(100):
+ op(text)
+ t.stop()
+ print ' string.split whitespace:',t,'(',len(op(text)),'snippets )'
+
+ op = setsplit
+ ws = whitespace_set
+ t.start()
+ for i in range(100):
+ op(text,ws)
+ t.stop()
+ print ' TextTools.setsplit whitespace:',t,'(',len(op(text,ws)),'snippets )'
+
+ assert string.split(text) == setsplit(text,ws)
+
+ op = string.split
+ sep = 'a'
+ t.start()
+ for i in range(100):
+ op(text,sep)
+ t.stop()
+ print ' string.split at "a":',t,'(',len(op(text,sep)),'snippets )'
+
+ op = split
+ sep = 'a'
+ t.start()
+ for i in range(100):
+ op(text,sep)
+ t.stop()
+ print ' TextTools.split at "a":',t,'(',len(op(text,sep)),'snippets )'
+
+ op = charsplit
+ sep = 'a'
+ t.start()
+ for i in range(100):
+ op(text,sep)
+ t.stop()
+ print ' TextTools.charsplit at "a":',t,'(',len(op(text,sep)),'snippets )'
+
+ op = setsplit
+ sep = set('a')
+ t.start()
+ for i in range(100):
+ op(text,sep)
+ t.stop()
+ print ' TextTools.setsplit at "a":',t,'(',len(op(text,sep)),'snippets )'
+
+ # Note: string.split and setsplit don't work identically !
+
+ op = string.split
+ sep = 'int'
+ t.start()
+ for i in range(100):
+ op(text,sep)
+ t.stop()
+ print ' string.split at "int":',t,'(',len(op(text,sep)),'snippets )'
+
+ op = split
+ sep = 'int'
+ t.start()
+ for i in range(100):
+ op(text,sep)
+ t.stop()
+ print ' TextTools.split at "int":',t,'(',len(op(text,sep)),'snippets )'
+
+ op = setsplit
+ sep = set('int')
+ t.start()
+ for i in range(100):
+ op(text,sep)
+ t.stop()
+ print ' TextTools.setsplit at "i", "n", "t":',t,'(',len(op(text,sep)),'snippets )'
+
+ op = string.split
+ sep = 'register'
+ t.start()
+ for i in range(100):
+ op(text,sep)
+ t.stop()
+ print ' string.split at "register":',t,'(',len(op(text,sep)),'snippets )'
+
+ op = split
+ sep = 'register'
+ t.start()
+ for i in range(100):
+ op(text,sep)
+ t.stop()
+ print ' TextTools.split at "register":',t,'(',len(op(text,sep)),'snippets )'
+
+if __name__=='__main__':
+ _bench()
+
diff --git a/intern/python/modules/TextTools/__init__.py b/intern/python/modules/TextTools/__init__.py
new file mode 100644
index 00000000000..f9255aca276
--- /dev/null
+++ b/intern/python/modules/TextTools/__init__.py
@@ -0,0 +1,48 @@
+""" mxTextTools - A tools package for fast text processing.
+
+ (c) Copyright Marc-Andre Lemburg; All Rights Reserved.
+ See the documentation for further information on copyrights,
+ or contact the author (mal@lemburg.com).
+"""
+__package_info__ = """
+BEGIN PYTHON-PACKAGE-INFO 1.0
+Title: mxTextTools - Tools for fast text processing
+Current-Version: 1.1.1
+Home-Page: http://starship.skyport.net/~lemburg/mxTextTools.html
+Primary-Site: http://starship.skyport.net/~lemburg/mxTextTools-1.1.1.zip
+
+This package provides several different functions and mechanisms
+to do fast text text processing. Amongst these are character set
+operations, parsing & tagging tools (using a finite state machine
+executing byte code) and common things such as Boyer-Moore search
+objects. For full documentation see the home page.
+END PYTHON-PACKAGE-INFO
+"""
+from TextTools import *
+from TextTools import __version__
+
+### Make the types pickleable:
+
+# Shortcuts for pickle (reduces the pickle's length)
+def _BMS(match,translate):
+ return BMS(match,translate)
+def _FS(match,translate):
+ return FS(match,translate)
+
+# Module init
+class modinit:
+
+ ### Register the two types
+ import copy_reg
+ def pickle_BMS(so):
+ return _BMS,(so.match,so.translate)
+ def pickle_FS(so):
+ return _FS,(so.match,so.translate)
+ copy_reg.pickle(BMSType,
+ pickle_BMS,
+ _BMS)
+ copy_reg.pickle(FSType,
+ pickle_FS,
+ _FS)
+
+del modinit
diff --git a/intern/python/modules/TextTools/mxTextTools/__init__.py b/intern/python/modules/TextTools/mxTextTools/__init__.py
new file mode 100644
index 00000000000..009b7cbd4c7
--- /dev/null
+++ b/intern/python/modules/TextTools/mxTextTools/__init__.py
@@ -0,0 +1,17 @@
+""" mxTextTools - A tools package for fast text processing.
+
+ (c) Copyright Marc-Andre Lemburg; All Rights Reserved.
+ See the documentation for further information on copyrights,
+ or contact the author (mal@lemburg.com).
+"""
+from mxTextTools import *
+from mxTextTools import __version__
+
#
# Make BMS take the role of FS in case the Fast Search object was not built
#
try:
    FS  # probe: raises NameError when the extension was built without FS
except NameError:
    # fall back to Boyer-Moore search under both names
    FS = BMS
    FSType = BMSType
diff --git a/intern/python/modules/VRMLmain.py b/intern/python/modules/VRMLmain.py
new file mode 100644
index 00000000000..836c803269e
--- /dev/null
+++ b/intern/python/modules/VRMLmain.py
@@ -0,0 +1,3 @@
+# this file is the entry point for freeze.py
+
+from Converter import importloader
diff --git a/intern/python/modules/beta/Objects.py b/intern/python/modules/beta/Objects.py
new file mode 100644
index 00000000000..0ae56870fd6
--- /dev/null
+++ b/intern/python/modules/beta/Objects.py
@@ -0,0 +1,167 @@
+from Blender import Scene
+import Blender.NMesh as _NMesh
+import Blender.Material as Material
+
+
# UV coordinates of a unit quad, used when a face needs default UVs
defaultUV = [(0.0, 0.0), (1.0, 0.0), (1.0, 1.0), (0.0, 1.0)]

# face-flag constants, plus the flags applied to newly created faces
FACEFLAGS = _NMesh.Const
DEFAULTFLAGS = FACEFLAGS.LIGHT + FACEFLAGS.DYNAMIC

# module-level state shared with the GLU tesselator callbacks below
curface = None     # face currently being assembled by the tesselator
tessfaces = None   # list of faces produced by the tesselator
+
def error():
    """No-op error callback handed to the GLU tesselator."""
    return None
def beginPolygon():
    # tesselator callback: start collecting a new output face
    global curface
    global tessfaces
    curface = _NMesh.Face()
def endPolygon():
    # tesselator callback: current face is complete, store it
    global curface
    global tessfaces
    tessfaces.append(curface)
def addVertex(v):
    # tesselator callback: append vertex v (and its UV pair) to the
    # face currently being assembled
    global curface
    curface.v.append(v)
    curface.uv.append((v.uvco[0], v.uvco[1]))
+
class Face:
    """Placeholder polygon ("NGON") for faces with more than four
    vertices; converted into real NMesh faces by tesselation later."""
    def __init__(self, vlist):
        # display-mode flags, texture coordinates, vertex list
        self.mode = 0
        self.uv = []
        self.v = vlist
+
class shadow:
    # Transparent proxy: forwards attribute reads/writes to self.data.
    # NOTE(review): subclasses appear to be expected to place `data`
    # directly into self.__dict__ -- assigning via this __setattr__
    # would forward the write, and __getattr__ would recurse if `data`
    # were missing. Confirm against subclass usage.
    def __setattr__(self, name, val):
        setattr(self.data, name, val)
    def __getattr__(self, name):
        return getattr(self.data, name)
    def __repr__(self):
        return repr(self.data)
+
+##########################################
+# replacement xMesh (NMesh shadow class)
+
class shadowNVert:
    """Shadow NMVert handed to the tesselator.

    Quacks like a 3-element coordinate sequence (indexes 0..2) while
    also carrying the wrapped vertex and its texture coordinates.
    """
    def __init__(self):
        self.uv = []      # texture coordinates
        self.vert = None  # the wrapped vertex (set by the caller)
    def __len__(self):
        return 3          # always a 3D coordinate
    def __getitem__(self, index):
        # delegate to the wrapped vertex' coordinate vector
        return self.vert.co[index]
+
def Color(r, g, b, a = 1.0):
    """Build an NMesh colour from float channels in [0.0, 1.0]; alpha
    defaults to fully opaque. Channels are scaled to the 0-255 range."""
    scaled = [255 * channel for channel in (r, g, b, a)]
    return _NMesh.Col(scaled[0], scaled[1], scaled[2], scaled[3])
+
class shadowNMesh:
    """Wrapper around Blender's raw NMesh that collects faces (including
    >4-vertex NGON placeholders, tesselated on update) before writing
    the mesh into the current scene."""
    def __init__(self, name = None, default_flags = None):
        self.scene = Scene.getCurrent()
        self.data = _NMesh.GetRaw()     # fresh raw NMesh datablock
        self.name = name
        if default_flags:
            flags = default_flags
        else:
            flags = DEFAULTFLAGS
        self.flags = flags              # face-mode flags for new faces
        self.smooth = 0                 # smooth-shading toggle
        self.faces = []                 # mix of NMFace and NGON objects
        try:
            import tess
            self.tess = tess.Tess(256, beginPolygon, endPolygon, error, addVertex)
        except:
            # best effort: without the tesselator NGONs are dropped
            #print "couldn't import tesselator"
            self.tess = None
        self.curface = None
        self.tessfaces = []
        self.recalc_normals = 1

    def __del__(self):
        # release the raw mesh reference
        del self.data

    def __getattr__(self, name):
        # alias 'vertices' to the raw mesh's verts; delegate everything
        # else to the underlying NMesh
        if name == 'vertices':
            return self.data.verts
        else:
            return getattr(self.data, name)

    def __repr__(self):
        return "Mesh: %d faces, %d vertices" % (len(self.faces), len(self.verts))
    def toNMFaces(self, ngon):
        # Tesselate an NGON placeholder into a list of NMesh faces;
        # returns [] when no tesselator is available.
        # This should be a Publisher only feature...once the tesselation
        # is improved. The GLU tesselator of Mesa < 4.0 is crappy...
        if not self.tess:
            return [] # no faces converted
        import tess
        i = 0
        global tessfaces
        tessfaces = []
        tess.beginPolygon(self.tess)
        for v in ngon.v:
            if len(ngon.uv) == len(ngon.v):
                # carry per-vertex UVs through the tesselator callbacks
                v.uvco = ngon.uv[i]
            tess.vertex(self.tess, (v.co[0], v.co[1], v.co[2]), v)
            i += 1
        tess.endPolygon(self.tess)
        return tessfaces

    def hasFaceUV(self, true):
        # forward the face-UV toggle to the raw mesh
        self.data.hasFaceUV(true)

    def addVert(self, v):
        # Append a new vertex built from coordinate triple v; returns
        # the created NMVert.
        vert = _NMesh.Vert(v[0], v[1], v[2])
        self.data.verts.append(vert)
        return vert

    def addFace(self, vlist, flags = None, makedefaultUV = 0):
        # Add a face over the given vertices. More than 4 vertices
        # produces an NGON placeholder (tesselated later); otherwise a
        # real NMFace. Returns the created face.
        n = len(vlist)
        if n > 4:
            face = Face(vlist)
        else:
            face = _NMesh.Face()
            for v in vlist:
                face.v.append(v)
            if makedefaultUV:
                face.uv = defaultUV[:n]

        self.faces.append(face)
        # turn on default flags:
        if not flags:
            face.mode = self.flags
        else:
            face.mode = flags
        return face

    def write(self):
        # Flush collected faces into the NMesh, wrap it in a new MESH
        # object linked into the scene, and return that object.
        from Blender import Object
        # new API style:
        self.update()
        ob = Object.New(Object.Types.MESH) # create object
        ob.link(self.data) # link mesh data to it
        self.scene.link(ob)
        return ob

    def update(self):
        # Move collected faces into the raw mesh, tesselating NGON
        # placeholders and applying smoothing / material index.
        from Blender.Types import NMFaceType
        smooth = self.smooth
        for f in self.faces:
            if type(f) == NMFaceType:
                f.smooth = smooth
                self.data.faces.append(f)
                f.materialIndex = 0
            else: #it's a NGON (shadow face)
                faces = self.toNMFaces(f)
                for nf in faces:
                    nf.smooth = smooth
                    nf.materialIndex = 0
                    self.data.faces.append(nf)

        if not self.name:
            self.name = "Mesh"

    def assignMaterial(self, material):
        # make the given material the mesh's single material slot
        self.data.materials = [material._object]
+
+Mesh = shadowNMesh
+Vert = shadowNVert
+
diff --git a/intern/python/modules/beta/Scenegraph.py b/intern/python/modules/beta/Scenegraph.py
new file mode 100644
index 00000000000..388beeb7b11
--- /dev/null
+++ b/intern/python/modules/beta/Scenegraph.py
@@ -0,0 +1,182 @@
+
+"""This is a basic scenegraph module for Blender
+It contains low level API calls..."""
+
+# (c) 2001, Martin Strubel // onk@section5.de
+
+from utils import quat #quaternions
+
+from Blender import Object, Lamp, Scene
+
+
# maximum per-component deviation still considered "uniform"
TOLERANCE = 0.01

def uniform_scale(vec):
    """Return the common scale factor of the 3-vector *vec* when all
    components agree within TOLERANCE, otherwise 0.

    Note: a genuinely uniform scale of 0.0 is indistinguishable from
    the non-uniform result, since 0 is returned either way.
    """
    reference = vec[0]
    for component in (vec[1], vec[2]):
        if abs(component - reference) > TOLERANCE:
            return 0
    return reference
+
class Transform:
    """An abstract transform, containing translation, rotation and scale information"""
    def __init__(self):
        self.scale = (1.0, 1.0, 1.0)
        self.translation = (0.0, 0.0, 0.0)
        self.rotation = quat.Quat()             # orientation quaternion
        self.scaleOrientation = quat.Quat() # axis, angle
        self.parent = None
    def __mul__(self, other):
        # Compose two transforms (self applied after other). Only legal
        # when self's scale is uniform, since scale is collapsed to a
        # single scalar here.
        s = uniform_scale(self.scale)
        if not s:
            raise RuntimeError, "non uniform scale, can't multiply"
        t = Transform()
        sc = other.scale
        t.scale = (s * sc[0], s * sc[1], s * sc[2])
        t.rotation = self.rotation * other.rotation
        # rotate/scale other's translation into self's space, then offset
        tr = s * apply(quat.Vector, other.translation)
        t.translation = self.rotation.asMatrix() * tr + self.translation
        return t
    def getLoc(self):
        t = self.translation
        return (t[0], t[1], t[2]) # make sure it's a tuple..silly blender
    def calcRotfromAxis(self, axisrotation):
        # axisrotation is an (axis, angle) pair -- see quat.fromRotAxis
        self.rotation = apply(quat.fromRotAxis,axisrotation)
    def getRot(self):
        # Euler angles, as expected by Blender's ob.rot
        return self.rotation.asEuler()
    def getSize(self):
        s = self.scale
        return (s[0], s[1], s[2])
    def __repr__(self):
        return "Transform: rot: %s loc:%s" % (self.getRot(), self.getLoc())
    def copy(self):
        "returns copy of self"
        # shallow copy: tuples/quaternions are treated as immutable
        t = Transform()
        t.scale = self.scale
        t.translation = self.translation
        t.rotation = self.rotation
        t.scaleOrientation = self.scaleOrientation
        return t
+
class BID:
    "Blender named Object ID"
    def __init__(self, name):
        # the Blender datablock name; payload is attached later
        self.data = None
        self.name = name
+
class BScene:
    """Lightweight scene registry: maps datablock categories ('Image',
    'Object', 'Mesh') to per-category name -> datablock dictionaries."""
    def __init__(self, name = None):
        from Blender import Scene  # kept for compatibility (unused here)
        self.name = name
        self.dict = {'Image': {}, 'Object': {}, 'Mesh': {}}
    def __getitem__(self, key):
        return self.dict[key]
    def __setitem__(self, key, value):
        self.dict[key] = value
    def has_key(self, key):
        # old-style membership probe, normalised to 1/0
        if self.dict.has_key(key):
            return 1
        return 0
    def getnewID(self, templ):
        """Derive an unused name from *templ*, Blender-style:
        'Name', 'Name.001', 'Name.002', ..."""
        candidate = templ
        counter = 0
        while self.dict.has_key(candidate):
            counter = counter + 1
            candidate = "%s.%03d" % (templ, counter)
        return candidate
+
class BSGNode:
    """Blender Scenegraph node: a named, typed tree node wrapping an
    (optional) Blender object."""
    isRoot = 0  # class default; RootNode overrides with 1
    def __init__(self, object = None, type = "", name = ""):
        self.type = type
        self.name = name
        self.children = []   # child BSGNodes
        self.level = 0       # depth in the tree, maintained by setDepth
        self.object = object
    def addChildren(self, children):
        """Append a sequence of nodes to this node's children."""
        self.children += children
    def traverse(self, visitor):
        """Call *visitor* (no arguments) on this node and recursively on
        every descendant; return this node's visitor result."""
        ret = visitor()
        for c in self.children:
            c.traverse(visitor)
        return ret
    def setDepth(self, level):
        """Set this node's depth and renumber the whole subtree."""
        self.level = level
        for c in self.children:
            c.setDepth(level + 1)
    def update(self):
        # fix: was `ob.name = self.name` with `ob` undefined, raising
        # NameError on every call; write through the wrapped object,
        # mirroring ObjectNode.update.
        self.object.name = self.name
    def __repr__(self):
        l = self.level
        children = ""
        pre = l * ' '
        return "\n%s%s [%s] ->%s" % (pre, self.name, self.type, self.children)
+
class ObjectNode(BSGNode):
    """Scenegraph node wrapping a Blender Object; carries a local
    Transform that is pushed onto the Blender object by update()."""
    def __init__(self, object = None, type = "", name = ""):
        self.transform = Transform()
        self.scene = Scene.getCurrent()
        BSGNode.__init__(self, object, type, name)
    def makeParent(self, child):
        # fix: was `self.child = parent`, which raised NameError
        # (`parent` undefined); register the child and back-link it.
        self.children.append(child)
        child.parent = self
    def clone(self):
        """Return a new node wrapping a scene-linked copy of this
        node's Blender object and a copy of its transform."""
        ob = self.object
        newob = ob.copy()
        self.scene.link(newob)
        new = ObjectNode(newob)
        new.transform = self.transform.copy()
        return new
    def insert(self, child):
        """Attach *child* below this node and parent the Blender objects."""
        self.children.append(child)
        child.level = self.level + 1
        ob = child.object
        self.object.makeParent([ob], 1, 1)
        # first parent, THEN set local transform
        child.update()
    def applyTransform(self, tf):
        # compose tf with the current local transform
        self.transform = tf * self.transform
    def update(self):
        """Push local transform and name onto the Blender object."""
        ob = self.object
        t = self.transform
        ob.loc = t.getLoc()
        ob.size = t.getSize()
        ob.rot = t.getRot()
        ob.name = self.name
+
def NodefromData(ob, type, name):
    # Build an ObjectNode for *ob*; when ob is None, a new Blender
    # object of *type* is created and linked into the current scene.
    new = ObjectNode(None, type, name)
    if ob:
        obj = ob
    else:
        obj = Object.New(type)
        Scene.getCurrent().link(obj)
    if not obj:
        raise RuntimeError, "FATAL: could not create object"
    new.object= obj
    new.object.name = name
    #new.fromData(ob)
    return new
+
class RootNode(ObjectNode):
    """stupid simple scenegraph prototype"""
    # class-level defaults marking this node as the tree root
    level = 0
    isRoot = 1
    type = 'Root'
    name = 'ROOT'

    def __init__(self, object = None, type = "", name = ""):
        from Blender import Scene
        self.transform = Transform()
        BSGNode.__init__(self, object, type, name)
        self.scene = Scene.getCurrent()
    def insert(self, child):
        # the root wraps no Blender object, so no parenting -- just adopt
        child.update()
        self.children.append(child)
    def update(self):
        # refresh the whole Blender scene
        self.scene.update()
diff --git a/intern/python/modules/beta/__init__.py b/intern/python/modules/beta/__init__.py
new file mode 100644
index 00000000000..4ceb9fd97a1
--- /dev/null
+++ b/intern/python/modules/beta/__init__.py
@@ -0,0 +1 @@
+__all__ = ["Scenegraph", "Objects"]
diff --git a/intern/python/modules/blenderos.py b/intern/python/modules/blenderos.py
new file mode 100644
index 00000000000..d093388d88f
--- /dev/null
+++ b/intern/python/modules/blenderos.py
@@ -0,0 +1,24 @@
+# This is the built in Blender emulation module for os.py
+# not all features are implemented yet...
+
+import Blender.sys as bsys
+
+sep = bsys.dirsep # path separator ('/' or '\')
+
class Path:
    """Minimal stand-in for os.path, backed by Blender.sys."""
    def dirname(self, name):
        """Return the directory component of *name*."""
        return bsys.dirname(name)
    def join(self, a, *p):
        """Join path components, mirroring os.path.join: an absolute
        component resets the result; otherwise exactly one separator is
        placed between components."""
        dirsep = bsys.dirsep
        result = a
        for part in p:
            if part[:1] == dirsep:
                # absolute component: discard what came before
                result = part
            elif result == '' or result[-1:] == dirsep:
                result = result + part
            else:
                result = result + dirsep + part
        return result
+
+path = Path()
+
diff --git a/intern/python/modules/mcf/__init__.py b/intern/python/modules/mcf/__init__.py
new file mode 100644
index 00000000000..a553f20ba1c
--- /dev/null
+++ b/intern/python/modules/mcf/__init__.py
@@ -0,0 +1,6 @@
+#mcf 'vendor' packages
+
+#These packages are free software, provided without warranty or
+#guarantee, if you use them, you must agree to use them at your
+#own risk. Please see the file license.txt for full license
+#details.
diff --git a/intern/python/modules/mcf/utils/__init__.py b/intern/python/modules/mcf/utils/__init__.py
new file mode 100644
index 00000000000..156e4dbe16b
--- /dev/null
+++ b/intern/python/modules/mcf/utils/__init__.py
@@ -0,0 +1,6 @@
+'''
+mcf.utils package
+
+
+'''
+
diff --git a/intern/python/modules/mcf/utils/collapse.py b/intern/python/modules/mcf/utils/collapse.py
new file mode 100644
index 00000000000..25da50c2adb
--- /dev/null
+++ b/intern/python/modules/mcf/utils/collapse.py
@@ -0,0 +1,169 @@
+'''
+Destructive Functions for "collapsing" Sequences into single levels
+
+>>> from mcf.utils import collapse
+
+>>> collapse.test([[[1],[2,3]],[[]],[4],5,[6]])
+
+[1, 2, 3, 4, 5, 6] # note that is the same root list
+
+>>> collapse.collapse2([[[1],[2,3]],[[]],(4,()),(5,),[6]])
+
+[1, 2, 3, 4, 5, 6] # note is the same root list
+'''
+import copy, types, sys
+from types import ListType, TupleType # this now only supports the obsolete stuff...
+
def hyperCollapse( inlist, allowedmap, type=type, list=list, itype=types.InstanceType, maxint= sys.maxint):
    '''
    Destructively flatten a mixed hierarchy to a single level.
    Non-recursive, many speedups and obfuscations by Tim Peters :)

    allowedmap is a mapping whose keys are the types (or old-style
    classes) that should be expanded in place; its values are ignored.
    '''
    try:
        # for every possible index
        for ind in xrange( maxint):
            # while that index currently holds a list
            expandable = 1
            while expandable:
                expandable = 0
                if allowedmap.has_key( type(inlist[ind]) ):
                    # expand that list into the index (and subsequent indicies)
                    inlist[ind:ind+1] = list( inlist[ind])
                    expandable = 1

                # alternately you could iterate through checking for isinstance on all possible
                # classes, but that would be very slow
                elif type( inlist[ind] ) is itype and allowedmap.has_key( inlist[ind].__class__ ):
                    # here figure out some way to generically expand that doesn't risk
                    # infinite loops...
                    templist = []
                    for x in inlist[ind]:
                        templist.append( x)
                    inlist[ind:ind+1] = templist
                    expandable = 1
    except IndexError:
        # ran off the end of the (mutating) list: flattening is complete
        pass
    return inlist
+
+
def collapse(inlist, type=type, ltype=types.ListType, maxint= sys.maxint):
    '''
    Destructively flatten a list hierarchy to a single level.
    Non-recursive, and (as far as I can see, doesn't have any
    glaring loopholes).
    Further speedups and obfuscations by Tim Peters :)
    '''
    try:
        # for every possible index
        for ind in xrange( maxint):
            # while that index currently holds a list
            while type(inlist[ind]) is ltype:
                # expand that list into the index (and subsequent indicies)
                inlist[ind:ind+1] = inlist[ind]
            #ind = ind+1
    except IndexError:
        # ran off the end of the (mutating) list: flattening is complete
        pass
    return inlist
+
def collapse_safe(inlist):
    '''
    As collapse, but works on a copy of the inlist
    '''
    # copy first so the caller's list is left untouched
    duplicate = inlist[:]
    return collapse(duplicate)
+
def collapse2(inlist, ltype=(types.ListType, types.TupleType), type=type, maxint= sys.maxint ):
    '''
    Destructively flatten a list hierarchy to a single level.
    Will expand tuple children as well, but will fail if the
    top level element is not a list.
    Non-recursive, and (as far as I can see, doesn't have any
    glaring loopholes).
    '''
    ind = 0
    try:
        while 1:
            while type(inlist[ind]) in ltype:
                try:
                    # lists support slice-assignment of themselves...
                    inlist[ind:ind+1] = inlist[ind]
                except TypeError:
                    # ...tuples must be converted to a list first
                    inlist[ind:ind+1] = list(inlist[ind])
            ind = ind+1
    except IndexError:
        # walked past the end: flattening is complete
        pass
    return inlist
+
def collapse2_safe(inlist):
    '''
    As collapse2, but works on a copy of the inlist
    '''
    # fix: previously delegated to collapse(), which ignores tuple
    # children; collapse2 is the documented behaviour of this wrapper.
    return collapse2( list(inlist) )
+
def old_buggy_collapse(inlist):
    '''Always return a one-level list of all the non-list elements in listin,
    rewritten to be non-recursive 96-12-28 Note that the new versions work
    on the original list, not a copy of the original.'''
    # kept for historical reference (see name) -- superseded by collapse()
    if type(inlist)==TupleType:
        inlist = list(inlist)
    elif type(inlist)!=ListType:
        return [inlist]
    x = 0
    while 1:
        try:
            y = inlist[x]
            if type(y) == ListType:
                ylen = len(y)
                if ylen == 1:
                    inlist[x] = y[0]
                    if type(inlist[x]) == ListType:
                        x = x - 1 # need to collapse that list...
                elif ylen == 0:
                    del(inlist[x])
                    x = x-1 # list has been shortened
                else:
                    inlist[x:x+1]=y
            x = x+1
        except IndexError:
            break
    return inlist
+
+
def old_buggy_collapse2(inlist):
    '''As collapse, but also collapse tuples, rewritten 96-12-28 to be non-recursive'''
    # kept for historical reference (see name) -- superseded by collapse2()
    if type(inlist)==TupleType:
        inlist = list(inlist)
    elif type(inlist)!=ListType:
        return [inlist]
    x = 0
    while 1:
        try:
            y = inlist[x]
            if type(y) in [ListType, TupleType]:
                ylen = len(y)
                if ylen == 1:
                    inlist[x] = y[0]
                    if type(inlist[x]) in [ListType,TupleType]:
                        x = x-1 #(to deal with that element)
                elif ylen == 0:
                    del(inlist[x])
                    x = x-1 # list has been shortened, will raise exception with tuples...
                else:
                    inlist[x:x+1]=list(y)
            x = x+1
        except IndexError:
            break
    return inlist
+
+
def oldest_buggy_collapse(listin):
    'Always return a one-level list of all the non-list elements in listin'
    # kept for historical reference; note it recurses via the *new*
    # collapse(), not via itself
    if type(listin) == ListType:
        return reduce(lambda x,y: x+y, map(collapse, listin), [])
    else: return [listin]
+
def oldest_buggy_collapse2(seqin):
    # kept for historical reference; recurses via the *new* collapse2()
    if type(seqin) in [ListType, TupleType]:
        return reduce(lambda x,y: x+y, map(collapse2, seqin), [])
    else:
        return [seqin]
+
diff --git a/intern/python/modules/mcf/utils/copy_extend.py b/intern/python/modules/mcf/utils/copy_extend.py
new file mode 100644
index 00000000000..687564cf12c
--- /dev/null
+++ b/intern/python/modules/mcf/utils/copy_extend.py
@@ -0,0 +1,83 @@
+'''
+Module to allow for "copying" Numeric arrays,
+(and thereby also matrices and userarrays)
+standard arrays, classes and modules
+(last two are not actually copied, but hey :) ).
+
+Could do approximately the same thing with
+copy_reg, but would be inefficient because
+of passing the data into and out of strings.
+
+To use, just import this module.
+'''
+# altered 98.11.05, moved copy out of NUMPY test
+import copy
try: # in case numpy not installed
    import Numeric
    def _numpyarray_copy(somearray, memo=None):
        '''
        Simple function for getting a copy of a NUMPY array
        '''
        if memo == None:
            memo = {} # yeah, I know, not _really_ necessary
        # see if already done this item, return the copy if we have...
        d = id(somearray)
        try:
            return memo[d]
        except KeyError:
            pass
        # copy=1 forces a fresh buffer rather than a shared view
        temp = Numeric.array(somearray, copy=1)
        memo[d] = temp
        return temp
    # now make it available to the copying functions
    copy._copy_dispatch[Numeric.ArrayType] = _numpyarray_copy
    copy._deepcopy_dispatch[Numeric.ArrayType] = _numpyarray_copy
except ImportError: # Numeric not installed...
    pass
+
try: # in case array not installed
    import array
    def _array_copy(somearray, memo = None):
        '''
        Simple function for getting a copy of a standard array.
        '''
        if memo is None:
            memo = {} # yeah, I know, not _really_ necessary
        # see if already done this item, return the copy if we have...
        d = id(somearray)
        try:
            return memo[d]
        except KeyError:
            pass
        # fix: was `somearay[:]` (typo) -- raised NameError whenever an
        # array was actually copied/deepcopied
        newarray = somearray[:]
        memo[d] = newarray
        return newarray

    # now make it available to the copying functions
    copy._copy_dispatch[ array.ArrayType ] = _array_copy
    copy._deepcopy_dispatch[ array.ArrayType ] = _array_copy
except ImportError:
    pass
+
+import types
+
def _module_copy(somemodule, memo = None):
    '''
    Modules we will always treat as themselves during copying???
    '''
    # modules are effectively singletons: hand back the original
    return somemodule

# now make it available to the copying functions
copy._copy_dispatch[ types.ModuleType ] = _module_copy
copy._deepcopy_dispatch[ types.ModuleType ] = _module_copy
+
def _class_copy(someclass, memo=None):
    '''
    Again, classes are considered immutable, they are
    just returned as themselves, not as new objects.
    '''
    return someclass

# now make it available to the copying functions
# (shallow-copy registration was deliberately left disabled)
#copy._copy_dispatch[ types.ClassType ] = _class_copy
copy._deepcopy_dispatch[ types.ClassType ] = _class_copy
diff --git a/intern/python/modules/mcf/utils/cpickle_extend.py b/intern/python/modules/mcf/utils/cpickle_extend.py
new file mode 100644
index 00000000000..aaca41d51fb
--- /dev/null
+++ b/intern/python/modules/mcf/utils/cpickle_extend.py
@@ -0,0 +1,190 @@
+'''
+Extend cpickle storage to include modules, and builtin functions/methods
+
+To use, just import this module.
+'''
+import copy_reg
+
+### OBJECTS WHICH ARE RESTORED THROUGH IMPORTS
# MODULES
def pickle_module(module):
    '''
    Store a module to a pickling stream, must be available for
    reimport during unpickling
    '''
    # reduces to (unpickler, ("import <name>", "<name>")): the unpickler
    # re-executes the import and returns the named module object
    return unpickle_imported_code, ('import %s'%module.__name__, module.__name__)
+
# FUNCTIONS, METHODS (BUILTIN)
def pickle_imported_code(funcmeth):
    '''
    Store a reference to an imported element (such as a function/builtin function,
    Must be available for reimport during unpickling.
    '''
    # fix: _whichmodule returns the module *name* (a string), not the
    # module object, so taking `.__name__` of it raised AttributeError.
    modulename = _whichmodule(funcmeth)
    return unpickle_imported_code, ('from %s import %s'%(modulename,funcmeth.__name__),funcmeth.__name__)
+
+import types, regex
+import_filter = regex.compile('''\(from [A-Za-z0-9_\.]+ \)?import [A-Za-z0-9_\.]+''') # note the limitations on whitespace
+getattr_filter = regex.compile('''[A-Za-z0-9_\.]+''') # note we allow you to use x.y.z here
+
# MODULES, AND FUNCTIONS
def unpickle_imported_code(impstr,impname):
    '''
    Attempt to load a reference to a module or other imported code (such as functions/builtin functions)
    '''
    # NOTE(review): uses the pre-`re` regex module, whose .match returns
    # the matched length rather than a match object -- hence the
    # comparison against len(...) to demand a full-string match.
    if import_filter.match(impstr) != len(impstr) or getattr_filter.match(impname)!= len(impname):
        import sys
        sys.stderr.write('''Possible attempt to smuggle arbitrary code into pickle file (see module cpickle_extend).\nPassed code was %s\n%s\n'''%(impstr,impname))
        del(sys)
    else:
        ns = {}
        try:
            exec (impstr) in ns # could raise all sorts of errors, of course, and is still dangerous when you have no control over the modules on your system! Do not allow for untrusted code!!!
            return eval(impname, ns)
        except:
            import sys
            sys.stderr.write('''Error unpickling module %s\n None returned, will likely raise errors.'''%impstr)
            return None
+
+# Modules
+copy_reg.pickle(type(regex),pickle_module,unpickle_imported_code)
+# builtin functions/methods
+copy_reg.pickle(type(regex.compile),pickle_imported_code, unpickle_imported_code)
+
+del(regex) # to keep the namespace neat as possible
+
+### INSTANCE METHODS
+'''
+The problem with instance methods is that they are almost always
+stored inside a class somewhere. We really need a new type: reference
+that lets us just say "y.this"
+
+We also need something that can reliably find burried functions :( not
+likely to be easy or clean...
+
+then filter for x is part of the set
+'''
+import new
+
def pickle_instance_method(imeth):
    '''
    Use the (rather surprisingly clean) internals of
    the method to store a reference to a method. Might
    be better to use a more general "get the attribute
    'x' of this object" system, but I haven't written that yet :)
    '''
    klass = imeth.im_class
    funcimp = _imp_meth(imeth)  # ("from X import Y" string, name) pair
    self = imeth.im_self # will be None for UnboundMethodType
    return unpickle_instance_method, (funcimp,self,klass)
def unpickle_instance_method(funcimp,self,klass):
    '''
    Attempt to restore a reference to an instance method,
    the instance has already been recreated by the system
    as self, so we just call new.instancemethod
    '''
    # fix: the import result was bound to `funcimp` while the return
    # used the undefined name `func` -- a NameError at restore time.
    func = apply(unpickle_imported_code, funcimp)
    return new.instancemethod(func,self,klass)

copy_reg.pickle(types.MethodType, pickle_instance_method, unpickle_instance_method)
copy_reg.pickle(types.UnboundMethodType, pickle_instance_method, unpickle_instance_method)
+
### Arrays
try:
    import array
    # true when the native byte order is little-endian (probed via the
    # first byte of a known int)
    LittleEndian = array.array('i',[1]).tostring()[0] == '\001'
    def pickle_array(somearray):
        '''
        Store a standard array object, inefficient because of copying to string
        '''
        return unpickle_array, (somearray.typecode, somearray.tostring(), LittleEndian)
    def unpickle_array(typecode, stringrep, origendian):
        '''
        Restore a standard array object
        '''
        newarray = array.array(typecode)
        newarray.fromstring(stringrep)
        # floats are always big-endian, single byte elements don't need swapping
        # NOTE(review): 'l'/'L' arrays are never byteswapped here --
        # confirm that was intended
        if origendian != LittleEndian and typecode in ('I','i','h','H'):
            newarray.byteswap()
        return newarray
    copy_reg.pickle(array.ArrayType, pickle_array, unpickle_array)
except ImportError: # no arrays
    pass
+
### NUMPY Arrays
try:
    import Numeric
    # true when the native byte order is little-endian
    LittleEndian = Numeric.array([1],'i').tostring()[0] == '\001'
    def pickle_numpyarray(somearray):
        '''
        Store a numpy array, inefficient, but should work with cPickle
        '''
        return unpickle_numpyarray, (somearray.typecode(), somearray.shape, somearray.tostring(), LittleEndian)
    def unpickle_numpyarray(typecode, shape, stringval, origendian):
        '''
        Restore a numpy array
        '''
        newarray = Numeric.fromstring(stringval, typecode)
        # fix: Numeric.reshape returns a *new* array; the result was
        # previously discarded, so restored arrays came back flat.
        newarray = Numeric.reshape(newarray, shape)
        if origendian != LittleEndian and typecode in ('I','i','h','H'):
            # this doesn't seem to work correctly, what's byteswapped doing???
            return newarray.byteswapped()
        else:
            return newarray
    copy_reg.pickle(Numeric.ArrayType, pickle_numpyarray, unpickle_numpyarray)
except ImportError:
    pass
+
### UTILITY FUNCTIONS
classmap = {}   # cache: object -> module name, filled by _whichmodule
def _whichmodule(cls):
    """Figure out the module in which an imported_code object occurs.
    Search sys.modules for the module.
    Cache in classmap.
    Return a module name.
    If the class cannot be found, return __main__.
    Copied here from the standard pickle distribution
    to prevent another import
    """
    # NOTE: relies on `sys`, imported further down at module level --
    # fine at call time, since module-level imports run first.
    if classmap.has_key(cls):
        return classmap[cls]
    clsname = cls.__name__
    for name, module in sys.modules.items():
        if name != '__main__' and \
           hasattr(module, clsname) and \
           getattr(module, clsname) is cls:
            break
    else:
        name = '__main__'
    classmap[cls] = name
    return name
+
+import os, string, sys
+
def _imp_meth(im):
    '''
    One-level deep recursion on finding methods, i.e. we can
    find them only if the class is at the top level.
    '''
    # source file name of the function, without path or extension
    fname = im.im_func.func_code.co_filename
    tail = os.path.splitext(os.path.split(fname)[1])[0]
    # candidate modules: those whose dotted name ends in `tail`
    ourkeys = sys.modules.keys()
    possibles = filter(lambda x,tail=tail: x[-1] == tail, map(string.split, ourkeys, ['.']*len(ourkeys)))

    # now, iterate through possibles to find the correct class/function
    possibles = map(string.join, possibles, ['.']*len(possibles))
    imp_string = _search_modules(possibles, im.im_func)
    return imp_string
+
def _search_modules(possibles, im_func):
    # Scan candidate modules for im_func, either at module level or one
    # level down (as a class attribute); returns the (import statement,
    # lookup name) pair consumed by unpickle_imported_code.
    for our_mod_name in possibles:
        our_mod = sys.modules[our_mod_name]
        if hasattr(our_mod, im_func.__name__) and getattr(our_mod, im_func.__name__).im_func is im_func:
            return 'from %s import %s'%(our_mod.__name__, im_func.__name__), im_func.__name__
        for key,val in our_mod.__dict__.items():
            if hasattr(val, im_func.__name__) and getattr(val, im_func.__name__).im_func is im_func:
                return 'from %s import %s'%(our_mod.__name__,key), '%s.%s'%(key,im_func.__name__)
    # Python 1.x/2.x string exception: no location found
    raise '''No import string calculable for %s'''%im_func
+
+
diff --git a/intern/python/modules/mcf/utils/dictbool.py b/intern/python/modules/mcf/utils/dictbool.py
new file mode 100644
index 00000000000..cd549b7a681
--- /dev/null
+++ b/intern/python/modules/mcf/utils/dictbool.py
@@ -0,0 +1,80 @@
+'''
+DictBool:
+Simplistic (and slow) implementation of Boolean operations for
+dictionaries... really these should be implemented in C, but I
+can't do that till I have MSVC++, which I don't really want to
+buy... this will have to do in the meantime.
+
+>>> from mcf.utils import dictbool
+
+>>> a = {1:2}; b = {2:3}; c={4:5,6:7,8:9,1:5}
+
+>>> dictbool.union(a,b,c) # overwrite a with b and the result with c
+
+{1: 5, 2: 3, 4: 5, 8: 9, 6: 7}
+
+>>> dictbool.collectunion(a,b,c) # collect all possible for each key
+
+{1: [2, 5], 2: [3], 4: [5], 8: [9], 6: [7]}
+
+>>> dictbool.intersect(a,b,c) # no common elements in all three
+
+{}
+
+>>> dictbool.intersect(a,c) # one element is common to both
+
+{1: [2, 5]}
+'''
+
def union(*args):
    '''
    Merge any number of dictionaries into one new dictionary.
    On duplicate keys, later arguments overwrite earlier ones.
    '''
    merged = {}
    for mapping in args:
        # later dictionaries win on key collisions
        for key in mapping.keys():
            merged[key] = mapping[key]
    return merged
+
def collectunion(*args):
    '''
    As union, save instead of overwriting: each key of the result maps
    to the list of *all* values supplied for it, in argument order.
    '''
    collected = {}
    for mapping in args:
        for key, value in mapping.items():
            # first occurrence creates the list; later ones append
            collected.setdefault(key, []).append(value)
    return collected
+
def intersect(*args):
    '''
    Build a new dictionary with those keys common to all args,
    the vals of the new dict are lists of length len(args), where
    list[ind] is the value of args[ind] for that key.
    '''
    # NOTE(review): values are actually ordered by ascending dictionary
    # size (see the sort below), not by original argument order as the
    # docstring claims -- confirm which is intended.
    # sort the dicts by size so the smallest one drives the scan
    # (Python 2: map returns a list, and (int, dict) tuples compare)
    args = map(lambda x: (len(x),x), args)
    args.sort()
    temp = {}
    master = args[0][1]   # the smallest dictionary
    rest = map(lambda x: x[1], args[1:])
    for var,val in master.items():
        tempval = [val]
        for slave in rest:
            try:
                tempval.append(slave[var])
            except KeyError:
                # missing from one dict: not in the intersection
                tempval = None
                break
        if tempval:
            temp[var] = tempval
    return temp
+
diff --git a/intern/python/modules/mcf/utils/dsort.py b/intern/python/modules/mcf/utils/dsort.py
new file mode 100644
index 00000000000..cd6ad6b6c32
--- /dev/null
+++ b/intern/python/modules/mcf/utils/dsort.py
@@ -0,0 +1,91 @@
+nullval = (1,)
+
class DSort:
    '''
    A "dependency" sorting class, used to order elements
    according to declared "dependencies" (many-to-one relationships)
    Is not a beautiful algo, but it works (or seems to)
    Requires hashable values for all elements.

    This is a quick hack, use at your own risk!

    Basic usage:
    Create a DSort mysorter
    for each element q which is part of the set to sort, call:
    mysorter.rule( dsort.nullval, q)
    # this is not strictly necessary for elements which are
    # dependent on other objects, but it is necessary for
    # those which are not. Generally it's easiest to call
    # the null rule for each element.
    for each rule x depends on y, call:
    mysorter.rule( x, y)
    when _all_ rules are entered, call
    try:
    sortedlist = mysorter.sort()
    except ValueError:
    handle recursive dependencies here...


    For an example of real-life use, see the VRML lineariser.

    '''
    def __init__(self, recurseError=None ):
        # dependon maps element -> [rdepth, dependent, dependent, ...];
        # rdepth is -1 (not yet computed), -2 (computation in progress,
        # used to detect recursion) or the final dependency depth.
        # nullval's entry collects elements nothing refers to.
        self.dependon = {nullval:[0]}
        self.recurseError = recurseError
    def rule( self, depon, deps):
        '''
        Register a "rule". Both elements must be hashable values.
        See the class' documentation for usage.
        '''
#		print '''registering rule:''', depon, deps
        if self.dependon.has_key( deps ) and depon is not nullval:
            self.dependon[ deps ].append( depon )
        elif depon is not nullval:
            self.dependon[ deps ] = [-1, depon]
        elif not self.dependon.has_key( deps ):
            self.dependon[ deps ] = [-1 ]
    def sort( self ):
        '''
        Get the sorted results as a list
        '''
        # first pass: compute every element's dependency depth
        for key, value in self.dependon.items():
            self._dsort( key, value)
        # then order by depth, deepest (most depended-upon) first
        # NOTE(review): nullval itself ends up in the ordered output --
        # confirm that callers filter it out.
        temp = []
        for key, value in self.dependon.items():
            temp.append( (value[0], key) )
        temp.sort()
        temp.reverse()
        temp2 = []
        for x,y in temp:
            temp2.append( y )
        # following adds the elements with no dependencies
        temp2[len(temp2):] = self.dependon[ nullval ][1:]
        return temp2
    def _dsort( self, key, value ):
        # recursively compute (and memoise in value[0]) the depth of key
        if value[0] == -2:
            # re-entered an element on the current recursion path
            if self.recurseError:
                raise ValueError, '''Dependencies were recursive!'''
            else:
                if __debug__:
                    print '''Recursive dependency discovered and ignored in dsort.Dsort._dsort on %s:%s'''%(key, value)
                return 1 # we know it has at least one reference...
        elif value[0] == -1: # haven't yet calculated this rdepth
            value[0] = -2    # mark in-progress so recursion is detectable
            tempval = [0]
            for x in value[1:]:
                try:
                    tempval.append( 1 + self._dsort( x, self.dependon[x]) )
                except KeyError:
                    self.dependon[ nullval ].append( x ) # is an unreferenced element
                    tempval.append( 1 )
            value[0] = max( tempval )
            return value[0]
        else:
            return value[0]
+'''
+from mcf.utils import dsort
+>>> x = dsort.DSort()
+>>> map( x.rule, [1,2,2,4,5,4], [2,3,4,5,6,3] )
+[None, None, None, None, None, None]
+>>> x.sort()
+''' \ No newline at end of file
diff --git a/intern/python/modules/mcf/utils/dummy.py b/intern/python/modules/mcf/utils/dummy.py
new file mode 100644
index 00000000000..fb68c4049bf
--- /dev/null
+++ b/intern/python/modules/mcf/utils/dummy.py
@@ -0,0 +1,91 @@
+'''
+Dummy Class, intended as an abstract class for the creation
+of base/builtin classes with slightly altered functionality
+uses _base as the name of an instance of the base datatype,
+mapping all special functions to that name.
+
+>>> from mcf.utils import dummy
+
+>>> j = dummy.Dummy({})
+
+>>> j['this'] = 23
+
+>>> j
+
+{'this': 23}
+
+>>> class example(dummy.Dummy):
+
+... def __repr__(self):
+
+... return '<example: %s>'%`self._base`
+
+>>> k = example([])
+
+>>> k # uses the __repr__ function
+
+<example: []>
+
+>>> k.append # finds the attribute of the _base
+
+<built-in method append of list object at 501830>
+
+'''
+import types, copy
+
+class Dummy:
+ 'Abstract class for slightly altering functionality of objects (including builtins)'
+ def __init__(self, val=None):
+ 'Initialisation, should be overridden'
+ if val and type(val)== types.InstanceType and hasattr(val, '_base'):
+ # Dict is used because subclasses often want to override
+ # the setattr function
+ self.__dict__['_base']=copy.copy(val.__dict__['_base'])
+ else:
+ self.__dict__['_base'] = val
+ def __repr__(self):
+ 'Return a string representation'
+ return repr(self._base)
+ def __str__(self):
+ 'Convert to a string'
+ return str(self._base)
+ def __cmp__(self,other):
+ 'Compare to other value'
+ # altered 98.03.17 from if...elif...else statement
+ return cmp(self._base, other)
+ def __getitem__(self, key):
+ 'Get an item by index'
+ return self._base[key]
+ def __setitem__(self, key, val):
+ 'Set an item by index'
+ self._base[key]=val
+ def __len__(self):
+ 'return the length of the self'
+ return len(self._base)
+ def __delitem__(self, key):
+ 'remove an item by index'
+ del(self._base[key])
+ def __getslice__(self, i, j):
+ 'retrieve a slice by indexes'
+ return self._base[i:j]
+ def __setslice__(self, i, j, val):
+ 'set a slice by indexes to values'
+ self._base[i:j]=val
+ def __delslice__(self, i, j):
+ 'remove a slice by indexes'
+ del(self._base[i:j])
+ def __nonzero__(self):
+ if self._base:
+ return 1
+ else:
+ return 0
+ def __getattr__(self, attr):
+ 'find an attribute when normal lookup fails, will raise a KeyError if missing _base attribute'
+ try:
+ return getattr( self.__dict__['_base'], attr)
+ except (AttributeError, KeyError):
+ try:
+ return self.__dict__['_base'][attr]
+ except (KeyError,TypeError):
+ pass
+ raise AttributeError, attr
diff --git a/intern/python/modules/mcf/utils/err.py b/intern/python/modules/mcf/utils/err.py
new file mode 100644
index 00000000000..3c6591a6873
--- /dev/null
+++ b/intern/python/modules/mcf/utils/err.py
@@ -0,0 +1,37 @@
+'''
+err.py Encapsulated writing to sys.stderr
+
+The idea of this module is that, for a GUI system (or a more advanced UI),
+you can just import a different err module (or object) and keep
+your code the same. (For instance, you often want a status window
+which flashes warnings and info, and have error messages pop up an
+alert to get immediate attention.)
+'''
+
+import sys
+
+def err(message, Code=0):
+ '''
+ report an error, with an optional error code
+ '''
+ if Code:
+ sys.stderr.write('Error #%i: %s\n'%(Code,message))
+ else:
+ sys.stderr.write('Error: %s\n'%message)
+def warn(message, Code=0):
+ '''
+ report a warning, with an optional error code
+ '''
+ if Code:
+ sys.stderr.write('Warning #%i: %s\n'%(Code,message))
+ else:
+ sys.stderr.write('Warning: %s\n'%message)
+def info(message, Code=0):
+ '''
+ report information/status, with an optional error code
+ '''
+ if Code:
+ sys.stderr.write('Info #%i: %s\n'%(Code,message))
+ else:
+ sys.stderr.write('Info: %s\n'%message)
+
diff --git a/intern/python/modules/mcf/utils/extpkl.py b/intern/python/modules/mcf/utils/extpkl.py
new file mode 100644
index 00000000000..8ae52969281
--- /dev/null
+++ b/intern/python/modules/mcf/utils/extpkl.py
@@ -0,0 +1,19 @@
+'''
+Make either cPickle or pickle available as the virtual
+module mcf.utils.pickle. This allows you to use a single
+import statement:
+
+ from mcf.utils import extpkl, pickle
+
+and then use that pickle, knowing that you have the best
+available pickling engine.
+'''
+defaultset = ('import cPickle', 'cPickle')
+import sys, mcf.utils
+from mcf.utils import cpickle_extend
+try:
+ import cPickle
+ pickle = cPickle
+except:
+ import pickle
+sys.modules['mcf.utils.pickle'] = mcf.utils.pickle = pickle
diff --git a/intern/python/modules/mcf/utils/fileassociation.py b/intern/python/modules/mcf/utils/fileassociation.py
new file mode 100644
index 00000000000..55fd9735bfb
--- /dev/null
+++ b/intern/python/modules/mcf/utils/fileassociation.py
@@ -0,0 +1,65 @@
+### WARNING:
+# I don't have a clue what I'm doing here!
+
+import win32api
+### Following is the "normal" approach,
+### but it requires loading the entire win32con file (which is big)
+### for two values...
+##import win32con
+##HKEY_CLASSES_ROOT = win32con.HKEY_CLASSES_ROOT
+##REG_SZ = win32con.REG_SZ
+
+### These are the hard-coded values, should work everywhere as far as I know...
+HKEY_CLASSES_ROOT = 0x80000000
+REG_SZ= 1
+
+def associate( extension, filetype, description="", commands=(), iconfile="" ):
+ '''Warning: I don't have a clue what I'm doing here!
+ extension -- extension including "." character, e.g. .proc
+ filetype -- formal name, no spaces allowed, e.g. SkeletonBuilder.RulesFile
+ description -- human-readable description of the file type
+ commands -- sequence of (command, commandline), e.g. (("Open", "someexe.exe %1"),)
+ iconfile -- optional default icon file for the filetype
+ '''
+ win32api.RegSetValue(
+ HKEY_CLASSES_ROOT,
+ extension,
+ REG_SZ,
+ filetype
+ )
+ if description:
+ win32api.RegSetValue(
+ HKEY_CLASSES_ROOT ,
+ filetype,
+ REG_SZ,
+ description
+ )
+ if iconfile:
+ win32api.RegSetValue(
+ HKEY_CLASSES_ROOT ,
+ "%(filetype)s\\DefaultIcon" % locals(),
+ REG_SZ,
+ iconfile
+ )
+ for (command, commandline) in commands:
+ win32api.RegSetValue(
+ HKEY_CLASSES_ROOT ,
+ "%(filetype)s\\Shell\\%(command)s" % locals(),
+ REG_SZ,
+ command,
+ )
+ win32api.RegSetValue(
+ HKEY_CLASSES_ROOT ,
+ "%(filetype)s\\Shell\\%(command)s\\Command" % locals(),
+ REG_SZ,
+ commandline
+ )
+
+if __name__ == "__main__":
+ associate(
+ ".proc",
+ "SkeletonBuilder.Processing",
+ "SkeletonBuilder Processing File",
+ (("Open", '''z:\\skeletonbuilder\\skeletonbuilder.exe "%1" %*'''),),
+ '''z:\\skeletonbuilder\\bitmaps\\skeletonbuildericon.ico''',
+ ) \ No newline at end of file
diff --git a/intern/python/modules/mcf/utils/findourfile.py b/intern/python/modules/mcf/utils/findourfile.py
new file mode 100644
index 00000000000..1c70ff0dd70
--- /dev/null
+++ b/intern/python/modules/mcf/utils/findourfile.py
@@ -0,0 +1,30 @@
+'''
+This utility allows a python system to find a file in its
+directory. To do this, you need to pass it a function object from
+a module in the correct directory. I know there must be a better
+way to do this, but I haven't seen it yet. Incidentally, the
+current directory should be _different_ from the module in which
+the function is contained, otherwise this function will go off into
+the root directory.
+
+Currently this has to be called with the current directory a directory
+other than the directory we're trying to find... need a better solution
+for this kind of thing... a python registry would be great :)
+
+NOTE: as of Python 1.5, this module should be obsolete! As soon as I
+have verified that all of my code is fixed, it will be moved to the unused
+directories.
+'''
+import os,sys
+
+def findourfile(function, filename):
+ '''
+ Given the function, return a path to the a file in the
+ same directory with 'filename'. We also let the caller
+ know if the file already exists.
+ '''
+ ourfilename = os.path.split(function.func_code.co_filename)[0]+os.sep+filename
+ exists = os.path.exists(ourfilename)
+ return (exists,ourfilename)
+
+
diff --git a/intern/python/modules/mcf/utils/hier_rx.py b/intern/python/modules/mcf/utils/hier_rx.py
new file mode 100644
index 00000000000..3770f0bab22
--- /dev/null
+++ b/intern/python/modules/mcf/utils/hier_rx.py
@@ -0,0 +1,201 @@
+'''
+Simple Hierarchic Walking functions for use with hierobj-type objects.
+
+Provide for recurse-safe processing. Currently only provide depth-first
+processing, and don't provide means for ignoring branches of the tree
+during processing. For an example of breadth-first processing, see
+mcf.pars.int.index.indutils. For more complex hierarchic processing,
+see the mcf.walker package.
+
+Originally these functions were only methods of the hierobj class (they
+still are methods of it). I've split them out to allow them to be
+imported selectively by other classes (some classes will only want
+the simple walking functions, and not want to be bothered with the
+methods which hierobj uses to keep track of its particular internal
+structures.)
+'''
+
+def hier_rapply(self, function,arglist=None,argdict={},moreattr = '__childlist__'):
+ '''
+ Safely apply a function to self and all children for
+ the function's side effects. Discard the return values
+ that function returns.
+
+ function
+ function to apply
+ arglist
+ (self,)+arglist is the set of arguments passed to function
+ argdict
+ passed as namedargs to the function
+ moreattr
+ the attribute representing the children of a node
+ '''
+ alreadydone = {}
+ tobedone = [self]
+ if arglist or argdict:
+ if not arglist: arglist=[self]
+ else:
+ arglist.insert(0,self) # we could insert anything... self is convenient
+ while tobedone:
+ object = tobedone[0]
+ try:
+ alreadydone[id(object)]
+ # We've already processed this object
+ except KeyError:
+ # We haven't processed this object
+ alreadydone[id(object)]=1
+ arglist[0]=object
+ apply(function,tuple(arglist),argdict)
+ try:
+ tobedone[1:1]=getattr(object,moreattr)
+ except AttributeError:
+ # if the object isn't a hierobj, we don't need to recurse into it.
+ pass
+ del(tobedone[0])
+ else: # no arglist or argdict
+ while tobedone:
+ object = tobedone[0]
+ try:
+ alreadydone[id(object)]
+ # We've already processed this object
+ except KeyError:
+ # We haven't processed this object
+ alreadydone[id(object)]=1
+ function(object)
+ try:
+ tobedone[1:1]=getattr(object,moreattr)
+ except AttributeError:
+ # if the object isn't a hierobj, we don't need to recurse into it.
+ pass
+ del(tobedone[0])
+def hier_rreturn(self, function,arglist=None,argdict={},moreattr = '__childlist__'):
+ '''
+ Safely apply a function to self and all children,
+ collect the results in a list and return.
+
+ function
+ function to apply
+ arglist
+ (self,)+arglist is the set of arguments passed to function
+ argdict
+ passed as namedargs to the function
+ moreattr
+ the attribute representing the children of a node
+ '''
+ alreadydone = {}
+ tobedone = [self]
+ results = []
+ if arglist or argdict:
+ if not arglist: arglist=[self]
+ else:
+ arglist.insert(0,self) # or anything you feel like
+ while tobedone:
+ object = tobedone[0]
+ try:
+ alreadydone[id(object)]
+ # We've already processed this object
+ except KeyError:
+ # We haven't processed this object
+ alreadydone[id(object)]=1
+ arglist[0]=object
+ results.append(apply(function,tuple(arglist),argdict))
+ try:
+ tobedone[1:1]=getattr(object,moreattr)
+ except AttributeError:
+ # if the object isn't a hierobj, we don't need to recurse into it.
+ pass
+ del(tobedone[0])
+ else:
+ while tobedone:
+ object = tobedone[0]
+ try:
+ alreadydone[id(object)]
+ # We've already processed this object
+ except KeyError:
+ # We haven't processed this object
+ alreadydone[id(object)]=1
+ results.append(function(object))
+ try:
+ tobedone[1:1]=getattr(object,moreattr)
+ except AttributeError:
+ # if the object isn't a hierobj, we don't need to recurse into it.
+ pass
+ del(tobedone[0])
+ return results
+def hier_rgetattr(self, attrname, multiple=1, moreattr = '__childlist__'):
+ '''
+ Recursively collect the values for attrname and
+ return as a list.
+
+ attrname
+ attribute to collect
+ arglist
+ (self,)+arglist is the set of arguments passed to function
+ argdict
+ passed as namedargs to the function
+ moreattr
+ the attribute representing the children of a node
+ '''
+ alreadydone = {}
+ tobedone = [self]
+ results = []
+ while tobedone:
+ object = tobedone[0]
+ try:
+ alreadydone[id(object)]
+ # We've already processed this object
+ except KeyError:
+ # We haven't processed this object
+ alreadydone[id(object)]=1
+ try:
+ if multiple:
+ results.append(getattr(object, attrname))
+ else:
+ return getattr(object, attrname)
+ except AttributeError:
+ pass
+ try:
+ tobedone[1:1]=getattr(object,moreattr)
+ except AttributeError:
+ # if the object isn't a hierobj, we don't need to recurse into it.
+ pass
+ del(tobedone[0])
+ return results
+def hier_rmethod(self, methodname,arglist=(),argdict={},moreattr = '__childlist__'):
+ '''
+ return the result of calling every object's method methodname,
+ as for hier_rreturn otherwise.
+
+ methodname
+ method to call
+ arglist
+ (self,)+arglist is the set of arguments passed to function
+ argdict
+ passed as namedargs to the function
+ moreattr
+ the attribute representing the children of a node
+ '''
+
+ alreadydone = {}
+ tobedone = [self]
+ results = []
+ while tobedone:
+ object = tobedone[0]
+ try:
+ alreadydone[id(object)]
+ # We've already processed this object
+ except KeyError:
+ # We haven't processed this object
+ alreadydone[id(object)]=1
+ try:
+ results.append(apply(getattr(object,methodname),arglist,argdict))
+ except:
+ pass
+ try:
+ tobedone[1:1]=getattr(object,moreattr)
+ except AttributeError:
+ # if the object isn't a hierobj, we don't need to recurse into it.
+ pass
+ del(tobedone[0])
+ return results
+
diff --git a/intern/python/modules/mcf/utils/hierdummy.py b/intern/python/modules/mcf/utils/hierdummy.py
new file mode 100644
index 00000000000..0cf86e9e0c0
--- /dev/null
+++ b/intern/python/modules/mcf/utils/hierdummy.py
@@ -0,0 +1,16 @@
+'''
+Hierarchic 'Dummy' objects
+'''
+
+import hierobj, dummy
+
+class HierobjDummy(hierobj.Hierobj,dummy.Dummy):
+ '''
+ An Hierarchic Dummy object, which provides direct access to its
+ children through object[x] interfaces, allows "index" "count"
+ etceteras by returning the corresponding attributes of the _base.
+ '''
+ def __init__(self, parent=None, childlist=None):
+ hierobj.Hierobj.__init__(self, parent, childlist)
+ self._base = self.__childlist__ #set by init function above
+
diff --git a/intern/python/modules/mcf/utils/hierobj.py b/intern/python/modules/mcf/utils/hierobj.py
new file mode 100644
index 00000000000..7730b4d3ba4
--- /dev/null
+++ b/intern/python/modules/mcf/utils/hierobj.py
@@ -0,0 +1,133 @@
+'''
+Generic Hierarchic Objects Module
+Hierobj's store their children (which can be anything) in their
+__childlist__ attribute, and provide methods for walking the
+hierarchy, either collecting results or not.
+
+The index function returns an index of the objects (effectively a
+flattened copy of the hierarchy)
+
+97-03-17 Added ability to pass arguments to hier_rapply and hier_rreturn.
+97-10-31 Removed dependencies on mcf.store
+'''
+import copy,types
+import singletonlist, hier_rx
+
+class Hierobj:
+ '''
+ An abstract class which handles hierarchic functions and information
+ # remade as a DAG 97-04-02, also reduced memory overhead for
+ hier-r* functions by using while-del-IndexError construct versus
+ for loop (probably makes it slower though)
+ If you require a true hierarchy, use the TrueHierobj class below...
+ '''
+ def __init__(self, parent=None, childlist=None):
+ if parent is None: # passed no parents
+ self.__dict__['__parent__'] = []
+ elif type(parent) == types.ListType: # passed a list of parents
+ self.__dict__['__parent__'] = parent
+ else: # passed a single parent
+ self.__dict__['__parent__'] = [parent]
+ self.__dict__['__childlist__'] = childlist or []
+ for child in self.__childlist__:
+ try:
+ child.__parent__.append(self)
+ except:
+ pass
+ # import simple hierarchic processing methods
+ hier_rapply = hier_rx.hier_rapply
+ hier_rreturn = hier_rx.hier_rreturn
+ hier_rgetattr = hier_rx.hier_rgetattr
+ hier_rmethod = hier_rx.hier_rmethod
+
+
+ def hier_addchild(self, child):
+ '''
+ Add a single child to the childlist
+ '''
+ self.__childlist__.append(child)
+ try:
+ # Hierobj-aware child
+ child.__parent__.append(self) # raises error if not hier_obj aware
+ except (TypeError, AttributeError):
+ # Non Hierobj-aware child
+ pass
+ append = hier_addchild
+ def hier_remchild(self, child):
+ '''
+ Breaks the child relationship with child, including the
+ reciprocal parent relationship
+ '''
+ try:
+ self.__childlist__.remove(child)
+ try:
+ child.hier_remparent(self) # if this fails, no problem
+ except AttributeError: pass
+ except (AttributeError,ValueError):
+ return 0 # didn't manage to remove the child
+ return 1 # succeeded
+ def hier_remparent(self, parent):
+ '''
+ Normally only called by hier_remchild of the parent,
+ just removes the parent from the child's parent list,
+ but leaves child in parent's childlist
+ '''
+ try:
+ self.__parent__.remove(parent)
+ except (AttributeError,ValueError):
+ return 0
+ return 1
+ def hier_replacewith(self,newel):
+ '''
+ As far as the hierarchy is concerned, the new element
+ is exactly the same as the old element, it has all
+ the same children, all the same parents. The old
+ element becomes completely disconnected from the hierarchy,
+ but it still retains all of its references
+
+ For every parent, replace this as a child
+ For every child, replace this as the parent
+ '''
+ for parent in self.__parent__:
+ try:
+ parent.hier_replacechild(self, newel)
+ except AttributeError:
+ pass
+ for child in self.__childlist__:
+ try:
+ child.hier_replaceparent(self,parent)
+ except AttributeError:
+ pass
+ def hier_replaceparent(self, oldparent, newparent):
+ ind = self.__parent__.index(oldparent)
+ self.__parent__[ind] = newparent
+ def hier_replacechild(self, oldchild, newchild):
+ ind = self.__childlist__.index(oldchild)
+ self.__childlist__[ind] = newchild
+
+class TrueHierobj(Hierobj):
+ '''
+ An inefficient implementation of an Hierobj which limits the
+ __parent__ attribute to a single element. This will likely be
+ _slower_ than an equivalent Hierobj. That will have to be fixed
+ eventually.
+ '''
+ def __init__(self, parent=None, childlist=[]):
+ if parent is None: # passed no parents
+ self.__dict__['__parent__'] = singletonlist.SingletonList()
+ else: # passed a single parent
+ self.__dict__['__parent__'] = singletonlist.SingletonList(parent)
+ self.__dict__['__childlist__'] = copy.copy(childlist)
+ for child in self.__childlist__:
+ try:
+ child.__parent__.append(self)
+ except:
+ pass
+
+def index(grove):
+ '''
+ Returns a flattened version of the grove
+ '''
+ return grove.hier_rreturn(lambda x: x)
+
+
diff --git a/intern/python/modules/mcf/utils/in_place_ops.py b/intern/python/modules/mcf/utils/in_place_ops.py
new file mode 100644
index 00000000000..7d64f196597
--- /dev/null
+++ b/intern/python/modules/mcf/utils/in_place_ops.py
@@ -0,0 +1,38 @@
+class inplace:
+ def __add__( self, num ):
+ self.base = self.base + num
+ return self.base
+ def __sub__( self, num ):
+ self.base = self.base - num
+ return self.base
+ def __init__(self, base ):
+ self.base = base
+ def __repr__(self ):
+ return repr( self.base)
+ def __str__(self ):
+ return str( self.base)
+ __radd__ = __add__
+ def __mul__(self, num ):
+ return self.base * num
+ def __div__(self, num ):
+ return self.base / num
+ def __mod__(self, num ):
+ return self.base % num
+ def __neg__(self ):
+ return - abs( self.base)
+ def __pos__(self ):
+ return abs( self.base)
+ def __abs__(self ):
+ return abs( self.base )
+ def __inv__(self ):
+ return -self.base
+ def __lshift__(self, num ):
+ return self.base << num
+ def __rshift__(self, num ):
+ return self.base >> num
+ def __and__(self, num ):
+ return self.base and num
+ def __or__(self, num ):
+ return self.base or num
+ def value( self ):
+ return self.base \ No newline at end of file
diff --git a/intern/python/modules/mcf/utils/namespace.py b/intern/python/modules/mcf/utils/namespace.py
new file mode 100644
index 00000000000..819531e10db
--- /dev/null
+++ b/intern/python/modules/mcf/utils/namespace.py
@@ -0,0 +1,224 @@
+'''
+NameSpace v0.04:
+
+A "NameSpace" is an object wrapper around a _base dictionary
+which allows chaining searches for an 'attribute' within that
+dictionary, or any other namespace which is defined as part
+of the search path (depending on the downcascade variable, is
+either the hier-parents or the hier-children).
+
+You can assign attributes to the namespace normally, and read
+them normally. (setattr, getattr, a.this = that, a.this)
+
+I use namespaces for writing parsing systems, where I want to
+differentiate between sources (have multiple sources that I can
+swap into or out of the namespace), but want to be able to get
+at them through a single interface. There is a test function
+which gives you an idea how to use the system.
+
+In general, call NameSpace(someobj), where someobj is a dictionary,
+a module, or another NameSpace, and it will return a NameSpace which
+wraps up the keys of someobj. To add a namespace to the NameSpace,
+just call the append (or hier_addchild) method of the parent namespace
+with the child as argument.
+
+### NOTE: if you pass a module (or anything else with a dict attribute),
+names which start with '__' will be removed. You can avoid this by
+pre-copying the dict of the object and passing it as the arg to the
+__init__ method.
+
+### NOTE: to properly pickle and/or copy module-based namespaces you
+will likely want to do: from mcf.utils import extpkl, copy_extend
+
+### Changes:
+ 97.05.04 -- Altered to use standard hierobj interface, cleaned up
+ interface by removing the "addparent" function, which is reachable
+ by simply appending to the __parent__ attribute, though normally
+ you would want to use the hier_addchild or append functions, since
+ they let both objects know about the addition (and therefor the
+ relationship will be restored if the objects are stored and unstored)
+
+ 97.06.26 -- Altered the getattr function to reduce the number of
+ situations in which infinite lookup loops could be created
+ (unfortunately, the cost is rather high). Made the downcascade
+ variable harden (resolve) at init, instead of checking for every
+ lookup. (see next note)
+
+ 97.08.29 -- Discovered some _very_ weird behaviour when storing
+ namespaces in mcf.store dbases. Resolved it by storing the
+ __namespace_cascade__ attribute as a normal attribute instead of
+ using the __unstore__ mechanism... There was really no need to
+ use the __unstore__, but figuring out how a functions saying
+ self.__dict__['__namespace_cascade__'] = something
+ print `self.__dict__['__namespace_cascade__']` can print nothing
+ is a bit beyond me. (without causing an exception, mind you)
+
+ 97.11.15 Found yet more errors, decided to make two different
+ classes of namespace. Those based on modules now act similar
+ to dummy objects, that is, they let you modify the original
+ instead of keeping a copy of the original and modifying that.
+
+ 98.03.15 -- Eliminated custom pickling methods as they are no longer
+ needed for use with Python 1.5final
+
+ 98.03.15 -- Fixed bug in items, values, etceteras with module-type
+ base objects.
+'''
+import copy, types, string
+import hierobj
+
+class NameSpace(hierobj.Hierobj):
+ '''
+ An hierarchic NameSpace, allows specification of upward or downward
+ chaining search for resolving names
+ '''
+ def __init__(self, val = None, parents=None, downcascade=1,children=[]):
+ '''
+ A NameSpace can be initialised with a dictionary, a dummied
+ dictionary, another namespace, or something which has a __dict__
+ attribute.
+ Note that downcascade is hardened (resolved) at init, not at
+ lookup time.
+ '''
+ hierobj.Hierobj.__init__(self, parents, children)
+ self.__dict__['__downcascade__'] = downcascade # boolean
+ if val is None:
+ self.__dict__['_base'] = {}
+ else:
+ if type( val ) == types.StringType:
+ # this is a reference to a module which has been pickled
+ val = __import__( val, {},{}, string.split( val, '.') )
+ try:
+ # See if val's a dummy-style object which has a _base
+ self.__dict__['_base']=copy.copy(val._base)
+ except (AttributeError,KeyError):
+ # not a dummy-style object... see if it has a dict attribute...
+ try:
+ if type(val) != types.ModuleType:
+ val = copy.copy(val.__dict__)
+ except (AttributeError, KeyError):
+ pass
+ # whatever val is now, it's going to become our _base...
+ self.__dict__['_base']=val
+ # harden (resolve) the reference to downcascade to speed attribute lookups
+ if downcascade: self.__dict__['__namespace_cascade__'] = self.__childlist__
+ else: self.__dict__['__namespace_cascade__'] = self.__parent__
+ def __setattr__(self, var, val):
+ '''
+ An attempt to set an attribute should place the attribute in the _base
+ dictionary through a setitem call.
+ '''
+ # Note that we use standard attribute access to allow ObStore loading if the
+ # ._base isn't yet available.
+ try:
+ self._base[var] = val
+ except TypeError:
+ setattr(self._base, var, val)
+ def __getattr__(self,var):
+## print '__getattr__', var
+ return self.__safe_getattr__(var, {}) # the {} is a stopdict
+
+ def __safe_getattr__(self, var,stopdict):
+ '''
+ We have a lot to do in this function, if the attribute is an unloaded
+ but stored attribute, we need to load it. If it's not in the stored
+ attributes, then we need to load the _base, then see if it's in the
+ _base.
+ If it's not found by then, then we need to check our resource namespaces
+ and see if it's in them.
+ '''
+ # we don't have a __storedattr__ or it doesn't have this key...
+ if var != '_base':
+ try:
+ return self._base[var]
+ except (KeyError,TypeError), x:
+ try:
+ return getattr(self._base, var)
+ except AttributeError:
+ pass
+ try: # with pickle, it tries to get the __setstate__ before restoration is complete
+ for cas in self.__dict__['__namespace_cascade__']:
+ try:
+ stopdict[id(cas)] # if succeeds, we've already tried this child
+ # no need to do anything, if none of the children succeeds we will
+ # raise an AttributeError
+ except KeyError:
+ stopdict[id(cas)] = None
+ return cas.__safe_getattr__(var,stopdict)
+ except (KeyError,AttributeError):
+ pass
+ raise AttributeError, var
+ def items(self):
+ try:
+ return self._base.items()
+ except AttributeError:
+ pass
+ try:
+ return self._base.__dict__.items()
+ except AttributeError:
+ pass
+ def keys(self):
+ try:
+ return self._base.keys()
+ except AttributeError:
+ pass
+ try:
+ return self._base.__dict__.keys()
+ except AttributeError:
+ pass
+ def has_key( self, key ):
+ try:
+ return self._base.has_key( key)
+ except AttributeError:
+ pass
+ try:
+ return self._base.__dict__.has_key( key)
+ except AttributeError:
+ pass
+ def values(self):
+ try:
+ return self._base.values()
+ except AttributeError:
+ pass
+ try:
+ return self._base.__dict__.values()
+ except AttributeError:
+ pass
+
+ def __getinitargs__(self):
+ if type( self._base ) is types.ModuleType:
+ base = self._base.__name__
+ else:
+ base = self._base
+ return (base, self.__parent__, self.__downcascade__, self.__childlist__)
+ def __getstate__(self):
+ return None
+ def __setstate__(self,*args):
+ pass
+ def __deepcopy__(self, memo=None):
+ d = id(self)
+ if memo is None:
+ memo = {}
+ elif memo.has_key(d):
+ return memo[d]
+ if type(self._base) == types.ModuleType:
+ rest = tuple(map( copy.deepcopy, (self.__parent__, self.__downcascade__, self.__childlist__) ))
+ new = apply(self.__class__, (self._base,)+rest )
+ else:
+ new = tuple(map( copy.deepcopy, (self._base, self.__parent__, self.__downcascade__, self.__childlist__) ))
+ return new
+## def __del__( self, id=id ):
+## print 'del namespace', id( self )
+
+
+def test():
+ import string
+ a = NameSpace(string)
+ del(string)
+ a.append(NameSpace({'a':23,'b':42}))
+ import math
+ a.append(NameSpace(math))
+ print 'The returned object should allow access to the attributes of the string,\nand math modules, and two simple variables "a" and "b" (== 23 and42 respectively)'
+ return a
+
+
diff --git a/intern/python/modules/mcf/utils/quote.py b/intern/python/modules/mcf/utils/quote.py
new file mode 100644
index 00000000000..5f6dccdd511
--- /dev/null
+++ b/intern/python/modules/mcf/utils/quote.py
@@ -0,0 +1,78 @@
+'''
+Generic quoting functions (very fast),
+generalised to allow use in any number of
+situations, but normally you'll want to create
+a new function based on these patterns which
+has the default args you need. This will
+prevent an extra function call.
+'''
+import string, regex
+# create a translator which is fully worked out...
+
+def _quote(somestring,trans,start='"',stop='"'):
+ '''
+ Return a quoted version of somestring.
+ '''
+ # would be _so_ much better if we could use the
+ # getitem, consider...
+ # return '%s%s%s'%(start,string.join(map(trans.__getitem__, somestring), ''),stop)
+ temp = list(somestring)
+ for charno in xrange(len(temp)):
+ temp[charno]= trans[temp[charno]]
+ return '%s%s%s'%(start,string.join(temp, ''),stop)
+
+def compilerex(trans):
+ '''
+ Compiles a suitable regex from a dictionary
+ translation table. Should be used at design
+ time in most cases to improve speed. Note:
+ is not a very intelligent algo. You could
+ do better by creating a character-class []
+ for the single-character keys and then the
+ groups for the or-ing after it, but I've not
+ got the time at the moment.
+ '''
+ keyset = trans.keys()
+ multitrans = []
+ for x in range(len(keyset)):
+ if len(keyset[x]) != len(trans[keyset[x]]):
+ multitrans.append((keyset[x],trans[keyset[x]]))
+ if len(keyset[x])!= 1:
+ keyset[x] = '\(%s\)'%keyset[x]
+ if multitrans:
+ return 1,regex.compile(string.join(keyset,'\|'))
+
+
+def quote2(somestring,trans,rex,start='',stop=''):
+ '''
+ Should be a faster version of _quote once
+ the regex is built. Rex should be a simple
+ or'ing of all characters requiring substitution,
+ use character ranges whereever possible (should
+ be in most cases)
+ '''
+ temp = list(somestring)
+ curpos = 0
+ try:
+ while rex.search(somestring,curpos) != -1:
+ pos = rex.regs[0]
+ print pos
+ replacement = list(trans[rex.group(0)])
+ temp[pos[0]:pos[1]] = replacement
+ curpos = pos[0]+len(replacement)
+ except (IndexError,regex.error):
+ pass
+ return '%s%s%s'%(start,string.join(temp, ''),stop)
+# compatability
+_quote2 = quote2
+
+def reprq(obj, qtype):
+ '''
+ Return representation of a string obj as a string with qtype
+ quotes surrounding it. Usable when linearising Python objects
+ to languages which have only a particular type of string. (Such
+ as VRML). This is not a generalised nor a particularly reliable
+ solution. You should use the _quote2 function instead.
+ '''
+ return '%s%s%s'%(qtype,string.join(string.split(string.join(string.split(obj, '\\'), '\\\\'), qtype), '\\%s'%qtype),qtype)
+
diff --git a/intern/python/modules/mcf/utils/rangeval.py b/intern/python/modules/mcf/utils/rangeval.py
new file mode 100644
index 00000000000..dd166dbebfb
--- /dev/null
+++ b/intern/python/modules/mcf/utils/rangeval.py
@@ -0,0 +1,64 @@
+''' Classes which match ranges, sets, or anything at all. '''
+import dummy # provides storage functions as well as a few others
+
class BetwVal(dummy.Dummy):
    '''
    Matches any object greater than or equal to smaller and less
    than or equal to larger (an inclusive range comparator).
    '''
    def __init__(self, first, second):
        # normalise argument order so _base is always [smaller, larger]
        if first <= second:
            dummy.Dummy.__init__(self, [first, second])
        else:
            dummy.Dummy.__init__(self, [second, first])
    def __getinitargs__(self):
        # support copy/pickle reconstruction
        return (self._base[0], self._base[1])
    def __cmp__(self, object):
        '''The Guts of the Class, allows standard comparison operators'''
        # 0 ("equal") when object falls inside [smaller, larger];
        # 1 when the range lies above object is exceeded, -1 below
        if self._base[0]<=object:
            if self._base[1] >=object:
                return 0
            else: return 1
        else: return -1
    def __repr__(self):
        return '%s(%s,%s)'% (self.__class__.__name__,`self._base[0]`,`self._base[1]`)
+
class WInVal(dummy.Dummy):
    '''
    Matches any value in the sequential object used as initialiser.
    Doesn't gracefully handle situations where not found, as it just
    returns a -1 (i.e. compares as "less than" the probe value).
    '''
    def __init__(self,seq):
        # store the sequence directly, bypassing dummy.Dummy.__init__
        self._base = seq
    def __cmp__(self, object):
        ''' Standard comparison operators '''
        # equal (0) when object is found anywhere in the sequence
        for x in self._base:
            if x == object:
                return 0
        return -1
    def __repr__(self):
        return '%s(%s)'% (self.__class__.__name__,`self._base`)
+
class ExceptVal(WInVal):
    '''
    A negative version of WInVal: matches anything _not_ in the
    initialiser sequence.
    '''
    def __cmp__(self, object):
        # -1 ("no match") when object is in the sequence, else 0
        for x in self._base:
            if x == object:
                return -1
        return 0
+
class AnyVal:
    '''
    Wildcard comparator: compares equal to absolutely anything.
    '''
    def __init__(self):
        # stateless -- nothing to initialise
        pass
    def __getinitargs__(self):
        # no state to restore on copy/unpickle
        return ()
    def __cmp__(self, other):
        # every comparison reports equality
        return 0
    def __repr__(self):
        return 'AnyVal()'
+
diff --git a/intern/python/modules/mcf/utils/regutils_ex.py b/intern/python/modules/mcf/utils/regutils_ex.py
new file mode 100644
index 00000000000..5ef48454e78
--- /dev/null
+++ b/intern/python/modules/mcf/utils/regutils_ex.py
@@ -0,0 +1,158 @@
+import win32api, win32con, string, types
+
def _getDataType( data, coerce = 1 ):
    '''
    Return a tuple of (dataType, data) for a given object, where
    dataType is the win32con.REG_* constant to store it under.
    Automatically converts non-string-or-int data into
    strings by calling pickle.dumps (when coerce is true).
    '''
    if type( data ) is types.StringType:
        return win32con.REG_SZ, data
    elif type( data ) is types.IntType:
        return win32con.REG_DWORD, data
    # what about attempting to convert Longs, floats, etceteras to ints???
    elif coerce:
        import pickle
        return win32con.REG_SZ, pickle.dumps( data )
    else:
        raise TypeError, '''Unsupported datatype for registry, use getDataType( data, coerce=1) to store types other than string/int.'''
+
def _getBaseKey( fullPathSpec ):
    '''
    Split a "full path specification" registry key into its root
    (a win32con HKEY_* constant) and sub-path components.

    Returns (rootkey, subkey); raises a (string) exception when the
    root element is not a known win32con HKEY_* name.
    '''
    key = ''
    subkey = fullPathSpec
    # the while loop strips off any preceding \\ characters (empty
    # leading components) before the actual root key name
    while subkey and not key:
        # bugfix: split the remaining subkey, not the original
        # fullPathSpec -- the old code looped forever on paths
        # with a leading backslash
        key, subkey = string.split( subkey, '\\', 1 )
    try:
        return getattr( win32con, key ), subkey
    except AttributeError:
        raise '''Unknown root key %s in registry path %s'''% (key, fullPathSpec)
+
def RegSetValue( key, valuename='', data='', allowPickling=1 ):
    '''
    Set a registry value by providing a fully-specified
    registry key (and an optional sub-key/value name),
    and a data element. If allowPickling is true, the
    data element can be any picklable element, otherwise
    data element must be a string or integer.
    '''
    root, subkey = _getBaseKey( key )
    dataType, data = _getDataType( data, allowPickling )
    try:
        hKey = win32api.RegOpenKeyEx( root , subkey, 0, win32con.KEY_ALL_ACCESS) # could we use a lesser access model?
    except:
        # key does not exist yet -- create it
        hKey = win32api.RegCreateKey( root, subkey )
    try:
        if not valuename: # the default value
            win32api.RegSetValue( hKey, valuename, dataType, data )
        else: # named sub-value
            win32api.RegSetValueEx( hKey, valuename, 0, dataType, data )
    finally:
        # always release the key handle, even on failure
        win32api.RegCloseKey( hKey)
+
def RegQueryValue( key, valuename='', pickling=0 ):
    '''
    Get a registry value by providing a fully-specified
    registry key (and an optional sub-key/value name).
    If pickling is true, the data element will be
    unpickled before being returned.  Returns None when a
    named value is not available.
    '''
    #print 'key', key
    root, subkey = _getBaseKey( key )
    if not valuename: # the default value
        # NOTE: "type" shadows the builtin here; the registry value
        # type is queried but discarded
        data, type = win32api.RegQueryValue( root , subkey)
    else:
        try:
            #print root, subkey
            hKey = win32api.RegOpenKeyEx( root, subkey, 0, win32con.KEY_READ)
            #print hKey, valuename
            try:
                data, type = win32api.RegQueryValueEx( hKey, valuename )
            except: #
                data, type = None, 0 # value is not available...
                # never try to unpickle a missing value
                pickling = None
        finally:
            win32api.RegCloseKey( hKey)
    if pickling:
        import pickle
        data = pickle.loads( data )
    return data
+
+# following constants seem to reflect where path data is stored on NT machines
+# no idea if it'll work on a 95 machine
+
def AddPathEntry( newEntry, user = 1, prepend=0 ):
    '''
    Add or remove path entry on NT, use prepend == -1 for removal,
    use prepend == 0 for append, prepend == 1 for prepending to the
    current path.  user selects the per-user (true) or per-machine
    (false) environment.
    '''
    if user:
        user = 'USER'
    else:
        user = 'MACHINE'
    key, valuename = COMMON_KEYS[ (user, 'PATH') ]
    _PathManager( key, valuename, newEntry, prepend )
+
def PyExecutables( user = 1, prepend=0 ):
    '''
    Register/Deregister Python files as executables by editing the
    PATHEXT registry value (see AddPathEntry for the meaning of the
    user and prepend arguments).
    '''
    if user:
        user = 'USER'
    else:
        user = 'MACHINE'
    key, valuename = COMMON_KEYS[ (user, 'PYEXECUTABLES') ]
    # the default executables + Python scripts...
    if prepend < 0: # are to eliminate only .py
        newEntry = '.PY'
    else:
        newEntry = '.PY;.COM;.EXE;.BAT;.CMD'
    _PathManager( key, valuename, newEntry, prepend )
+
def _PathManager( key, valuename, newEntry, prepend=0, eliminate_duplicates=1 ):
    '''
    Create a new Path entry on NT machines (or kill an old one)
    user determines whether to alter the USER or the Machine's path
    prepend
        1 -> add newEntry to start
        0 -> add newEntry to end
        -1 -> don't add newEntry
    eliminate_duplicates determines whether to kill equal paths

    All values are converted to lower case
    '''
    # get current value...
    curval = RegQueryValue( key, valuename ) or ''
    # split into elements
    curval = string.split( string.lower(curval), ';' )
    if type( newEntry ) not in (types.ListType, types.TupleType):
        newEntry = string.split( string.lower(newEntry), ';' )
    # eliminate duplicates of the newEntry
    curval = filter( None, curval) # strip out null entries
    if eliminate_duplicates:
        # drop any existing occurrence of the entries being managed
        newval = []
        for p in curval:
            if p not in newEntry:
                newval.append( p )
        curval = newval
    if prepend == 1:
        curval = list(newEntry) + curval
    elif prepend == 0:
        curval = curval + list( newEntry )
    elif prepend == -1: # this call is just killing the path entry
        pass
    #now do the recombination
    curval = string.join( curval, ';' )
    RegSetValue( key, valuename, curval )
+
# maps (scope, setting) -> (registry key path, value name) for the
# locations where NT stores PATH / PATHEXT environment data
COMMON_KEYS = {
('USER','PATH') : ('''HKEY_CURRENT_USER\\Environment''', 'path'),
('MACHINE','PATH') : ('''HKEY_LOCAL_MACHINE\\SYSTEM\\CurrentControlSet\\Control\\Session Manager\\Environment''', 'path'),
('USER','PYEXECUTABLES') : ('''HKEY_CURRENT_USER\\Environment''', 'pathext'),
('MACHINE','PYEXECUTABLES') : ('''HKEY_LOCAL_MACHINE\\SYSTEM\\CurrentControlSet\\Control\\Session Manager\\Environment''', 'pathext')
}
diff --git a/intern/python/modules/mcf/utils/reloader.py b/intern/python/modules/mcf/utils/reloader.py
new file mode 100644
index 00000000000..2d7e2591ed2
--- /dev/null
+++ b/intern/python/modules/mcf/utils/reloader.py
@@ -0,0 +1,33 @@
+import sys, string
+
class Reloader:
    '''
    Class allows for reloading all modules imported
    after the instance is created. Normally you will
    use this by doing:
        import <anything you don't want reloaded>
        from mcf.utils import reloader
        <do testing and rewriting>
        reloader.go()
    '''
    def __init__(self):
        # snapshot of modules loaded so far; these are exempt from reload
        self.keys = sys.modules.keys()
    def __call__(self, *args, **namedargs):
        # reload every module imported since __init__ ran
        done = []
        for key, val in sys.modules.items():
            if key not in self.keys:
                try:
                    reload( val )
                    done.append( key )
                except (ImportError):
                    print '''Couldn't reload module:''', key
                except (TypeError): # for None's
                    # is a flag to prevent reloading
                    pass
        if done:
            print '''Reloaded:''', string.join( done, ', ')
        else:
            print '''No modules reloaded'''
+
# the default reloader instance; call reloader.go() to reload
# everything imported after this module was first imported
go = Reloader()
diff --git a/intern/python/modules/mcf/utils/singletonlist.py b/intern/python/modules/mcf/utils/singletonlist.py
new file mode 100644
index 00000000000..5ca2f5000c2
--- /dev/null
+++ b/intern/python/modules/mcf/utils/singletonlist.py
@@ -0,0 +1,104 @@
class SingletonList:
    '''
    A SingletonList always has a length of one or 0,
    appends overwrite the single element, iteration will
    return precisely one element. Attempts to get any item
    other than 0 will raise an IndexError or return the single
    item depending on whether the 'raiseIndexError' flag is
    true or false (generally it should be true except if the
    for x in SingletonList: construct is known never to be
    used, since this construct will create an infinite loop
    if we never raise an IndexError).
    '''
    def __init__(self,base=None,raiseIndexError=1):
        self._base = base
        self.raiseIndexError = raiseIndexError
    def __len__(self):
        '''
        The length is 0 if no _base, 1 if a base
        '''
        # NOTE(review): __init__ always sets _base (even to None), so
        # length only becomes 0 after remove()/__delitem__ delete it
        if hasattr(self, '_base'):
            return 1
        else:
            return 0
    def __getitem__(self,ind):
        '''
        Get the item if ind == 0, else raise an IndexError or return
        the item, depending on the raiseIndexError flag
        '''
        if ind == 0:
            try:
                return self._base
            except AttributeError:
                raise IndexError, ind
        elif self.raiseIndexError:
            raise IndexError, ind
        else:
            return self._base
    def __setitem__(self,ind, item):
        '''
        The item is to become the base, regardless of the index used
        '''
        self._base = item
    def __delitem__(self,ind):
        '''
        Delete the base, regardless of the index used
        '''
        try:
            del(self._base)
        except AttributeError:
            raise IndexError, ind
    def append(self,item):
        '''
        Replace the base with the item
        '''
        self._base = item
    def index(self,item):
        '''
        if the item is the base, return the only valid index (0)
        '''
        try:
            if item == self._base:
                return 0
        except:
            pass
        raise ValueError, item
    def count(self, item):
        '''
        If the item is the base, we have one, else 0
        '''
        try:
            if item == self._base:
                return 1
        except:
            pass
        return 0
    insert = __setitem__
    def remove(self, item):
        '''
        if the item is the base, delete the base, else ValueError
        '''
        try:
            if item == self._base:
                del(self._base)
                return
        except:
            pass
        raise ValueError, item
    def reverse(self):
        # a single element is its own reversal
        pass
    def sort(self):
        # a single element is already sorted
        pass
    def __repr__(self):
        try:
            return '[%s]'%`self._base`
        except AttributeError:
            return '[]'
    # store and copy functions
#   def __getinitargs__(self):
#       return (self._base,self.raiseIndexError)
#   def __getstate__(self,*args,**namedargs):
#       pass
#   def __setstate__(self,*args,**namedargs):
#       pass
+
diff --git a/intern/python/modules/mcf/utils/tempclassmodule.py b/intern/python/modules/mcf/utils/tempclassmodule.py
new file mode 100644
index 00000000000..9fe6eed3918
--- /dev/null
+++ b/intern/python/modules/mcf/utils/tempclassmodule.py
@@ -0,0 +1,251 @@
+'''
+Generate module for holding temporary classes which
+will be reconstructed into the same module to allow
+cPickle and the like to properly import them.
+
+Note: You _must_ pickle a reference to the tempclassmodule
+_before_ you pickle any instances which use the classes stored
+in the module! Also, the classes cannot reference anything
+in their dictionary or bases tuples which are not normally
+pickleable (in particular, you can't subclass a class in the
+same tempclassmodule or a tempclassmodule which you cannot
+guarantee will be loaded before the dependent classes. (i.e.
+by guaranteeing they will be pickled first)
+'''
+import new, time, string, sys, types
+
def buildModule(packagename, basename, rebuild=None, initialcontents=None):
    '''
    Dynamically build a module or rebuild one, generates
    a persistent ID/name if not rebuilding. The persistent
    ID is the value of basename+`time.time()` with the decimal
    point removed (i.e. a long string of digits). Packagename
    must be an importable package! Will raise an ImportError
    otherwise. Also, for easy reconstitution, basename must not
    include any decimal points.

    initialcontents is a dictionary (or list) of elements which will be
    added to the new module.

    Returns a (name, module) tuple.
    '''
    if rebuild == None:
        # generate a new unique name from the current time
        timestamp = `time.time()`
        decpos = string.find(timestamp,'.')
        basename = basename+timestamp[:decpos]+timestamp[decpos+1:]
    name = string.join((packagename, basename), '.')
    # a and b are unused leftovers from the exec variant commented below
    a = {}
    b = {}
    try: # see if we've already loaded this module...
        mod = __import__( name, {},{}, string.split( name, '.'))
        if initialcontents:
            _updateFrom(mod, initialcontents)
        return mod.__name__, mod
    except ImportError:
        pass
    mod = new.module(name)
    sys.modules[name] = mod
    # following is just to make sure the package is loaded before attempting to alter it...
    __import__( packagename, {}, {}, string.split(packagename) )
##  exec 'import %s'%(packagename) in a, b ### Security Risk!
    setattr(sys.modules[ packagename ], basename, mod)
    # now do the update if there were initial contents...
    if initialcontents:
        _updateFrom(mod, initialcontents)
    return name, mod
+
def buildClassIn(module, *classargs, **namedclassargs):
    '''
    Build a new class and register it in the module
    as if it were really defined there.

    classargs[0] is the class name, classargs[1] the bases tuple;
    namedclassargs becomes the class dictionary (plus the
    __temporary_class__ marker used by deconstruct_class).
    Returns the new class object.
    '''
    # bugfix: removed leftover debug statement
    # "print module, classargs, namedclassargs"
    namedclassargs["__temporary_class__"] = 1
    newclass = new.classobj(classargs[0], classargs[1], namedclassargs)
    newclass.__module__ = module.__name__
    setattr(module, newclass.__name__, newclass)
    return newclass
+
def addClass(module, classobj):
    '''
    Register classobj in the given tempclassmodule.

    Points the class' __module__ attribute at the module and marks
    it as a temporary class so that deconstruct_class will serialise
    its full definition rather than an import path.
    '''
    setattr( classobj, "__temporary_class__", 1)
    classobj.__module__ = module.__name__
    setattr(module, classobj.__name__, classobj)
+
def delClass(module, classobj):
    '''
    Remove this class from the module (silently a no-op when it is
    not present).  Note: after running this the classobj is no
    longer able to be pickled/unpickled unless it is subsequently
    added to another module, because its __module__ attribute still
    points at a module which will no longer save its definition!
    '''
    if hasattr(module, classobj.__name__):
        delattr(module, classobj.__name__)
+
def _packageName(modulename):
    # split a dotted module name into (packagename, basename) on the
    # last dot; assumes modulename contains at least one dot -- a
    # plain name would yield a nonsense result (rfind returns -1)
    decpos = string.rfind(modulename, '.')
    return modulename[:decpos], modulename[decpos+1:]
+
def _updateFrom(module, contentsource):
    '''
    For dealing with unknown datatypes (those passed in by the user),
    we want to check and make sure we're building the classes correctly.
    '''
    # often will pass in a protoNamespace from which to update (during cloning)
    if type(contentsource) in ( types.DictType, types.InstanceType):
        contentsource = contentsource.values()
    # contentsource should now be a list of classes or class-building tuples
    for val in contentsource:
        if type(val) is types.ClassType:
            # NOTE(review): failures are deliberately swallowed --
            # this is a best-effort bulk update
            try:
                addClass(module, val)
            except:
                pass
        elif type(val) is types.TupleType:
            try:
                apply(buildClassIn, (module,)+val)
            except:
                pass
+
def deconstruct(templatemodule):
    '''
    Return a tuple which can be passed to reconstruct
    in order to get a rebuilt version of the module
    after pickling. i.e. apply(reconstruct, deconstruct(tempmodule))
    is the equivalent of doing a deepcopy on the tempmodule.
    '''
##  import pdb
##  pdb.set_trace()
    classbuilder = []
    # collect a deconstruction record for every class in the module
    for name, classobj in templatemodule.__dict__.items():
        if type(classobj) is types.ClassType: # only copy class objects, could do others, but these are special-purpose modules, not general-purpose ones.
            classbuilder.append( deconstruct_class( classobj) )
##  import pdb
##  pdb.set_trace()
    return (templatemodule.__name__, classbuilder)
##  except AttributeError:
##      print templatemodule
##      print classbuilder
+
def deconstruct_class( classobj ):
    '''
    Pull apart a class into a tuple of values which can be used
    to reconstruct it through a call to buildClassIn.

    Returns a 2-tuple (module, name) for regular classes (meaning
    "re-import on load") or a 3-tuple (name, bases, dict) for
    temporary classes which must be rebuilt.
    '''
    if not hasattr( classobj, "__temporary_class__"):
        # this is a regular class, re-import on load...
        return (classobj.__module__, classobj.__name__)
    else:
        # this is a temporary class which can be deconstructed
        bases = []
        for classobject in classobj.__bases__:
            bases.append( deconstruct_class (classobject) )
        return (classobj.__name__, tuple (bases), classobj.__dict__)
+
+
def reconstruct(modulename, classbuilder):
    '''
    Rebuild a temporary module and all of its classes
    from the structure created by deconstruct.
    i.e. apply(reconstruct, deconstruct(tempmodule))
    is the equivalent of doing a deepcopy on the tempmodule.
    '''
##  import pdb
##  pdb.set_trace()
    mname, newmod = apply(buildModule, _packageName(modulename)+(1,) ) # 1 signals reconstruct
    reconstruct_classes( newmod, classbuilder )
    return newmod
+
def reconstruct_classes( module, constructors ):
    '''
    Put classes back together from the tuples of values created by
    deconstruct_class, registering temporary classes in module.
    Returns the list of class objects.
    '''
    classes = []
    # bugfix: removed leftover debug pprint of the constructors
    for constructor in constructors:
        if len (constructor) == 2:
            # this is a standard class, re-import.
            # bugfix: unpack into a local name instead of clobbering
            # the module parameter, which broke reconstruction of any
            # temporary class appearing later in the list
            modulename, name = constructor
            temporarymodule = __import__(
                modulename,
                {},{},
                string.split(modulename)+[name]
            )
            classobject = getattr (temporarymodule, name)
        else:
            # this is a class which needs to be re-constructed
            (name, bases, namedarguments) = constructor
            bases = tuple( reconstruct_classes( module, bases ))
            classobject = apply (
                buildClassIn,
                (module, name, bases), # name and bases are the args to the class constructor along with the dict contents in namedarguments
                namedarguments,
            )
        classes.append (classobject)
    return classes
+
+
def destroy(tempmodule):
    '''
    Destroy the module to allow the system to do garbage collection
    on it. I'm not sure that the system really does do gc on modules,
    but one would hope :)
    '''
    name = tempmodule.__name__
    tempmodule.__dict__.clear() # clears references to the classes
    try:
        del(sys.modules[name])
    except KeyError:
        pass
    # also unhook the module from its parent package
    packagename, modname = _packageName(name)
    try:
        delattr(sys.modules[ packagename ], modname)
    except AttributeError:
        pass
    del( tempmodule ) # no, I don't see any reason to do it...
    return None
+
+
def deepcopy(templatemodule, packagename=None, basename=None):
    '''
    Rebuild the whole Module and it's included classes
    (just the classes). Note: This will _not_ make instances
    based on the old classes point to the new classes!
    The value of this function is likely to be minimal given
    this restriction. For pickling use deconstruct/reconstruct
    for simple copying just return the module.
    Returns the (name, module) tuple from buildModule.
    '''
    name, classbuilder = deconstruct( templatemodule )
    # bugfix: derive the default package/base names whenever either
    # is missing -- previously an explicit packagename with
    # basename=None raised NameError (tb was never bound)
    if packagename is None or basename is None:
        tp, tb = _packageName( name )
        if packagename is None:
            packagename = tp
        if basename is None:
            basename = tb
    newmod = buildModule(packagename, basename, initialcontents=classbuilder )
    return newmod
+
+if __name__ == "__main__":
+ def testPickle ():
+ import mcf.vrml.prototype
+ name, module = buildModule( 'mcf.vrml.temp', 'scenegraph' )
+ buildClassIn( module, 'this', () )
+ buildClassIn( module, 'that', (mcf.vrml.prototype.ProtoTypeNode,) )
+## import pdb
+## pdb.set_trace()
+ import pprint
+ pprint.pprint( deconstruct( module ))
+ name,builder = deconstruct( module )
+ destroy( module)
+ return reconstruct(name, builder)
+ t = testPickle()
+ print t
+
+
+ \ No newline at end of file
diff --git a/intern/python/modules/mcf/utils/typeclasses.py b/intern/python/modules/mcf/utils/typeclasses.py
new file mode 100644
index 00000000000..ed798dfe3da
--- /dev/null
+++ b/intern/python/modules/mcf/utils/typeclasses.py
@@ -0,0 +1,50 @@
+'''
+Classes of Types
+
+Often you want to be able to say:
+ if type(obj) in MutableTypes:
+ yada
+
+This module is intended to make that easier.
+Just import and use :)
+'''
+import types
+
# groupings of the standard type objects by capability; regarray /
# regnumpy below may extend these with array / Numeric array types
MutableTypes = [ types.ListType, types.DictType, types.InstanceType ]
MutableSequenceTypes = [ types.ListType ]
SequenceTypes = [ types.ListType, types.StringType, types.TupleType ]
NumericTypes = [ types.IntType, types.FloatType, types.LongType, types.ComplexType ]
MappingTypes = [ types.DictType ]
+
def regarray():
    # register array.array with the type categories; returns 1 on
    # success (or when already registered), 0 when array is missing.
    # NOTE(review): the import below binds a function-local name, so
    # the globals() guard never actually fires -- confirm intended
    if globals().has_key('array'):
        return 1
    try:
        import array
        SequenceTypes.append( array.ArrayType )
        MutableTypes.append( array.ArrayType )
        MutableSequenceTypes.append( array.ArrayType )
        return 1
    except ImportError:
        return 0
+
def regnumpy():
    '''
    Call if you want to register numpy arrays
    according to their types.  Returns 1 on success,
    0 when the Numeric module is unavailable.
    '''
    # NOTE(review): as in regarray, the import binds a local name,
    # so the globals() guard never fires -- confirm intended
    if globals().has_key('Numeric'):
        return 1
    try:
        import Numeric
        SequenceTypes.append( Numeric.ArrayType )
        MutableTypes.append( Numeric.ArrayType )
        MutableSequenceTypes.append( Numeric.ArrayType )
        return 1
    except ImportError:
        return 0
+
# for now, I'm going to always register these, if the module becomes part of the base distribution
# it might be better to leave it out so numpy isn't always getting loaded...
regarray()
regnumpy()
diff --git a/intern/python/modules/mcf/utils/userquery.py b/intern/python/modules/mcf/utils/userquery.py
new file mode 100644
index 00000000000..e1dc1bfeda5
--- /dev/null
+++ b/intern/python/modules/mcf/utils/userquery.py
@@ -0,0 +1,17 @@
+import string
+
def userquery( prompt, choices, contextdata = '', defaultind=0 ):
    '''
    Present the numbered choices to the user and return the selected
    item.  An empty answer or an out-of-range number yields
    choices[defaultind]; a non-numeric answer is returned verbatim
    as a string.
    '''
    if contextdata:
        print 'Contextual Information:', contextdata
    for x in range( len( choices ) ):
        print '(%s)'%x, `choices[x]`
    choice = raw_input( prompt+( '(%s):'%defaultind ) )
    if not choice:
        return choices[ defaultind ]
    try:
        choice = string.atoi( choice )
        return choices[ choice]
    except IndexError :
        return choices[ defaultind ]
    except ValueError:
        # non-numeric input: hand the raw string back to the caller
        return choice
diff --git a/intern/python/modules/mcf/utils/ver.py b/intern/python/modules/mcf/utils/ver.py
new file mode 100644
index 00000000000..1d36fcd122a
--- /dev/null
+++ b/intern/python/modules/mcf/utils/ver.py
@@ -0,0 +1,17 @@
+'''
+Module giving a float representation
+of the interpreter major version (1.4, 1.5 etceteras)
+
+ver -- Float representation of the current interpreter version
+
+Note: Since I no longer have any Python 1.4 modules, this module is
+no longer in use by me. I intend to leave it here for the next version
+jump :) .
+'''
import regex, sys, string
# old regex.match returns the length of the match, so this slices the
# leading "major.minor..." digits out of sys.version and converts them
ver = string.atof(sys.version[:regex.match('[0-9.]*', sys.version)])

### Clean up namespace
del(regex)
del(sys)
del(string)
diff --git a/intern/python/modules/mcf/utils/walkerable.py b/intern/python/modules/mcf/utils/walkerable.py
new file mode 100644
index 00000000000..e4c18302097
--- /dev/null
+++ b/intern/python/modules/mcf/utils/walkerable.py
@@ -0,0 +1,46 @@
+'''
+Really simplistic walker-processable hierobjects, doesn't
+have parent attributes, every element has an __attrDict__
+item and a childlist. This is different from the mechanisms
+we'll want to use for multi-tree systems, but it's fairly
+close. Should be fairly simply worked with.
+'''
class WalkerAble:
    '''
    Simple hierarchic objects with the following elements

    __attrDict__ -- app-specific attributes
    __childlist__ -- children of this node
    __gi__ -- "type" or Generic Indicator of this node
    __childlist__append__ -- as you'd expect, method on childlist to add an element
    '''
    def __init__(self, childlist=None, attrDict=None, gi=None):
        # write through __dict__ directly to avoid __setattr__,
        # which would store these in __attrDict__ instead
        self.__dict__['__attrDict__'] = attrDict or {}
        self.__dict__['__childlist__'] = childlist or []
        self.__dict__['__gi__'] = gi or ''
        self.__dict__['__childlist__append__'] = self.__childlist__.append

    def __getattr__(self, attrName):
        '''
        Note: you can store attributes with the same names as
        the reserved names, but to get them back, you'll need
        to read it directly out of the attrDict
        '''
        if attrName != '__attrDict__':
            try:
                return self.__attrDict__[attrName]
            except KeyError:
                pass
        raise AttributeError, attrName

    def __setattr__(self, attrName, attrVal):
        # all regular attribute writes land in the app-specific dict
        self.__attrDict__[attrName] = attrVal
    def __setGI__(self, gi):
        # bypass __setattr__ so the node type is stored on the instance
        self.__dict__['__gi__'] = gi
    def __repr__(self):
        return '''<WalkerAble %(__gi__)s %(__attrDict__)s %(__childlist__)s>'''%self.__dict__

    # copy functions
#   def __getinitargs__(self):
#       return (self.__childlist__, self.__attrDict__, self.__gi__)
diff --git a/intern/python/modules/simpleparse/__init__.py b/intern/python/modules/simpleparse/__init__.py
new file mode 100644
index 00000000000..961871c36a5
--- /dev/null
+++ b/intern/python/modules/simpleparse/__init__.py
@@ -0,0 +1,5 @@
+'''
+Simple parsing using mxTextTools
+
+tar -cvf simpleparse.tar --exclude-from=exclude.txt
+''' \ No newline at end of file
diff --git a/intern/python/modules/simpleparse/bootstrap.py b/intern/python/modules/simpleparse/bootstrap.py
new file mode 100644
index 00000000000..65274bb03ee
--- /dev/null
+++ b/intern/python/modules/simpleparse/bootstrap.py
@@ -0,0 +1,279 @@
+
+from TextTools.TextTools import *
+
+#####################################################
+# FOLLOWING IS THE BOOTSTRAP PARSER, HAND-CODED!
+
# production names, indexed by their position in parsertuplelist below
parsernamelist = [
'declarationset', # 0
'declaration', # 1
'implicit_group', # 2 --> no longer used
'added_token', # 3
'seq_added_token', #4
'fo_added_token', #5
'or_added_token', #6
'and_added_token', #7
'element_token', #8
'group', #9
'negpos_indicator', #10
'occurence_indicator', #11
'unreportedname', #12
'name', #13
'<ts>', # 14
'literal', #15
'range', # 16
'CHARBRACE', #17
'CHARDASH', # 18
'CHARRANGE', # 19
'CHARNOBRACE', # 20
'ESCAPEDCHAR', # 21
'SPECIALESCAPEDCHAR', # 22
'OCTALESCAPEDCHAR' # 23
]

# pre-size the table list; the entries are replaced below
parsertuplelist = range( 24 )
+
+
+
# hand-coded mxTextTools tag tables for the bootstrap grammar; each
# entry is (tagobj, command, argument[, jump-on-failure, jump-on-success])
parsertuplelist[0] = ( # declarationset
    ('declaration', TableInList,(parsertuplelist, 1)), # must be at least one declaration
    ('declaration', TableInList,(parsertuplelist, 1),1,0)
)
parsertuplelist[1] = ( # declaration
    (None, TableInList,(parsertuplelist, 14)), # ts
    (None, SubTable, (
        ('unreportedname', TableInList,(parsertuplelist, 12),1,2),
        ('name', TableInList,(parsertuplelist, 13)), # name
        )
    ),
    (None, TableInList,(parsertuplelist, 14)), # ts
    (None, Word, ':='),
    (None, TableInList,(parsertuplelist, 14)), # ts
    ('element_token', TableInList,(parsertuplelist, 8)),
    (None, SubTable, ( # added_token
        ('seq_added_token', TableInList, (parsertuplelist,4), 1, 5 ),
        ('fo_added_token', TableInList, (parsertuplelist,5), 1, 4 ),
        ('or_added_token', TableInList, (parsertuplelist,6), 1, 3 ),
        ('and_added_token', TableInList, (parsertuplelist,7), 1, 2 ),
        (None, Fail, Here),
        ('seq_added_token', TableInList, (parsertuplelist,4), 1, 0 ),
        ('fo_added_token', TableInList, (parsertuplelist,5), 1, -1 ),
        ('or_added_token', TableInList, (parsertuplelist,6), 1, -2 ),
        ('and_added_token', TableInList, (parsertuplelist,7), 1, -3 ),
    ),1,1),
    (None, TableInList,(parsertuplelist, 14)), # ts
)
parsertuplelist[3] = ( # added_token
    ('seq_added_token', TableInList, (parsertuplelist,4), 1, 5 ),
    ('fo_added_token', TableInList, (parsertuplelist,5), 1, 4 ),
    ('or_added_token', TableInList, (parsertuplelist,6), 1, 3 ),
    ('and_added_token', TableInList, (parsertuplelist,7), 1, 2 ),
    (None, Fail, Here),
    ('seq_added_token', TableInList, (parsertuplelist,4), 1, 0 ),
    ('fo_added_token', TableInList, (parsertuplelist,5), 1, -1 ),
    ('or_added_token', TableInList, (parsertuplelist,6), 1, -2 ),
    ('and_added_token', TableInList, (parsertuplelist,7), 1, -3 ),
)
parsertuplelist[4] = ( # seq_added_token
    (None, TableInList,(parsertuplelist, 14)), # ts
    (None, Is, ','),
    (None, TableInList,(parsertuplelist, 14)), # ts
    ('element_token', TableInList,(parsertuplelist, 8)),
    (None, TableInList,(parsertuplelist, 14),4,1), # ts
    (None, Is, ',',3,1),
    (None, TableInList,(parsertuplelist, 14),2,1), # ts
    ('element_token', TableInList,(parsertuplelist, 8),1,-3),
)
parsertuplelist[5] = ( # fo_added_token
    (None, TableInList,(parsertuplelist, 14)), # ts
    (None, Is, '/'),
    (None, TableInList,(parsertuplelist, 14)), # ts
    ('element_token', TableInList,(parsertuplelist, 8)),
    (None, TableInList,(parsertuplelist, 14),4,1), # ts
    (None, Is, '/',3,1),
    (None, TableInList,(parsertuplelist, 14),2,1), # ts
    ('element_token', TableInList,(parsertuplelist, 8),1,-3),
)
parsertuplelist[6] = ( # or_added_token
    (None, TableInList,(parsertuplelist, 14)), # ts
    (None, Is, '|'),
    (None, TableInList,(parsertuplelist, 14)), # ts
    ('element_token', TableInList,(parsertuplelist, 8)),
    (None, TableInList,(parsertuplelist, 14),4,1), # ts
    (None, Is, '|',3,1),
    (None, TableInList,(parsertuplelist, 14),2,1), # ts
    ('element_token', TableInList,(parsertuplelist, 8),1,-3),
)
parsertuplelist[7] = ( # and_added_token
    (None, TableInList,(parsertuplelist, 14)), # ts
    (None, Is, '&'),
    (None, TableInList,(parsertuplelist, 14)), # ts
    ('element_token', TableInList,(parsertuplelist, 8)),
    (None, TableInList,(parsertuplelist, 14),4,1), # ts
    (None, Is, '&',3,1),
    (None, TableInList,(parsertuplelist, 14),2,1), # ts
    ('element_token', TableInList,(parsertuplelist, 8),1,-3),
)
parsertuplelist[8] = ( # element_token
    ('negpos_indicator', TableInList,(parsertuplelist, 10),1,1),
    (None, TableInList,(parsertuplelist, 14),1,1), # ts, very inefficient :(
    ('literal', TableInList, (parsertuplelist,15),1, 4 ),
    ('range', TableInList, (parsertuplelist,16),1, 3 ),
    ('group', TableInList, (parsertuplelist,9),1, 2 ),
    ('name', TableInList, (parsertuplelist,13) ),
    (None, TableInList,(parsertuplelist, 14),1,1), # ts, very inefficient :(
    ('occurence_indicator', TableInList,(parsertuplelist, 11), 1,1),
)
parsertuplelist[9] = ( # group
    (None, Is, '('),
    (None, TableInList,(parsertuplelist, 14),1,1), # ts
    ('element_token', TableInList, (parsertuplelist,8) ),
    (None, SubTable, ( # added_token
        ('seq_added_token', TableInList, (parsertuplelist,4), 1, 5 ),
        ('fo_added_token', TableInList, (parsertuplelist,5), 1, 4 ),
        ('or_added_token', TableInList, (parsertuplelist,6), 1, 3 ),
        ('and_added_token', TableInList, (parsertuplelist,7), 1, 2 ),
        (None, Fail, Here),
        ('seq_added_token', TableInList, (parsertuplelist,4), 1, 0 ),
        ('fo_added_token', TableInList, (parsertuplelist,5), 1, -1 ),
        ('or_added_token', TableInList, (parsertuplelist,6), 1, -2 ),
        ('and_added_token', TableInList, (parsertuplelist,7), 1, -3 ),
    ),1,1),
    (None, TableInList,(parsertuplelist, 14),1,1), # ts
    (None, Is, ')'),
)
parsertuplelist[10] = ( # negpos_indicator
    (None, Is, "+",1,2),
    (None, Is, "-"),
)
parsertuplelist[11] = ( #occurence_indicator
    (None, Is, "+",1,3),
    (None, Is, "*",1,2),
    (None, Is, '?'),
)
parsertuplelist[12] = ( #unreportedname
    (None, Is, '<'),
    ('name', TableInList, (parsertuplelist, 13)), # inefficiency in final system :(
    (None, Is, '>'),
)
parsertuplelist[13] = ( # name
    (None, IsIn, alpha+'_'),
    (None, AllIn, alphanumeric+'_',1,1)
)

parsertuplelist[14] = ( # ts (whitespace)
    (None, AllIn, ' \011\012\013\014\015',1,1),
    (None, SubTable, (
        (None, Is, '#' ),
        (None, AllNotIn, '\n',1,1 ) # problem if there's a comment at the end of the file :(
        )
    ,1,-1 ),
    )
# this isn't actually used in the bootstrap parser...
_specialescapedchar = parsertuplelist[22] = ( # SPECIALESCAPEDCHAR
    ('SPECIALESCAPEDCHAR', IsIn, '\\abfnrtv'),
)
_octalescapechar = parsertuplelist[23] = ( # OCTALESCAPEDCHAR
    (None, IsIn, '01234567'),
    (None, IsIn, '01234567',2),
    (None, IsIn, '01234567',1),
)
_escapedchar = parsertuplelist[21] = ( # escapedcharacter
    (None, Is, '\\' ),
    ('SPECIALESCAPEDCHAR', IsIn, '\\abfnrtv',1,4),
    ('OCTALESCAPEDCHAR', SubTable, _octalescapechar)
)

_charnobrace = parsertuplelist[20] = ( # charnobrace
    ('ESCAPEDCHAR', Table, _escapedchar, 1,2),
    ('CHAR', IsNot, ']'),
)
_rangedef = parsertuplelist[19] = ( # charrange
    ('CHARNOBRACE', Table, _charnobrace ),
    (None, Is, '-'),
    ('CHARNOBRACE', Table, _charnobrace ),
)


parsertuplelist[16] = ( #range
    (None, Is, '['),
    ('CHARBRACE', Is, ']',1,1),
    ('CHARDASH', Is, '-',1,1),
    ('CHARRANGE', Table, _rangedef, 1,0),
    (None, SubTable, _charnobrace, 1,-1),
    (None, Is, ']')
)

# helper tables for quoted strings (escape-aware scanning)
_sqstr = (
    (None, Is, "'" ),
#   (None, Is, "'",1, 5 ), # immediate close
    (None, AllNotIn, "\\'",1,1 ), # all not an escape or end
    (None, Is, "\\", 2, 1), # is an escaped char
    (None, Skip, 1, 1, -2), # consume the escaped char and loop back
    (None, Is, "'" ) # in case there was no matching ', which would also cause a fail for allnotin
    )
_dblstr = (
    (None, Is, '"' ),
#   (None, Is, '"',1, 5 ), # immediate close
    (None, AllNotIn, '\\"' ,1,1), # not an escaped or end
    (None, Is, "\\", 2, 1), # is an escaped char
    (None, Skip, 1, 1, -2), # consume the escaped char and loop back
    (None, Is, '"' ) # in case there was no matching ", which would also cause a fail for allnotin
    )



# literal := ("'",(CHARNOSNGLQUOTE/ESCAPEDCHAR)*,"'") / ('"',(CHARNODBLQUOTE/ESCAPEDCHAR)*,'"')

parsertuplelist[15] = ( # literal
    (None, Is, "'", 4, 1 ),
    ('CHARNOSNGLQUOTE', AllNotIn, "\\'",1,1 ), # all not an escape or end
    ('ESCAPEDCHAR', Table, _escapedchar, 1, -1),
    (None, Is, "'", 1,5 ),
    (None, Is, '"' ),
    ('CHARNODBLQUOTE', AllNotIn, '\\"',1,1 ), # all not an escape or end
    ('ESCAPEDCHAR', Table, _escapedchar, 1, -1),
    (None, Is, '"'),
)
+
# the SimpleParse grammar describing its own declaration syntax,
# used as the default input for parse() below (a self-description)
declaration = r'''declarationset := declaration+
declaration := ts , (unreportedname/name) ,ts,':=',ts, element_token, ( seq_added_token / fo_added_token / or_added_token / and_added_token )*, ts
seq_added_token := (ts,',',ts, element_token)+
fo_added_token := (ts,'/',ts, element_token)+
or_added_token := (ts,'|',ts, element_token)+ # not currently supported
and_added_token := (ts,'&',ts, element_token)+ # not currently supported
element_token := negpos_indicator?, ts, (literal/range/group/name),ts, occurence_indicator?
group := '(',ts, element_token, ( seq_added_token / fo_added_token / or_added_token / and_added_token )*, ts, ')'

negpos_indicator := '+'/'-'
occurence_indicator := '+'/'*'/'?'
unreportedname := '<', name, '>'
name := [a-zA-Z_],[a-zA-Z0-9_]*
<ts> := ( [ \011-\015]+ / ('#',-'\n'+,'\n')+ )*
literal := ("'",(CHARNOSNGLQUOTE/ESCAPEDCHAR)*,"'") / ('"',(CHARNODBLQUOTE/ESCAPEDCHAR)*,'"')


range := '[',CHARBRACE?,CHARDASH?, (CHARRANGE/CHARNOBRACE)*, CHARDASH?,']'
CHARBRACE := ']'
CHARDASH := '-'
CHARRANGE := CHARNOBRACE, '-', CHARNOBRACE
CHARNOBRACE := ESCAPEDCHAR/CHAR
CHAR := -[]]
ESCAPEDCHAR := '\\',( SPECIALESCAPEDCHAR / OCTALESCAPEDCHAR )
SPECIALESCAPEDCHAR := [\\abfnrtv]
OCTALESCAPEDCHAR := [0-7],[0-7]?,[0-7]?
CHARNODBLQUOTE := -[\\"]+
CHARNOSNGLQUOTE := -[\\']+
'''
+
def parse( instr = declaration, parserelement = 'declarationset' ):
    '''
    Parse instr with the hand-coded bootstrap tables, starting from
    the production named parserelement; returns the raw TextTools
    tag() result tuple.
    '''
    tbl = (
        (parserelement, Table, parsertuplelist[parsernamelist.index( parserelement )] ),
    )
    return tag( instr, tbl)
+
if __name__ == '__main__':
    # command-line smoke test: parse argv-supplied text/production
    import sys, pprint
    pprint.pprint( apply( parse, tuple( sys.argv[1:] ) ) )
+
+
diff --git a/intern/python/modules/simpleparse/generator.py b/intern/python/modules/simpleparse/generator.py
new file mode 100644
index 00000000000..67f83106dfe
--- /dev/null
+++ b/intern/python/modules/simpleparse/generator.py
@@ -0,0 +1,432 @@
+from TextTools.TextTools import *
+import bootstrap # the hand-coded parser
+import operator, strop as string
+
+def err( value ):
+ print value
+
+class _BaseGenerator:
+ '''
+ Class providing the functions required to turn a
+ parse tree as generated by the bootstrap parser into
+ a new set of parser tuples. I.e. a parser generator :)
+ Effectively this is the bootstrap generator.
+ '''
+ def __init__( self, syntaxstring = bootstrap.declaration, parserelement = 'declarationset' ):
+ '''
+ Turn syntaxstring into a parsetree using
+ the bootstrap module's parse command
+ '''
+ # should do some error checking in here :)
+ self.syntaxstring = syntaxstring
+ self.parsetree = bootstrap.parse( syntaxstring, parserelement )[1][0] # the child list
+ self.nameset = []
+ self.tupleset = []
+ def stringval( self, tuple ):
+ '''
+ Return the string value for a parse-result tuple
+ '''
+ return self.syntaxstring[ tuple[1]:tuple[2] ]
+ def build( self, prebuiltnodes=() ):
+ '''
+ Build a new parsing table from the syntax string.
+ New parsers may be accessed using the parserbyname method.
+
+ The pre-built nodes are parsing tables for inclusion in the grammar
+ Added version 1.0.1 to provide greater extensibility.
+ '''
+ # first register all declared names to reserve their indices
+ #if self.__class__.__name__ == 'Generator':
+ # import pdb
+ # pdb.set_trace()
+ for key, value in prebuiltnodes:
+ self.nameset.append( key )
+ self.tupleset.append( value )
+ for decl in self.parsetree[3]:
+ #print decl
+ name = self.stringval( decl[3][0] )
+ self.nameset.append( name )
+ self.tupleset.append( None)
+ #print 'Declared names:',self.nameset
+ for i in range( len( self.nameset)):
+ #print '''Processing declaration %s '''% self.nameset[i]
+ dataset = self.group( ('group',1,2, self.parsetree[3][i][3][1:]), self )
+ if dataset:
+ self.tupleset[i] = tuple( dataset)
+ def parserbyname( self, name ):
+ '''
+ Retrieve a single parsing tuple by its production name
+ '''
+ try:
+ return self.tupleset[ self.nameset.index( name ) ]
+ except ValueError:
+ print '''Could not find parser tuple of name''', name
+ return ()
+ def allparsers (self):
+ '''
+ Return a list of (productionname, parsingtuple) values
+ suitable for passing to another generator as its pre-calculated
+ set of parsing tuples. (See method build)
+ '''
+ returnvalue = []
+ for i in range(len( self.nameset)):
+ returnvalue.append ( (self.nameset[i],self.tupleset[i]) )
+ return returnvalue
+ ### Actual processing functions...
+ def element_token( self, eltup, genobj, reportname=None ):
+ # Determine the type of element
+ # Descry the various options for the element
+ negative = optional = repeating = element = None
+ for data in eltup[3]:
+ if data[0] == 'negpos_indicator':
+ if genobj.stringval ( data ) == '-':
+ negative = 1
+ elif data[0] == 'occurence_indicator':
+ data = genobj.stringval ( data )
+ if data == '*':
+ optional = 1
+ repeating = 1
+ elif data == '+':
+ repeating = 1
+ elif data == '?':
+ optional = 1
+ else:
+ err( 'Unknown occurence indicator '+ data )
+ else:
+ element = data
+ # call the appropriate handler
+ try:
+ return getattr( self, element [0])( element, genobj, negative, repeating, optional)
+ except AttributeError,x:
+ err( '''Didn't find handler for element type %s, parser build aborted'''%element [0])
+ raise x
+
+ def group( self, els, genobj, negative= None, repeating=None, optional = None, reportname=None):
+ '''
+ Determine what type of group we're dealing with and determine what
+ function to call, then call it.
+ '''
+ groupset = els[3]
+ # groupset is an element_token followed by a possible added_token
+ if groupset:
+ els = []
+ els.append( groupset[0] )
+ if len(groupset) > 1:
+ els[len(els):] = groupset[1][3]
+ gtype = groupset[1][0]
+ if gtype == 'seq_added_token':
+ return self.seq( els, genobj, negative, repeating, optional, reportname )
+ elif gtype == 'fo_added_token':
+ return self.fo( els, genobj, negative, repeating, optional, reportname )
+ else:
+ err( '''An as-yet undefined group type was used! %s'''%gtype )
+ else: # default "sequence" of one... could do more work and make it process the results specifically, but that's optimisation ;)
+ return self.seq( els, genobj, negative, repeating, optional, None )
+ else:
+ return []
+
+
+ def seq( self, els, genobj, negative= None, repeating=None, optional = None, reportname=None ):
+ elset = map( self.element_token, els, [genobj]*len( els) )
+ elset = reduce( operator.add, elset )
+ if negative:
+ if repeating:
+ if optional:
+ return [(None, SubTable, (( None, SubTable,( (None, SubTable, tuple( elset), 2,1), (None, Fail, Here),(None,Skip,1) ), 2,1 ), ( None, EOF, Here, -1,1 ), ), ), ]
+ else: # not optional
+ return [(None, SubTable, (( None, SubTable,( (None, SubTable, tuple( elset), 2,1), (None, Fail, Here),(None,Skip,1) )), ( None, SubTable,( (None, SubTable, tuple( elset), 2,1), (None, Fail, Here),(None,Skip,1) ), 2,1 ), ( None, EOF, Here, -1,1 ), ), ), ]
+ else: # single
+ if optional:
+ return [ (None, SubTable, ( (None, SubTable, tuple( elset), 2,1), (None, Fail, Here), (None, Skip, 1) ),1,1) ]
+ else: # not optional
+ return [ (None, SubTable, ( (None, SubTable, tuple( elset), 2,1), (None, Fail, Here), (None, Skip, 1) )) ]
+ else: # positive
+ if repeating:
+ if optional:
+ return [ (None, SubTable, tuple( elset), 1,0) ]
+ else: # not optional
+
+ return [ (None, SubTable, tuple( elset)), (None, SubTable, tuple( elset), 1,0) ]
+ else: # single
+ if optional:
+ return [ (None, SubTable, tuple( elset), 1,1) ]
+ else: # not optional
+ return [ (None, SubTable, tuple( elset)) ]
+
+ def fo( self, els, genobj, negative= None, repeating=None, optional = None, reportname=None ):
+ elset = map( self.element_token, els, [genobj]*len( els) )
+ elset = reduce( operator.add, elset )
+ elset = []
+ for el in els:
+ dataset = self.element_token( el, genobj )
+ if len( dataset) == 1 and len(dataset[0]) == 3: # we can alter the jump states with impunity
+ elset.append( dataset[0] )
+ else: # for now I'm eating the inefficiency and doing an extra SubTable for all elements to allow for easy calculation of jumps within the FO group
+ elset.append( (None, SubTable, tuple( dataset )) )
+ if negative:
+ # all negative FO's have the meaning "a positive, single, non-optional FO not matching"
+ # the flags modify how failure and continuation are handled in that case, so they can use
+ # the same procset.
+ # Note: Negative FO groups are _very_ heavy, they have normally about 4 subtable calls
+ # guess we'll find out how well mxTextTools handles recursive tables :)
+ procset = []
+ for i in range( len( elset) -1): # note that we have to treat last el specially
+ ival = elset[i] + (1,len(elset)-i)
+ procset.append( ival ) # if success, jump past end
+ procset.append( elset[-1] + (2,1) ) # will cause a failure if last element doesn't match
+ procset.append( (None, Fail, Here ) )
+ procset.append( (None, Skip, 1) )
+ # if the following looks familiar you probably looked at seq above
+ if repeating:
+ if optional:
+ return [ (None, SubTable, ( (None, SubTable, tuple( procset), 2,1), (None, EOF, Here,-1,1) ) ) ]
+ else: # not optional
+ return [ (None, SubTable, ( (None, SubTable, tuple( procset)),(None, SubTable, tuple( procset), 2,1), (None, EOF, Here,-1,1) ) ) ]
+ else: # single
+ if optional:
+ return [ (None, SubTable, tuple( procset), 1,1) ]
+ else: # not optional
+ return [ (None, SubTable, tuple( procset) ) ]
+ else: # positive
+ if repeating:
+ if optional:
+ procset = []
+ for i in range( len( elset)):
+ procset.append( elset[i] + (1,-i) ) # if success, go back to start which is -i elements back
+ return procset
+ else: # not optional
+ procset = []
+ for i in range( len( elset)-1):
+ procset.append( elset[i] + (1, len(elset)-i+1) ) # if success, jump to later section
+ procset.append( elset[-1] + ( 1, 2) ) # will cause a failure if last element doesn't match using an explicit fail command
+ procset.append( (None, Fail, Here) ) # will cause a failure if last element doesn't match using an explicit fail command
+ for i in range( len( elset)-1):
+ procset.append( elset[i] + (1, -i) ) # if success, go back to start which is -i elements back
+ procset.append( elset[-1] + ( 1, 1-(len(elset)) ) ) # will cause a failure if last element doesn't match using an explicit fail command
+ return procset
+ else: # single
+ if optional:
+ procset = []
+ for i in range( len( elset)):
+ procset.append( elset[i] + (1,len(elset)-i) ) # if success, jump past end
+ return procset
+ else: # not optional
+ procset = []
+ for i in range( len( elset) -1): # note that we have to treat last el specially
+ procset.append( elset[i] + (1,len(elset)-i) ) # if success, jump past end
+ procset.append( elset[-1] ) # will cause a failure if last element doesn't match
+ return procset
+
+ def name( self, value, genobj, negative = None, repeating = None, optional = None, reportname=None ):
+ svalue = genobj.stringval( value )
+ try:
+ sindex = genobj.nameset.index( svalue )
+ except ValueError: # eeps, a value not declared
+ try:
+ sindex = genobj.nameset.index( '<'+svalue+'>' )
+ svalue = None
+ except ValueError:
+ err( '''The name %s could not be found in the declarationset. The parser will not compile.'''%svalue)
+ genobj.nameset.append( svalue )
+ genobj.tupleset.append( None )
+ sindex = len( genobj.nameset) - 1
+ if negative:
+ if repeating:
+ if optional:
+ return [ (svalue, SubTable, ( (None, TableInList, (genobj.tupleset, sindex), 1,3), (None, EOF, Here,1,2), (None,Skip,1,-2,-2) ) ) ]
+ else: # not optional
+ return [ (svalue, SubTable, ( (None, TableInList, (genobj.tupleset, sindex),2,1),(None, Fail, Here),(None, Skip, 1), (None, TableInList, (genobj.tupleset, sindex), 1,3), (None, EOF, Here,1,2), (None,Skip,1,-2,-2) ) ) ]
+ else: # single
+ if optional:
+ return [ (None, SubTable, ( (None, TableInList, (genobj.tupleset, sindex),2,1),(None, Fail, Here),(svalue, Skip, 1) ),1,1) ]
+ else: # not optional
+ return [ (None, SubTable, ( (None, TableInList, (genobj.tupleset, sindex),2,1),(None, Fail, Here),(svalue, Skip, 1) )) ]
+ else: # positive
+ if repeating:
+ if optional:
+ return [ (svalue, TableInList, (genobj.tupleset, sindex), 1,0) ]
+ else: # not optional
+ return [ (svalue, TableInList, (genobj.tupleset, sindex)), (svalue, TableInList, (genobj.tupleset, sindex),1,0) ]
+ else: # single
+ if optional:
+ return [ (svalue, TableInList, (genobj.tupleset, sindex), 1,1) ]
+ else: # not optional
+ return [ (svalue, TableInList, (genobj.tupleset, sindex)) ]
+ specialescapedmap = {
+ 'a':'\a',
+ 'b':'\b',
+ 'f':'\f',
+ 'n':'\n',
+ 'r':'\r',
+ 't':'\t',
+ 'v':'\v',
+ '\\':'\\',
+ '"':'"',
+ "'":"'",
+ }
+
+ def escapedchar( self, el, genobj ):
+ svalue = ''
+ if el[3][0][0] == 'SPECIALESCAPEDCHAR':
+ svalue = svalue + self.specialescapedmap[ genobj.stringval( el[3][0] ) ]
+ elif el[3][0][0] == 'OCTALESCAPEDCHAR':
+ #print 'OCTALESCAPEDCHAR', genobj.stringval( el)
+ ovnum = 0
+ ovpow = 0
+ ov = genobj.stringval( el[3][0] )
+ while ov:
+ ovnum = ovnum + int( ov[-1] ) * (8**ovpow)
+ ovpow = ovpow + 1
+ ov = ov[:-1]
+ svalue = svalue + chr( ovnum )
+ #print 'svalue ', `svalue`
+ return svalue
+
+
+ def literal( self, value, genobj, negative = None, repeating=None, optional=None, reportname=None ):
+ '''
+ Calculate the tag-table for a literal element token
+ '''
+ svalue = ''
+ for el in value[3]:
+ if el[0] in ('CHARNOSNGLQUOTE', 'CHARNODBLQUOTE'):
+ svalue = svalue+genobj.stringval( el )
+ elif el[0] == 'ESCAPEDCHAR':
+ svalue = svalue + self.escapedchar( el, genobj )
+ #print 'literal value', `genobj.stringval( value )`
+ #print ' svalue', `svalue`
+ # svalue = svalue[1:-1]
+ if negative:
+ if repeating: # a repeating negative value, a "search" in effect
+ if optional: # if fails, then go to end of file
+ return [ (None, sWordStart, BMS( svalue ),1,2), (None, Move, ToEOF ) ]
+ else: # must first check to make sure the current position is not the word, then the same
+ return [ (None, Word, svalue, 2,1),(None, Fail, Here),(None, sWordStart, BMS( svalue ),1,2), (None, Move, ToEOF ) ]
+ #return [ (None, Word, svalue, 2,1),(None, Fail, Here),(None, WordStart, svalue,1,2), (None, Move, ToEOF ) ]
+ else: # a single-character test saying "not a this"
+ if optional: # test for a success, move back if success, move one forward if failure
+ if len(svalue) > 1:
+ return [ (None, Word, svalue, 2,1),
+ (None, Skip, -len(svalue), 2,2), # backup if this was the word to start of word, succeed
+ (None, Skip, 1 ) ] # else just move one character and succeed
+ else: # Uses Is test instead of Word test, should be faster I'd imagine
+ return [ (None, Is, svalue, 2,1),
+ (None, Skip, -1, 2,2), # backtrack
+ (None, Skip, 1 ) ] # else just move one character and succeed
+ else: # must find at least one character not part of the word, so
+ if len(svalue) > 1:
+ return [ (None, Word, svalue, 2,1),
+ (None, Fail, Here),
+ (None, Skip, 1 ) ] # else just move one character and succeed
+ else: #must fail if it finds or move one forward
+ return [ (None, Is, svalue, 2,1),
+ (None, Fail, Here),
+ (None, Skip, 1 ) ] # else just move one character and succeed
+ else: # positive
+ if repeating:
+ if optional:
+ if len(svalue) > 1:
+ return [ (None, Word, svalue, 1,0) ]
+ else:
+ return [ (None, Is, svalue, 1,0) ]
+ else: # not optional
+ if len(svalue) > 1:
+ return [ (None, Word, svalue),(None, Word, svalue,1,0) ]
+ else:
+ return [ (None, Is, svalue),(None, Is, svalue,1,0) ]
+ else: # not repeating
+ if optional:
+ if len(svalue) > 1:
+ return [ (None, Word, svalue, 1,1) ]
+ else:
+ return [ (None, Is, svalue, 1,1) ]
+ else: # not optional
+ if len(svalue) > 1:
+ return [ (None, Word, svalue) ]
+ else:
+ return [ (None, Word, svalue) ]
+
+ def charnobrace( self, cval, genobj ):
+ #print 'cval', cval
+ if cval[3][0][0] == 'ESCAPEDCHAR':
+ return self.escapedchar( cval[3][0], genobj )
+ #print '''Straight non-brace character''', `genobj.stringval( cval[3][0] )`
+ return genobj.stringval( cval )
+ def range( self, value, genobj, negative = None, repeating=None, optional=None, reportname=None ):
+ dataset = []
+ for cval in value[3]:
+ if cval[0] == 'CHARBRACE':
+ dataset.append( ']')
+ elif cval[0] == 'CHARDASH':
+ dataset.append( '-')
+ elif cval[0] == 'CHARNOBRACE':
+ dataset.append( self.charnobrace( cval, genobj ) )
+ elif cval[0] == 'CHARRANGE':
+ start = ord( self.charnobrace( cval[3][0], genobj ) )
+ end = ord( self.charnobrace( cval[3][1], genobj ) )
+ if start < end:
+ dataset.append( string.join( map( chr, range( start, end +1 ) ), '' ) )
+ else:
+ dataset.append( string.join( map( chr, range( end, start +1 ) ), '' ) )
+ else:
+ dataset.append( genobj.stringval( cval ) )
+ if negative:
+ #svalue = set( string.join( dataset, '' ), 0 )
+ svalue = string.join( dataset, '' )
+ else:
+ #svalue = set( string.join( dataset, '' ), 1)
+ svalue = string.join( dataset, '' )
+ if negative:
+ if repeating:
+ if optional:
+ #return [ (None, AllInSet, svalue, 1 ) ]
+ return [ (None, AllNotIn, svalue, 1 ) ]
+ else: # not optional
+ #return [ (None, AllInSet, svalue ) ]
+ return [ (None, AllNotIn, svalue ) ]
+ else: # not repeating
+ if optional:
+ #return [ (None, IsInSet, svalue, 1 ) ]
+ return [ (None, IsNotIn, svalue, 1 ) ]
+ else: # not optional
+ #return [ (None, IsInSet, svalue ) ]
+ return [ (None, IsNotIn, svalue ) ]
+ else:
+ if repeating:
+ if optional:
+ #return [ (None, AllInSet, svalue, 1 ) ]
+ return [ (None, AllIn, svalue, 1 ) ]
+ else: # not optional
+ #return [ (None, AllInSet, svalue ) ]
+ return [ (None, AllIn, svalue ) ]
+ else: # not repeating
+ if optional:
+ #return [ (None, IsInSet, svalue, 1 ) ]
+ return [ (None, IsIn, svalue, 1 ) ]
+ else: # not optional
+ #return [ (None, IsInSet, svalue ) ]
+ return [ (None, IsIn, svalue ) ]
+
+class Generator( _BaseGenerator ):
+ def __init__( self, syntaxstring , parser ):
+ self.syntaxstring = syntaxstring
+ self.parsetree = [0,1,2, tag( syntaxstring, parser )[1] ]
+ self.nameset = []
+ self.tupleset = []
+
+def buildParser( declaration, prebuiltnodes=() ):
+ '''
+ End-developer function to create an application-specific parser
+ the parsing tuple is available on the returned object as
+ object.parserbyname( 'declaredname' ), where declaredname is the
+ name you defined in your language definition file.
+
+ The declaration argument is the text of a language definition file.
+ '''
+ proc = _BaseGenerator( )
+ proc.build()
+ newgen = Generator( declaration, proc.parserbyname( 'declarationset' ) )
+ newgen.build( prebuiltnodes=prebuiltnodes )
+ return newgen
+
+
diff --git a/intern/python/modules/util/README.txt b/intern/python/modules/util/README.txt
new file mode 100644
index 00000000000..60321531bd7
--- /dev/null
+++ b/intern/python/modules/util/README.txt
@@ -0,0 +1,13 @@
+3D utilities
+
+ (c) onk, 1998-2001
+
+ A few low level & math utilities for 2D/3D computations as:
+
+ - area.py: solving close packing problems in 2D
+
+ - vect.py: low level / OO like matrix and vector calculation module
+
+ - vectools.py: more vector tools for intersection calculation, etc.
+
+ - tree.py: binary trees (used by the BSPtree module)
diff --git a/intern/python/modules/util/__init__.py b/intern/python/modules/util/__init__.py
new file mode 100644
index 00000000000..ee6b0cef939
--- /dev/null
+++ b/intern/python/modules/util/__init__.py
@@ -0,0 +1,2 @@
+__all__ = ["vect", "vectools", "area", "quat", "blvect", "tree"]
+
diff --git a/intern/python/modules/util/quat.py b/intern/python/modules/util/quat.py
new file mode 100644
index 00000000000..d23b1c3f6d9
--- /dev/null
+++ b/intern/python/modules/util/quat.py
@@ -0,0 +1,109 @@
+"""Quaternion module
+
+ This module provides conversion routines between Matrices, Quaternions (rotations around
+ an axis) and Eulers.
+
+ (c) 2000, onk@section5.de """
+
+# NON PUBLIC XXX
+
+from math import sin, cos, acos
+from util import vect
+reload(vect)
+
+Vector = vect.Vector
+
+Matrix = vect.Matrix
+
+class Quat:
+ """Simple Quaternion class
+
+Usually, you create a quaternion from a rotation axis (x, y, z) and a given
+angle 'theta', defining the right hand rotation:
+
+ q = fromRotAxis((x, y, z), theta)
+
+This class supports multiplication, providing an efficient way to
+chain rotations"""
+
+ def __init__(self, w = 1.0, x = 0.0, y = 0.0, z = 0.0):
+ self.v = (w, x, y, z)
+
+ def asRotAxis(self):
+ """returns rotation axis (x, y, z) and angle phi (right hand rotation)"""
+ phi2 = acos(self.v[0])
+ if phi2 == 0.0:
+ return Vector(0.0, 0.0, 1.0), 0.0
+ else:
+ s = 1 / (sin(phi2))
+
+ v = Vector(s * self.v[1], s * self.v[2], s * self.v[3])
+ return v, 2.0 * phi2
+
+ def __mul__(self, other):
+ w1, x1, y1, z1 = self.v
+ w2, x2, y2, z2 = other.v
+
+ w = w1*w2 - x1*x2 - y1*y2 - z1*z2
+ x = w1*x2 + x1*w2 + y1*z2 - z1*y2
+ y = w1*y2 - x1*z2 + y1*w2 + z1*x2
+ z = w1*z2 + x1*y2 - y1*x2 + z1*w2
+ return Quat(w, x, y, z)
+
+ def asMatrix(self):
+ w, x, y, z = self.v
+
+ v1 = Vector(1.0 - 2.0 * (y*y + z*z), 2.0 * (x*y + w*z), 2.0 * (x*z - w*y))
+ v2 = Vector(2.0 * (x*y - w*z), 1.0 - 2.0 * (x*x + z*z), 2.0 * (y*z + w*x))
+ v3 = Vector(2.0 * (x*z + w*y), 2.0 * (y*z - w*x), 1.0 - 2.0 * (x*x + y*y))
+
+ return Matrix(v1, v2, v3)
+
+# def asEuler1(self, transp = 0):
+# m = self.asMatrix()
+# if transp:
+# m = m.transposed()
+# return m.asEuler()
+
+ def asEuler(self, transp = 0):
+ from math import atan, asin, atan2
+ w, x, y, z = self.v
+ x2 = x*x
+ z2 = z*z
+ tmp = x2 - z2
+ r = (w*w + tmp - y*y )
+ phi_z = atan2(2.0 * (x * y + w * z) , r)
+ phi_y = asin(2.0 * (w * y - x * z))
+ phi_x = atan2(2.0 * (w * x + y * z) , (r - 2.0*tmp))
+
+ return phi_x, phi_y, phi_z
+
+def fromRotAxis(axis, phi):
+ """computes quaternion from (axis, phi)"""
+ phi2 = 0.5 * phi
+ s = sin(phi2)
+ return Quat(cos(phi2), axis[0] * s, axis[1] * s, axis[2] * s)
+
+#def fromEuler1(eul):
+ #qx = fromRotAxis((1.0, 0.0, 0.0), eul[0])
+ #qy = fromRotAxis((0.0, 1.0, 0.0), eul[1])
+ #qz = fromRotAxis((0.0, 0.0, 1.0), eul[2])
+ #return qz * qy * qx
+
+def fromEuler(eul):
+ from math import sin, cos
+ e = eul[0] / 2.0
+ cx = cos(e)
+ sx = sin(e)
+ e = eul[1] / 2.0
+ cy = cos(e)
+ sy = sin(e)
+ e = eul[2] / 2.0
+ cz = cos(e)
+ sz = sin(e)
+
+ w = cx * cy * cz - sx * sy * sz
+ x = sx * cy * cz - cx * sy * sz
+ y = cx * sy * cz + sx * cy * sz
+ z = cx * cy * sz + sx * sy * cz
+ return Quat(w, x, y, z)
diff --git a/intern/python/modules/util/tree.py b/intern/python/modules/util/tree.py
new file mode 100644
index 00000000000..313159239c6
--- /dev/null
+++ b/intern/python/modules/util/tree.py
@@ -0,0 +1,215 @@
+# Base class for a tree structure
+# Object-oriented programming course, winter term 1997
+#
+# (c) Martin Strubel, Fakultaet fuer Physik, Universitaet Konstanz
+# (strubi@gandalf.physik.uni-konstanz.de)
+
+# updated 08.2001
+
+"""Simple binary tree module
+
+ This module demonstrates a binary tree class.
+
+ Example::
+
+ a = [5, 8, 8, 3, 7, 9]
+ t1 = Tree()
+ t1.fromList(a)
+
+ Operations on tree nodes are done by writing a simple operator class::
+
+ class myOp:
+ def __init__(self):
+ ...
+ def operate(self, node):
+ do_something(node)
+
+ and calling the recursive application::
+
+ op = myOp()
+ t1.recurse(op)
+
+ Objects inserted into the tree can be of any kind, as long as they define a
+ comparison operation.
+"""
+
+def recurse(node, do):
+ if node == None:
+ return
+ recurse(node.left, do)
+ do(node)
+ recurse(node.right, do)
+
+class Nullnode:
+ def __init__(self):
+ self.left = None
+ self.right = None
+ self.depth = 0
+
+ def recurse(self, do):
+ if self == Nil:
+ return
+ self.left.recurse(do)
+ do(self)
+ self.right.recurse(do)
+
+Nil = Nullnode()
+
+def nothing(x):
+ return x
+
+class Node(Nullnode):
+ def __init__(self, data = None):
+ self.left = Nil
+ self.right = Nil
+ self.data = data
+ self.depth = 0
+
+ def __repr__(self):
+ return "Node: %s" % self.data
+
+ def insert(self, node):
+ if node.data < self.data:
+ if self.left != Nil:
+ return self.left.insert(node)
+ else:
+ node.depth = self.depth + 1
+ self.left = node
+ # print "inserted left"
+ return self
+
+ elif node.data > self.data:
+ if self.right != Nil:
+ return self.right.insert(node)
+ else:
+ node.depth = self.depth + 1
+ self.right = node
+ # print "inserted right"
+ return self
+ else:
+ return self.insert_equal(node)
+
+ def find(self, node, do = nothing):
+ if node.data < self.data:
+ if self.left != Nil:
+ return self.left.find(node, do)
+ else:
+ return self
+ elif node.data > self.data:
+ if self.right != Nil:
+ return self.right.find(node, do)
+ else:
+ return self
+ else:
+ return do(self)
+
+ def remove(self, node):
+ newpar
+ return self
+ def insert_equal(self, node):
+ #print "insert:",
+ self.equal(node)
+ return self
+ def found_equal(self, node):
+ self.equal(node)
+ def equal(self, node):
+ # handle special
+ print "node (%s) is equal self (%s)" % (node, self)
+ def copy(self):
+ n = Node(self.data)
+ return n
+
+ def recursecopy(self):
+ n = Node()
+ n.data = self.data
+ n.flag = self.flag
+ if self.left != Nil:
+ n.left = self.left.recursecopy()
+ if self.right != Nil:
+ n.right = self.right.recursecopy()
+
+ return n
+
+class NodeOp:
+ def __init__(self):
+ self.list = []
+ def copy(self, node):
+ self.list.append(node.data)
+
+class Tree:
+ def __init__(self, root = None):
+ self.root = root
+ self.n = 0
+ def __radd__(self, other):
+ print other
+ t = self.copy()
+ t.merge(other)
+ return t
+ def __repr__(self):
+ return "Tree with %d elements" % self.n
+ def insert(self, node):
+ if self.root == None:
+ self.root = node
+ else:
+ self.root.insert(node)
+ self.n += 1
+ def recurse(self, do):
+ if self.root == None:
+ return
+ self.root.recurse(do)
+ def find(self, node):
+ return self.root.find(node)
+ def remove(self, node):
+ self.root.remove(node)
+ def copy(self):
+ "make true copy of self"
+ t = newTree()
+ c = NodeOp()
+ self.recurse(c.copy)
+ t.fromList(c.list)
+ return t
+ def asList(self):
+ c = NodeOp()
+ self.recurse(c.copy)
+ return c.list
+ def fromList(self, list):
+ for item in list:
+ n = Node(item)
+ self.insert(n)
+ def insertcopy(self, node):
+ n = node.copy()
+ self.insert(n)
+ def merge(self, other):
+ other.recurse(self.insertcopy)
+# EXAMPLE:
+
+newTree = Tree
+
+def printnode(x):
+ print "Element: %s, depth: %s" % (x, x.depth)
+
+def test():
+ a = [5, 8, 8, 3, 7, 9]
+ t1 = Tree()
+ t1.fromList(a)
+
+ b = [12, 4, 56, 7, 34]
+ t2 = Tree()
+ t2.fromList(b)
+
+ print "tree1:"
+ print t1.asList()
+ print "tree2:"
+ print t2.asList()
+ print '-----'
+ print "Trees can be added:"
+
+
+ t3 = t1 + t2
+ print t3.asList()
+ print "..or alternatively merged:"
+ t1.merge(t2)
+ print t1.asList()
+
+if __name__ == '__main__':
+ test()
diff --git a/intern/python/modules/util/vect.py b/intern/python/modules/util/vect.py
new file mode 100644
index 00000000000..3724079519b
--- /dev/null
+++ b/intern/python/modules/util/vect.py
@@ -0,0 +1,480 @@
+#------------------------------------------------------------------------------
+# simple 3D vector / matrix class
+#
+# (c) 9.1999, Martin Strubel // onk@section5.de
+# updated 4.2001
+#
+# This module consists of a rather low level command oriented
+# and a more OO oriented part for 3D vector/matrix manipulation
+#
+# For documentation, please look at the EXAMPLE code below - execute by:
+#
+# > python vect.py
+#
+#
+# permission to use in scientific and free programs granted
+# In doubt, please contact author.
+#
+# history:
+#
+# 1.5: Euler/Rotation matrix support moved here
+# 1.4: high level Vector/Matrix classes extended/improved
+#
+
+"""Vector and matrix math module
+
+ Version 1.5
+ by onk@section5.de
+
+ This is a lightweight 3D matrix and vector module, providing basic vector
+ and matrix math plus a more object oriented layer.
+
+ For examples, look at vect.test()
+"""
+
+VERSION = 1.5
+
+TOLERANCE = 0.0000001
+
+VectorType = 'Vector3'
+MatrixType = 'Matrix3'
+FloatType = type(1.0)
+
+def dot(x, y):
+ "(x,y) - Returns the dot product of vector 'x' and 'y'"
+ return (x[0] * y[0] + x[1] * y[1] + x[2] * y[2])
+
+def cross(x, y):
+ "(x,y) - Returns the cross product of vector 'x' and 'y'"
+ return (x[1] * y[2] - x[2] * y[1],
+ x[2] * y[0] - x[0] * y[2],
+ x[0] * y[1] - x[1] * y[0])
+
+def matrix():
+ "Returns Unity matrix"
+ return ((1.0, 0.0, 0.0), (0.0, 1.0, 0.0), (0.0, 0.0, 1.0))
+
+def matxvec(m, x):
+ "y = matxvec(m,x) - Returns product of Matrix 'm' and vector 'x'"
+ vx = m[0][0] * x[0] + m[1][0] * x[1] + m[2][0] * x[2]
+ vy = m[0][1] * x[0] + m[1][1] * x[1] + m[2][1] * x[2]
+ vz = m[0][2] * x[0] + m[1][2] * x[1] + m[2][2] * x[2]
+ return (vx, vy, vz)
+
+def matfromnormal(z, y = (0.0, 1.0, 0.0)):
+ """(z, y) - returns transformation matrix for local coordinate system
+ where 'z' = local z, with optional *up* axis 'y'"""
+ y = norm3(y)
+ x = cross(y, z)
+ y = cross(z, x)
+ return (x, y, z)
+
+def matxmat(m, n):
+ "(m,n) - Returns matrix product of 'm' and 'n'"
+ return (matxvec(m, n[0]), matxvec(m, n[1]), matxvec(m, n[2]))
+
+def len(x):
+ "(x) - Returns the length of vector 'x'"
+ import math
+ return math.sqrt(x[0]*x[0] + x[1]*x[1] + x[2]*x[2])
+
+len3 = len # compatibility reasons
+
+def norm3(x):
+ "(x) - Returns the vector 'x' normed to 1.0"
+ import math
+ r = math.sqrt(x[0]*x[0] + x[1]*x[1] + x[2]*x[2])
+ return (x[0]/r, x[1]/r, x[2]/r)
+
+def add3(x, y):
+ "(x,y) - Returns vector ('x' + 'y')"
+ return (x[0]+y[0], x[1]+y[1], x[2]+y[2])
+
+def sub3(x, y):
+ "(x,y) - Returns vector ('x' - 'y')"
+ return ((x[0] - y[0]), (x[1] - y[1]), (x[2] - y[2]))
+
+def dist3(x, y):
+ "(x,y) - Returns euclidian distance from Point 'x' to 'y'"
+ return len3(sub3(x, y))
+
+def scale3(s, x):
+ "(s,x) - Returns the vector 'x' scaled by 's'"
+ return (s*x[0], s*x[1], s*x[2])
+
+def scalemat(s,m):
+ "(s,m) - Returns the Matrix 'm' scaled by 's'"
+ return (scale3(s, m[0]), scale3(s, m[1]), scale3(s,m[2]))
+
+def invmatdet(m):
+ """n, det = invmat(m) - Inverts matrix without determinant correction.
+ Inverse matrix 'n' and Determinant 'det' are returned"""
+
+ # Matrix: (row vectors)
+ # 00 10 20
+ # 01 11 21
+ # 02 12 22
+
+ wk = [0.0, 0.0, 0.0]
+
+ t = m[1][1] * m[2][2] - m[1][2] * m[2][1]
+ wk[0] = t
+ det = t * m[0][0]
+
+ t = m[2][1] * m[0][2] - m[0][1] * m[2][2]
+ wk[1] = t
+ det = det + t * m[1][0]
+
+ t = m[0][1] * m[1][2] - m[1][1] * m[0][2]
+ wk[2] = t
+ det = det + t * m[2][0]
+
+ v0 = (wk[0], wk[1], wk[2])
+
+ t = m[2][0] * m[1][2] - m[1][0] * m[2][2]
+ wk[0] = t
+ det = det + t * m[0][1]
+
+ t = m[0][0] * m[2][2] - m[0][2] * m[2][0]
+ wk[1] = t
+ det = det + t * m[1][1]
+
+ t = m[1][0] * m[0][2] - m[0][0] * m[1][2]
+ wk[2] = t
+ det = det + t * m[2][1]
+
+ v1 = (wk[0], wk[1], wk[2])
+
+ t = m[1][0] * m[2][1] - m[1][1] * m[2][0]
+ wk[0] = t
+ det = det + t * m[0][2]
+
+ t = m[2][0] * m[0][1] - m[0][0] * m[2][1]
+ wk[1] = t
+ det = det + t * m[1][2]
+
+ t = m[0][0] * m[1][1] - m[1][0] * m[0][1]
+ wk[2] = t
+ det = det + t * m[2][2]
+
+ v2 = (wk[0], wk[1], wk[2])
+ # det = 3 * determinant
+ return ((v0,v1,v2), det/3.0)
+
+def invmat(m):
+ "(m) - Inverts the 3x3 matrix 'm', result in 'n'"
+ n, det = invmatdet(m)
+ if det < 0.000001:
+ raise ZeroDivisionError, "minor rank matrix"
+ d = 1.0/det
+ return (scale3(d, n[0]),
+ scale3(d, n[1]),
+ scale3(d, n[2]))
+
+def transmat(m):
+ # can be used to invert orthogonal rotation matrices
+ "(m) - Returns transposed matrix of 'm'"
+ return ((m[0][0], m[1][0], m[2][0]),
+ (m[0][1], m[1][1], m[2][1]),
+ (m[0][2], m[1][2], m[2][2]))
+
+def coplanar(verts):
+ "checks whether list of 4 vertices is coplanar"
+ v1 = verts[0]
+ v2 = verts[1]
+ a = sub3(v2, v1)
+ v1 = verts[1]
+ v2 = verts[2]
+ b = sub3(v2, v1)
+ if dot(cross(a,b), sub3(verts[3] - verts[2])) < 0.0001:
+ return 1
+ return 0
+
+################################################################################
+# Matrix / Vector highlevel
+# (and slower)
+# TODO: include better type checks !
+
+class Vector:
+ """Vector class
+
+ This vector class provides vector operations as addition, multiplication, etc.
+
+ Usage::
+
+ v = Vector(x, y, z)
+
+ where 'x', 'y', 'z' are float values, representing coordinates.
+ Note: This datatype emulates a float triple."""
+
+ def __init__(self, x = 0.0, y = 0.0, z = 0.0):
+ # don't change these to lists, very ugly referencing details...
+ self.v = (x, y, z)
+ # ... can lead to same data being shared by several matrices..
+ # (unless you want this to happen)
+ self.type = VectorType
+
+ def __neg__(self):
+ return self.new(-self.v[0], -self.v[1], -self.v[2])
+
+ def __getitem__(self, i):
+ "Tuple emulation"
+ return self.v[i]
+
+# def __setitem__(self, i, arg):
+# self.v[i] = arg
+
+ def new(self, *args):
+ return Vector(args[0], args[1], args[2])
+
+ def __cmp__(self, v):
+ "Comparison only supports '=='"
+ if self[0] == v[0] and self[1] == v[1] and self[1] == v[1]:
+ return 0
+ return 1
+
+ def __add__(self, v):
+ "Addition of 'Vector' objects"
+ return self.new(self[0] + v[0],
+ self[1] + v[1],
+ self[2] + v[2])
+
+ def __sub__(self, v):
+ "Subtraction of 'Vector' objects"
+ return self.new(self[0] - v[0],
+ self[1] - v[1],
+ self[2] - v[2])
+
+ def __rmul__(self, s): # scaling by s
+ return self.new(s * self[0], s * self[1], s * self[2])
+
+ def __mul__(self, t): # dot product
+ """Left multiplikation supports:
+
+ - scaling with a float value
+
+ - Multiplikation with *Matrix* object"""
+
+ if type(t) == FloatType:
+ return self.__rmul__(t)
+ elif t.type == MatrixType:
+ return Matrix(self[0] * t[0], self[1] * t[1], self[2] * t[2])
+ else:
+ return dot(self, t)
+
+ def cross(self, v):
+ "(Vector v) - returns the cross product of 'self' with 'v'"
+ return self.new(self[1] * v[2] - self[2] * v[1],
+ self[2] * v[0] - self[0] * v[2],
+ self[0] * v[1] - self[1] * v[0])
+
+ def __repr__(self):
+ return "(%.3f, %.3f, %.3f)" % (self.v[0], self.v[1], self.v[2])
+
class Matrix(Vector):
    """Matrix class

    This class represents a 3x3 matrix as a vector of three row Vectors.

    Usage::

        M = Matrix(v1, v2, v3)

    where 'v'n are Vector class instances (the rows).
    Note: This datatype emulates a 3x3 float array."""

    def __init__(self, v1 = Vector(1.0, 0.0, 0.0),
                       v2 = Vector(0.0, 1.0, 0.0),
                       v3 = Vector(0.0, 0.0, 1.0)):
        # Rows are kept in a list (mutable), unlike Vector's tuple,
        # so __setitem__ can replace individual rows.
        self.v = [v1, v2, v3]
        self.type = MatrixType

    def __setitem__(self, i, arg):
        "Replaces row 'i' with 'arg'."
        self.v[i] = arg

    def new(self, *args):
        "Factory used by the arithmetic methods; builds a fresh Matrix from three rows."
        return Matrix(args[0], args[1], args[2])

    def __repr__(self):
        return "Matrix:\n %s\n %s\n %s\n" % (self.v[0], self.v[1], self.v[2])

    def __mul__(self, m):
        """Left multiplication supported with:

        - Scalar (float)

        - Matrix

        - Vector: row_vector * matrix; same as self.transposed() * vector

        Raises TypeError for any other operand.
"""
        if type(m) == FloatType:
            return self.__rmul__(m)
        # Bug fix: the original wrapped everything in a bare try/except
        # that converted *any* internal error to TypeError and silently
        # returned None for objects carrying an unrecognized .type.
        mtype = getattr(m, 'type', None)
        if mtype == MatrixType:
            M = matxmat(self, m)
            return self.new(Vector(M[0][0], M[0][1], M[0][2]),
                            Vector(M[1][0], M[1][1], M[1][2]),
                            Vector(M[2][0], M[2][1], M[2][2]))
        if mtype == VectorType:
            v = matxvec(self, m)
            return Vector(v[0], v[1], v[2])
        raise TypeError("bad multiplicator type")

    def inverse(self):
        """returns the matrix inverse"""
        M = invmat(self)
        return self.new(Vector(M[0][0], M[0][1], M[0][2]),
                        Vector(M[1][0], M[1][1], M[1][2]),
                        Vector(M[2][0], M[2][1], M[2][2]))

    def transposed(self):
        "returns the transposed matrix (rows and columns exchanged)"
        M = self
        # Bug fix: the original mixed up indices (it used M[1][0] and
        # M[2][0] where M[0][1] / M[0][2] belong), so the result was not
        # actually the transpose.
        return self.new(Vector(M[0][0], M[1][0], M[2][0]),
                        Vector(M[0][1], M[1][1], M[2][1]),
                        Vector(M[0][2], M[1][2], M[2][2]))

    def det(self):
        """returns the determinant"""
        M, det = invmatdet(self)
        return det

    def tr(self):
        """returns trace (sum of diagonal elements) of matrix"""
        return self.v[0][0] + self.v[1][1] + self.v[2][2]

    def __rmul__(self, m):
        "Right multiplication supported with a float scalar only."
        if type(m) != FloatType:
            raise TypeError("bad multiplicator type")
        return self.new(m * self[0],
                        m * self[1],
                        m * self[2])

    def __div__(self, m):
        """Division supported with:

        - Scalar

        - Matrix: a / b is equivalent to a * b.inverse()
"""
        if type(m) == FloatType:
            m = 1.0 / m
            return m * self
        elif m.type == MatrixType:
            # Bug fix: the original returned self.inverse() * m, which is
            # neither a * b.inverse() nor the documented b.inverse() * a;
            # e.g. (m * m) / m yielded m.inverse() instead of m. The form
            # chosen here matches __rdiv__: x / M == x * M.inverse().
            return self * m.inverse()
        else:
            raise TypeError("bad multiplicator type")

    def __rdiv__(self, m):
        "Right division of matrix equivalent to multiplication with matrix.inverse()"
        return m * self.inverse()

    def asEuler(self):
        """returns Matrix 'self' as Eulers. Note that this not the only result, due to
the nature of sin() and cos(). The Matrix MUST be a rotation matrix, i.e. orthogonal and
normalized."""
        from math import cos, sin, acos, asin, atan2, atan
        mat = self.v
        sy = mat[0][2]
        # Clamp for numerical stability: values just outside [-1, 1] are
        # rounding noise, anything further off is a genuine error.
        # (Bug fix: the original only clamped the upper bound, so a value
        # like -1.0000001 crashed asin() instead of being clamped.)
        if sy > 1.0:
            if sy > 1.0 + TOLERANCE:
                raise RuntimeError("FATAL: bad matrix given")
            sy = 1.0
        elif sy < -1.0:
            if sy < -(1.0 + TOLERANCE):
                raise RuntimeError("FATAL: bad matrix given")
            sy = -1.0
        phi_y = -asin(sy)

        if abs(sy) > (1.0 - TOLERANCE):
            # Gimbal lock: phi_x can be arbitrarily chosen, we set it = 0.0
            phi_x = 0.0
            sz = mat[1][0]
            cz = mat[2][0]
            # Bug fix: atan2 instead of atan(sz/cz) -- avoids a
            # ZeroDivisionError when cz == 0 and picks the right quadrant.
            phi_z = atan2(sz, cz)
        else:
            cy = cos(phi_y)
            cz = mat[0][0] / cy
            sz = mat[0][1] / cy
            phi_z = atan2(sz, cz)

            sx = mat[1][2] / cy
            cx = mat[2][2] / cy
            phi_x = atan2(sx, cx)
        return phi_x, phi_y, phi_z
+
# Canonical basis vectors of R^3
Ex = Vector(1.0, 0.0, 0.0)
Ey = Vector(0.0, 1.0, 0.0)
Ez = Vector(0.0, 0.0, 1.0)

# The 3x3 identity matrix
One = Matrix(Ex, Ey, Ez)
# Coordinate origin as a plain float triple
orig = (0.0, 0.0, 0.0)
+
def rotmatrix(phi_x, phi_y, phi_z, reverse = 0):
    """Creates a rotation matrix from euler angles. Rotations are applied
in the order X, then Y, then Z. If the reverse order is desired, transpose
the resulting matrix afterwards."""
    from math import sin, cos

    cx, sx = cos(phi_x), sin(phi_x)
    cy, sy = cos(phi_y), sin(phi_y)
    cz, sz = cos(phi_z), sin(phi_z)

    # one elementary rotation per axis, composed right-to-left (X first)
    rot_x = Matrix(Ex, Vector(0.0, cx, sx), Vector(0.0, -sx, cx))
    rot_y = Matrix(Vector(cy, 0.0, -sy), Ey, Vector(sy, 0.0, cy))
    rot_z = Matrix(Vector(cz, sz, 0.0), Vector(-sz, cz, 0.0), Ez)

    return rot_z * rot_y * rot_x
+
+
def test():
    "The module test: exercises Vector and Matrix operations, printing results."
    print "********************"
    print "VECTOR TEST"
    print "********************"

    a = Vector(1.1, 0.0, 0.0)
    b = Vector(0.0, 2.0, 0.0)

    print "vectors: a = %s, b = %s" % (a, b)
    print "dot:", a * a
    print "scalar:", 4.0 * a
    print "scalar:", a * 4.0
    print "cross:", a.cross(b)
    print "add:", a + b
    print "sub:", a - b
    print "sub:", b - a
    print
    print "********************"
    print "MATRIX TEST"
    print "********************"
    c = a.cross(b)
    # rows a, b, c are mutually orthogonal, so m is invertible
    m = Matrix(a, b, c)
    v = Vector(1.0, 2.0, 3.0)
    E = One
    print "Original", m
    print "det", m.det()
    print "add", m + m
    print "scalar", 0.5 * m
    print "sub", m - 0.5 * m
    print "vec mul", v * m
    print "mul vec", m * v
    n = m * m
    print "mul:", n
    print "matrix div (mul inverse):", n / m
    print "scal div (inverse):", 1.0 / m
    print "mat * inverse", m * m.inverse()
    print "mat * inverse (/-notation):", m * (1.0 / m)
    print "div scal", m / 2.0

    # matrices with rank < dimension have det = 0.0
    # (rows a and 2.0 * a are linearly dependent)
    m = Matrix(a, 2.0 * a, c)
    print "minor rang", m
    print "det:", m.det()

if __name__ == '__main__':
    test()
+
diff --git a/intern/python/modules/util/vectools.py b/intern/python/modules/util/vectools.py
new file mode 100644
index 00000000000..860cd568875
--- /dev/null
+++ b/intern/python/modules/util/vectools.py
@@ -0,0 +1,142 @@
+"""Vector tools
+
+ Various vector tools, basing on vect.py"""
+
+from vect import *
+
+EPSILON = 0.0001
+
def vecarea(v, w):
    """Signed area of the parallelogram spanned by the 2D vectors 'v'
and 'w' (the z coordinate, if present, is ignored)."""
    return v[0] * w[1] - w[0] * v[1]
+
def intersect(a1, b1, a2, b2):
    """Computes the 2D intersection of edges ('a1' -> 'b1') and ('a2' -> 'b2'),
returning the normalized intersection parameter 's' of edge (a1 -> b1).
If 0.0 < 's' <= 1.0, the two edges intersect at the point::

    v = a1 + s * (b1 - a1)

A return value of 0.0 also signals "no intersection" (parallel edges or
parameters out of range).
"""
    # edge direction vectors and the offset between the two start points
    vx, vy = b1[0] - a1[0], b1[1] - a1[1]
    wx, wy = b2[0] - a2[0], b2[1] - a2[1]
    dx = a2[0] - a1[0]
    dy = a2[1] - a1[1]

    denom = wx * vy - wy * vx
    if denom == 0:
        # parallel (or degenerate) edges
        return 0.0
    s = (wx * dy - wy * dx) / denom
    t = (vx * dy - vy * dx) / denom
    if not (0.0 <= s <= 1.0):
        return 0.0
    if not (0.0 <= t <= 1.0):
        return 0.0
    return s
+
def insidetri(a, b, c, x):
    "Returns 1 if 'x' is inside the 2D triangle ('a' -> 'b' -> 'c'), 0 otherwise"
    # Normalized edge directions measured from vertex 'a'; vecarea only
    # reads x/y, so the z coordinate is effectively ignored.
    v1 = norm3(sub3(b, a))
    v2 = norm3(sub3(c, a))
    v3 = norm3(sub3(x, a))

    # 'x' must lie on the same side of edge (a -> b) as 'c' does: the
    # signed area with 'x' (a2) must have the same sign as, and no larger
    # magnitude than, the signed area with 'c' (a1).
    a1 = (vecarea(v1, v2))
    a2 = (vecarea(v1, v3))
    lo = min(0.0, a1)
    hi = max(0.0, a1)

    if a2 < lo or a2 > hi: return 0

    # Same test repeated relative to vertex 'b'.
    # NOTE(review): v1 still holds the direction of (a -> b) here, while
    # v2/v3 are rebuilt from 'c'/'x' relative to 'b' -- confirm this mixed
    # use is intended and not a leftover from copy/paste.
    v2 = norm3(sub3(b, c))
    v3 = norm3(sub3(b, x))

    a1 = (vecarea(v1, v2))
    a2 = (vecarea(v1, v3))

    lo = min(0.0, a1)
    hi = max(0.0, a1)

    if a2 < lo or a2 > hi: return 0

    return 1
+
def plane_fromface(v1, v2, v3):
    "Returns plane (normal, point) from 3 vertices 'v1', 'v2', 'v3'"
    # two edges spanning the face; their cross product is the face normal
    edge1 = sub3(v2, v1)
    edge2 = sub3(v3, v1)
    normal = norm3(cross(edge1, edge2))
    return normal, v1
+
def inside_halfspace(vec, plane):
    "Returns 1 if point 'vec' is inside the halfspace defined by 'plane', 0 otherwise"
    normal, base = plane
    # project the offset from the plane's base point onto the unit normal;
    # a negative projection means the point lies behind the plane
    if dot(norm3(normal), sub3(vec, base)) < 0.0:
        return 1
    return 0
+
def half_space(vec, plane, tol = EPSILON):
    """Determine whether point 'vec' is inside (return value -1), outside (+1),
or lying in the plane 'plane' (return 0) of a numerical thickness
'tol' = 'EPSILON' (default)."""
    normal, base = plane
    offset = dot(normal, sub3(vec, base))
    # scale the tolerance band by |normal| so the classification does not
    # depend on the normal's length
    band = len3(normal) * tol

    if offset < -band:
        return -1
    if offset > band:
        return 1
    return 0
+
+
def plane_edge_intersect(plane, edge):
    """Returns normalized factor 's' of the intersection of 'edge' with 'plane'.
The point of intersection on the plane is::

    p = edge[0] + s * (edge[1] - edge[0])

Returns None when the edge is parallel to the plane.
"""
    n, t = plane # normal, translation
    # Build a local frame whose z axis is the plane normal.
    mat = matfromnormal(n)
    # NOTE(review): using the transpose as the inverse assumes
    # matfromnormal() returns an orthonormal matrix -- confirm.
    mat = transmat(mat) # inverse
    v = matxvec(mat, sub3(edge[0], t)) #transformed edge points
    w = matxvec(mat, sub3(edge[1], t))
    w = sub3(w, v)
    # In the local frame the plane is z == 0, so the intersection reduces
    # to finding the zero of the edge's z component.
    if w[2] != 0.0:
        s = -v[2] / w[2]
        return s
    else:
        return None
+
def insidecube(v):
    "Returns 1 if point 'v' lies inside the unit cube [0,1]^3 (bounds inclusive), 0 otherwise"
    for axis in (0, 1, 2):
        if not (0.0 <= v[axis] <= 1.0):
            return 0
    return 1
+
+
def flatproject(verts, up):
    """Projects a 3D set (list of vertices) 'verts' into a 2D set according to
an 'up'-vector"""
    # local frame: z = face normal, y = up direction, x = their cross product
    z, base = plane_fromface(verts[0], verts[1], verts[2])
    y = norm3(up)
    x = cross(y, z)
    result = []
    for vertex in verts:
        rel = (vertex[0] - base[0], vertex[1] - base[1], vertex[2] - base[2])
        # project the offset onto the x/y axes of the local frame
        result.append((dot(x, rel), dot(y, rel)))
    return result
+
+
+
+
diff --git a/intern/python/modules/vrml/__init__.py b/intern/python/modules/vrml/__init__.py
new file mode 100644
index 00000000000..9e2ebe0fb86
--- /dev/null
+++ b/intern/python/modules/vrml/__init__.py
@@ -0,0 +1 @@
+"""The VRML import module"""
diff --git a/intern/python/modules/vrml/basenodes.py b/intern/python/modules/vrml/basenodes.py
new file mode 100644
index 00000000000..484f000738a
--- /dev/null
+++ b/intern/python/modules/vrml/basenodes.py
@@ -0,0 +1,974 @@
+from scenegraph import Prototype, NULL, sceneGraph, IS, Script, ExternalPrototype, ROUTE
+PROTO = Prototype
+EXTERNPROTO = ExternalPrototype
+
+Anchor = Prototype( "Anchor",
+ {
+ 'bboxSize':('bboxSize', 'SFVec3f', 0),
+ 'children':('children', 'MFNode', 1),
+ 'parameter':('parameter', 'MFString', 1),
+ 'url':('url', 'MFString', 1),
+ 'description':('description', 'SFString', 1),
+ 'bboxCenter':('bboxCenter', 'SFVec3f', 0),
+ },
+ {
+ 'bboxSize':[-1.0, -1.0, -1.0],
+ 'children':[],
+ 'parameter':[],
+ 'url':[],
+ 'description':'',
+ 'bboxCenter':[0.0, 0.0, 0.0],
+ },
+ {
+ 'addChildren':('addChildren', 'MFNode', 0),
+ 'removeChildren':('removeChildren', 'MFNode', 0),
+ },
+)
+Appearance = Prototype( "Appearance",
+ {
+ 'material':('material', 'SFNode', 1),
+ 'texture':('texture', 'SFNode', 1),
+ 'textureTransform':('textureTransform', 'SFNode', 1),
+ },
+ {
+ 'material':NULL,
+ 'texture':NULL,
+ 'textureTransform':NULL,
+ },
+ {
+ },
+)
+AudioClip = Prototype( "AudioClip",
+ {
+ 'pitch':('pitch', 'SFFloat', 1),
+ 'loop':('loop', 'SFBool', 1),
+ 'description':('description', 'SFString', 1),
+ 'stopTime':('stopTime', 'SFTime', 1),
+ 'startTime':('startTime', 'SFTime', 1),
+ 'url':('url', 'MFString', 1),
+ },
+ {
+ 'pitch':1.0,
+ 'loop':0,
+ 'description':'',
+ 'stopTime':0.0,
+ 'startTime':0.0,
+ 'url':[],
+ },
+ {
+ 'isActive':('isActive', 'SFBool', 1),
+ 'duration_changed':('duration_changed', 'SFTime', 1),
+ },
+)
+Background = Prototype( "Background",
+ {
+ 'groundAngle':('groundAngle', 'MFFloat', 1),
+ 'skyAngle':('skyAngle', 'MFFloat', 1),
+ 'frontUrl':('frontUrl', 'MFString', 1),
+ 'bottomUrl':('bottomUrl', 'MFString', 1),
+ 'groundColor':('groundColor', 'MFColor', 1),
+ 'backUrl':('backUrl', 'MFString', 1),
+ 'skyColor':('skyColor', 'MFColor', 1),
+ 'topUrl':('topUrl', 'MFString', 1),
+ 'rightUrl':('rightUrl', 'MFString', 1),
+ 'leftUrl':('leftUrl', 'MFString', 1),
+ },
+ {
+ 'groundAngle':[],
+ 'skyAngle':[],
+ 'frontUrl':[],
+ 'bottomUrl':[],
+ 'groundColor':[],
+ 'backUrl':[],
+ 'skyColor':[[0.0, 0.0, 0.0]],
+ 'topUrl':[],
+ 'rightUrl':[],
+ 'leftUrl':[],
+ },
+ {
+ 'isBound':('isBound', 'SFBool', 1),
+ 'set_bind':('set_bind', 'SFBool', 0),
+ },
+)
+Billboard = Prototype( "Billboard",
+ {
+ 'bboxCenter':('bboxCenter', 'SFVec3f', 0),
+ 'bboxSize':('bboxSize', 'SFVec3f', 0),
+ 'children':('children', 'MFNode', 1),
+ 'axisOfRotation':('axisOfRotation', 'SFVec3f', 1),
+ },
+ {
+ 'bboxCenter':[0.0, 0.0, 0.0],
+ 'bboxSize':[-1.0, -1.0, -1.0],
+ 'children':[],
+ 'axisOfRotation':[0.0, 1.0, 0.0],
+ },
+ {
+ 'addChildren':('addChildren', 'MFNode', 0),
+ 'removeChildren':('removeChildren', 'MFNode', 0),
+ },
+)
+Box = Prototype( "Box",
+ {
+ 'size':('size', 'SFVec3f', 0),
+ },
+ {
+ 'size':[2.0, 2.0, 2.0],
+ },
+ {
+ },
+)
+
+
+Collision = Prototype( "Collision",
+ {
+ 'bboxCenter':('bboxCenter', 'SFVec3f', 0),
+ 'bboxSize':('bboxSize', 'SFVec3f', 0),
+ 'children':('children', 'MFNode', 1),
+ 'collide':('collide', 'SFBool', 1),
+ 'proxy':('proxy', 'SFNode', 0),
+ },
+ {
+ 'bboxCenter':[0.0, 0.0, 0.0],
+ 'bboxSize':[-1.0, -1.0, -1.0],
+ 'children':[],
+ 'collide':1,
+ 'proxy':NULL,
+ },
+ {
+ 'addChildren':('addChildren', 'MFNode', 0),
+ 'removeChildren':('removeChildren', 'MFNode', 0),
+ 'collideTime':('collideTime', 'SFTime', 1),
+ },
+)
+Color = Prototype( "Color",
+ {
+ 'color':('color', 'MFColor', 1),
+ },
+ {
+ 'color':[],
+ },
+ {
+ },
+)
+ColorInterpolator = Prototype( "ColorInterpolator",
+ {
+ 'key':('key', 'MFFloat', 1),
+ 'keyValue':('keyValue', 'MFColor', 1),
+ },
+ {
+ 'key':[],
+ 'keyValue':[],
+ },
+ {
+ 'value_changed':('value_changed', 'SFColor', 1),
+ 'set_fraction':('set_fraction', 'SFFloat', 0),
+ },
+)
+Cone = Prototype( "Cone",
+ {
+ 'bottomRadius':('bottomRadius', 'SFFloat', 0),
+ 'side':('side', 'SFBool', 0),
+ 'bottom':('bottom', 'SFBool', 0),
+ 'height':('height', 'SFFloat', 0),
+ },
+ {
+ 'bottomRadius':1.0,
+ 'side':1,
+ 'bottom':1,
+ 'height':2.0,
+ },
+ {
+ },
+)
+Coordinate = Prototype( "Coordinate",
+ {
+ 'point':('point', 'MFVec3f', 1),
+ },
+ {
+ 'point':[],
+ },
+ {
+ },
+)
+CoordinateInterpolator = Prototype( "CoordinateInterpolator",
+ {
+ 'key':('key', 'MFFloat', 1),
+ 'keyValue':('keyValue', 'MFVec3f', 1),
+ },
+ {
+ 'key':[],
+ 'keyValue':[],
+ },
+ {
+ 'value_changed':('value_changed', 'MFVec3f', 1),
+ 'set_fraction':('set_fraction', 'SFFloat', 0),
+ },
+)
+Cylinder = Prototype( "Cylinder",
+ {
+ 'bottom':('bottom', 'SFBool', 0),
+ 'side':('side', 'SFBool', 0),
+ 'radius':('radius', 'SFFloat', 0),
+ 'top':('top', 'SFBool', 0),
+ 'height':('height', 'SFFloat', 0),
+ },
+ {
+ 'bottom':1,
+ 'side':1,
+ 'radius':1.0,
+ 'top':1,
+ 'height':2.0,
+ },
+ {
+ },
+)
+CylinderSensor = Prototype( "CylinderSensor",
+ {
+ 'maxAngle':('maxAngle', 'SFFloat', 1),
+ 'autoOffset':('autoOffset', 'SFBool', 1),
+ 'minAngle':('minAngle', 'SFFloat', 1),
+ 'enabled':('enabled', 'SFBool', 1),
+ 'offset':('offset', 'SFFloat', 1),
+ 'diskAngle':('diskAngle', 'SFFloat', 1),
+ },
+ {
+ 'maxAngle':-1.0,
+ 'autoOffset':1,
+ 'minAngle':0.0,
+ 'enabled':1,
+ 'offset':0.0,
+ 'diskAngle':0.262,
+ },
+ {
+ 'rotation_changed':('rotation_changed', 'SFRotation', 1),
+ 'isActive':('isActive', 'SFBool', 1),
+ 'trackPoint_changed':('trackPoint_changed', 'SFVec3f', 1),
+ },
+)
+DirectionalLight = Prototype( "DirectionalLight",
+ {
+ 'color':('color', 'SFColor', 1),
+ 'ambientIntensity':('ambientIntensity', 'SFFloat', 1),
+ 'intensity':('intensity', 'SFFloat', 1),
+ 'on':('on', 'SFBool', 1),
+ 'direction':('direction', 'SFVec3f', 1),
+ },
+ {
+ 'color':[1.0, 1.0, 1.0],
+ 'ambientIntensity':0.0,
+ 'intensity':1.0,
+ 'on':1,
+ 'direction':[0.0, 0.0, -1.0],
+ },
+ {
+ },
+)
+ElevationGrid = Prototype( "ElevationGrid",
+ {
+ 'xSpacing':('xSpacing', 'SFFloat', 0),
+ 'zSpacing':('zSpacing', 'SFFloat', 0),
+ 'xDimension':('xDimension', 'SFInt32', 0),
+ 'colorPerVertex':('colorPerVertex', 'SFBool', 0),
+ 'height':('height', 'MFFloat', 0),
+ 'texCoord':('texCoord', 'SFNode', 1),
+ 'normalPerVertex':('normalPerVertex', 'SFBool', 0),
+ 'ccw':('ccw', 'SFBool', 0),
+ 'color':('color', 'SFNode', 1),
+ 'normal':('normal', 'SFNode', 1),
+ 'creaseAngle':('creaseAngle', 'SFFloat', 0),
+ 'solid':('solid', 'SFBool', 0),
+ 'zDimension':('zDimension', 'SFInt32', 0),
+ },
+ {
+ 'xSpacing':0.0,
+ 'zSpacing':0.0,
+ 'xDimension':0,
+ 'colorPerVertex':1,
+ 'height':[],
+ 'texCoord':NULL,
+ 'normalPerVertex':1,
+ 'ccw':1,
+ 'color':NULL,
+ 'normal':NULL,
+ 'creaseAngle':0.0,
+ 'solid':1,
+ 'zDimension':0,
+ },
+ {
+ 'set_height':('set_height', 'MFFloat', 0),
+ },
+)
+Extrusion = Prototype( "Extrusion",
+ {
+ 'endCap':('endCap', 'SFBool', 0),
+ 'scale':('scale', 'MFVec2f', 0),
+ 'ccw':('ccw', 'SFBool', 0),
+ 'crossSection':('crossSection', 'MFVec2f', 0),
+ 'solid':('solid', 'SFBool', 0),
+ 'convex':('convex', 'SFBool', 0),
+ 'creaseAngle':('creaseAngle', 'SFFloat', 0),
+ 'spine':('spine', 'MFVec3f', 0),
+ 'beginCap':('beginCap', 'SFBool', 0),
+ 'orientation':('orientation', 'MFRotation', 0),
+ },
+ {
+ 'endCap':1,
+ 'scale':[[1.0, 1.0]],
+ 'ccw':1,
+ 'crossSection':[[1.0, 1.0], [1.0, -1.0], [-1.0, -1.0], [-1.0, 1.0], [1.0, 1.0]],
+ 'solid':1,
+ 'convex':1,
+ 'creaseAngle':0.0,
+ 'spine':[[0.0, 0.0, 0.0], [0.0, 1.0, 0.0]],
+ 'beginCap':1,
+ 'orientation':[[0.0, 0.0, 1.0, 0.0]],
+ },
+ {
+ 'set_scale':('set_scale', 'MFVec2f', 0),
+ 'set_spine':('set_spine', 'MFVec3f', 0),
+ 'set_orientation':('set_orientation', 'MFRotation', 0),
+ 'set_crossSection':('set_crossSection', 'MFVec2f', 0),
+ },
+)
+Fog = Prototype( "Fog",
+ {
+ 'fogType':('fogType', 'SFString', 1),
+ 'color':('color', 'SFColor', 1),
+ 'visibilityRange':('visibilityRange', 'SFFloat', 1),
+ },
+ {
+ 'fogType':'LINEAR',
+ 'color':[1.0, 1.0, 1.0],
+ 'visibilityRange':0.0,
+ },
+ {
+ 'isBound':('isBound', 'SFBool', 1),
+ 'set_bind':('set_bind', 'SFBool', 0),
+ },
+)
+FontStyle = Prototype( "FontStyle",
+ {
+ 'justify':('justify', 'MFString', 0),
+ 'leftToRight':('leftToRight', 'SFBool', 0),
+ 'spacing':('spacing', 'SFFloat', 0),
+ 'horizontal':('horizontal', 'SFBool', 0),
+ 'language':('language', 'SFString', 0),
+ 'topToBottom':('topToBottom', 'SFBool', 0),
+ 'size':('size', 'SFFloat', 0),
+ 'style':('style', 'SFString', 0),
+ 'family':('family', 'SFString', 0),
+ },
+ {
+ 'justify':['BEGIN'],
+ 'leftToRight':1,
+ 'spacing':1.0,
+ 'horizontal':1,
+ 'language':'',
+ 'topToBottom':1,
+ 'size':1.0,
+ 'style':'PLAIN',
+ 'family':'SERIF',
+ },
+ {
+ },
+)
+Group = Prototype( "Group",
+ {
+ 'bboxSize':('bboxSize', 'SFVec3f', 0),
+ 'children':('children', 'MFNode', 1),
+ 'bboxCenter':('bboxCenter', 'SFVec3f', 0),
+ },
+ {
+ 'bboxSize':[-1.0, -1.0, -1.0],
+ 'children':[],
+ 'bboxCenter':[0.0, 0.0, 0.0],
+ },
+ {
+ 'addChildren':('addChildren', 'MFNode', 0),
+ 'removeChildren':('removeChildren', 'MFNode', 0),
+ },
+)
+ImageTexture = Prototype( "ImageTexture",
+ {
+ 'repeatS':('repeatS', 'SFBool', 0),
+ 'url':('url', 'MFString', 1),
+ 'repeatT':('repeatT', 'SFBool', 0),
+ },
+ {
+ 'repeatS':1,
+ 'url':[],
+ 'repeatT':1,
+ },
+ {
+ },
+)
+IndexedFaceSet = Prototype( "IndexedFaceSet",
+ {
+ 'texCoordIndex':('texCoordIndex', 'MFInt32', 0),
+ 'normalIndex':('normalIndex', 'MFInt32', 0),
+ 'coordIndex':('coordIndex', 'MFInt32', 0),
+ 'convex':('convex', 'SFBool', 0),
+ 'texCoord':('texCoord', 'SFNode', 1),
+ 'normalPerVertex':('normalPerVertex', 'SFBool', 0),
+ 'coord':('coord', 'SFNode', 1),
+ 'ccw':('ccw', 'SFBool', 0),
+ 'color':('color', 'SFNode', 1),
+ 'normal':('normal', 'SFNode', 1),
+ 'creaseAngle':('creaseAngle', 'SFFloat', 0),
+ 'solid':('solid', 'SFBool', 0),
+ 'colorPerVertex':('colorPerVertex', 'SFBool', 0),
+ 'colorIndex':('colorIndex', 'MFInt32', 0),
+ },
+ {
+ 'texCoordIndex':[],
+ 'normalIndex':[],
+ 'coordIndex':[],
+ 'convex':1,
+ 'texCoord':NULL,
+ 'normalPerVertex':1,
+ 'coord':NULL,
+ 'ccw':1,
+ 'color':NULL,
+ 'normal':NULL,
+ 'creaseAngle':0.0,
+ 'solid':1,
+ 'colorPerVertex':1,
+ 'colorIndex':[],
+ },
+ {
+ 'set_normalIndex':('set_normalIndex', 'MFInt32', 0),
+ 'set_colorIndex':('set_colorIndex', 'MFInt32', 0),
+ 'set_texCoordIndex':('set_texCoordIndex', 'MFInt32', 0),
+ 'set_coordIndex':('set_coordIndex', 'MFInt32', 0),
+ },
+)
+IndexedLineSet = Prototype( "IndexedLineSet",
+ {
+ 'coordIndex':('coordIndex', 'MFInt32', 0),
+ 'coord':('coord', 'SFNode', 1),
+ 'colorIndex':('colorIndex', 'MFInt32', 0),
+ 'colorPerVertex':('colorPerVertex', 'SFBool', 0),
+ 'color':('color', 'SFNode', 1),
+ },
+ {
+ 'coordIndex':[],
+ 'coord':NULL,
+ 'colorIndex':[],
+ 'colorPerVertex':1,
+ 'color':NULL,
+ },
+ {
+ 'set_colorIndex':('set_colorIndex', 'MFInt32', 0),
+ 'set_coordIndex':('set_coordIndex', 'MFInt32', 0),
+ },
+)
+Inline = Prototype( "Inline",
+ {
+ 'url':('url', 'MFString', 1),
+ 'bboxSize':('bboxSize', 'SFVec3f', 0),
+ 'bboxCenter':('bboxCenter', 'SFVec3f', 0),
+ },
+ {
+ 'url':[],
+ 'bboxSize':[-1.0, -1.0, -1.0],
+ 'bboxCenter':[0.0, 0.0, 0.0],
+ },
+ {
+ },
+)
+LOD = Prototype( "LOD",
+ {
+ 'level':('level', 'MFNode', 1),
+ 'range':('range', 'MFFloat', 0),
+ 'center':('center', 'SFVec3f', 0),
+ },
+ {
+ 'level':[],
+ 'range':[],
+ 'center':[0.0, 0.0, 0.0],
+ },
+ {
+ },
+)
+Material = Prototype( "Material",
+ {
+ 'emissiveColor':('emissiveColor', 'SFColor', 1),
+ 'transparency':('transparency', 'SFFloat', 1),
+ 'shininess':('shininess', 'SFFloat', 1),
+ 'diffuseColor':('diffuseColor', 'SFColor', 1),
+ 'ambientIntensity':('ambientIntensity', 'SFFloat', 1),
+ 'specularColor':('specularColor', 'SFColor', 1),
+ },
+ {
+ 'emissiveColor':[0.0, 0.0, 0.0],
+ 'transparency':0.0,
+ 'shininess':0.2,
+ 'diffuseColor':[0.8, 0.8, 0.8],
+ 'ambientIntensity':0.2,
+ 'specularColor':[0.0, 0.0, 0.0],
+ },
+ {
+ },
+)
+MovieTexture = Prototype( "MovieTexture",
+ {
+ 'loop':('loop', 'SFBool', 1),
+ 'speed':('speed', 'SFFloat', 1),
+ 'repeatT':('repeatT', 'SFBool', 0),
+ 'repeatS':('repeatS', 'SFBool', 0),
+ 'url':('url', 'MFString', 1),
+ 'startTime':('startTime', 'SFTime', 1),
+ 'stopTime':('stopTime', 'SFTime', 1),
+ },
+ {
+ 'loop':0,
+ 'speed':1.0,
+ 'repeatT':1,
+ 'repeatS':1,
+ 'url':[],
+ 'startTime':0.0,
+ 'stopTime':0.0,
+ },
+ {
+ 'isActive':('isActive', 'SFBool', 1),
+ 'duration_changed':('duration_changed', 'SFFloat', 1),
+ },
+)
+NavigationInfo = Prototype( "NavigationInfo",
+ {
+ 'avatarSize':('avatarSize', 'MFFloat', 1),
+ 'speed':('speed', 'SFFloat', 1),
+ 'headlight':('headlight', 'SFBool', 1),
+ 'visibilityLimit':('visibilityLimit', 'SFFloat', 1),
+ 'type':('type', 'MFString', 1),
+ },
+ {
+ 'avatarSize':[0.25, 1.6, 0.75],
+ 'speed':1.0,
+ 'headlight':1,
+ 'visibilityLimit':0.0,
+ 'type':['WALK'],
+ },
+ {
+ 'isBound':('isBound', 'SFBool', 1),
+ 'set_bind':('set_bind', 'SFBool', 0),
+ },
+)
+Normal = Prototype( "Normal",
+ {
+ 'vector':('vector', 'MFVec3f', 1),
+ },
+ {
+ 'vector':[],
+ },
+ {
+ },
+)
+NormalInterpolator = Prototype( "NormalInterpolator",
+ {
+ 'key':('key', 'MFFloat', 1),
+ 'keyValue':('keyValue', 'MFVec3f', 1),
+ },
+ {
+ 'key':[],
+ 'keyValue':[],
+ },
+ {
+ 'value_changed':('value_changed', 'MFVec3f', 1),
+ 'set_fraction':('set_fraction', 'SFFloat', 0),
+ },
+)
+OrientationInterpolator = Prototype( "OrientationInterpolator",
+ {
+ 'key':('key', 'MFFloat', 1),
+ 'keyValue':('keyValue', 'MFRotation', 1),
+ },
+ {
+ 'key':[],
+ 'keyValue':[],
+ },
+ {
+ 'value_changed':('value_changed', 'SFRotation', 1),
+ 'set_fraction':('set_fraction', 'SFFloat', 0),
+ },
+)
+PixelTexture = Prototype( "PixelTexture",
+ {
+ 'repeatS':('repeatS', 'SFBool', 0),
+ 'image':('image', 'SFImage', 1),
+ 'repeatT':('repeatT', 'SFBool', 0),
+ },
+ {
+ 'repeatS':1,
+ 'image':[0, 0, 0],
+ 'repeatT':1,
+ },
+ {
+ },
+)
+PlaneSensor = Prototype( "PlaneSensor",
+ {
+ 'offset':('offset', 'SFVec3f', 1),
+ 'autoOffset':('autoOffset', 'SFBool', 1),
+ 'minPosition':('minPosition', 'SFVec2f', 1),
+ 'enabled':('enabled', 'SFBool', 1),
+ 'maxPosition':('maxPosition', 'SFVec2f', 1),
+ },
+ {
+ 'offset':[0.0, 0.0, 0.0],
+ 'autoOffset':1,
+ 'minPosition':[0.0, 0.0],
+ 'enabled':1,
+ 'maxPosition':[-1.0, -1.0],
+ },
+ {
+ 'translation_changed':('translation_changed', 'SFVec3f', 1),
+ 'isActive':('isActive', 'SFBool', 1),
+ 'trackPoint_changed':('trackPoint_changed', 'SFVec3f', 1),
+ },
+)
+PointLight = Prototype( "PointLight",
+ {
+ 'ambientIntensity':('ambientIntensity', 'SFFloat', 1),
+ 'color':('color', 'SFColor', 1),
+ 'location':('location', 'SFVec3f', 1),
+ 'radius':('radius', 'SFFloat', 1),
+ 'attenuation':('attenuation', 'SFVec3f', 1),
+ 'intensity':('intensity', 'SFFloat', 1),
+ 'on':('on', 'SFBool', 1),
+ },
+ {
+ 'ambientIntensity':0.0,
+ 'color':[1.0, 1.0, 1.0],
+ 'location':[0.0, 0.0, 0.0],
+ 'radius':100.0,
+ 'attenuation':[1.0, 0.0, 0.0],
+ 'intensity':1.0,
+ 'on':1,
+ },
+ {
+ },
+)
+PointSet = Prototype( "PointSet",
+ {
+ 'coord':('coord', 'SFNode', 1),
+ 'color':('color', 'SFNode', 1),
+ },
+ {
+ 'coord':NULL,
+ 'color':NULL,
+ },
+ {
+ },
+)
+PositionInterpolator = Prototype( "PositionInterpolator",
+ {
+ 'key':('key', 'MFFloat', 1),
+ 'keyValue':('keyValue', 'MFVec3f', 1),
+ },
+ {
+ 'key':[],
+ 'keyValue':[],
+ },
+ {
+ 'value_changed':('value_changed', 'SFVec3f', 1),
+ 'set_fraction':('set_fraction', 'SFFloat', 0),
+ },
+)
+ProximitySensor = Prototype( "ProximitySensor",
+ {
+ 'size':('size', 'SFVec3f', 1),
+ 'center':('center', 'SFVec3f', 1),
+ 'enabled':('enabled', 'SFBool', 1),
+ },
+ {
+ 'size':[0.0, 0.0, 0.0],
+ 'center':[0.0, 0.0, 0.0],
+ 'enabled':1,
+ },
+ {
+ 'enterTime':('enterTime', 'SFTime', 1),
+ 'isActive':('isActive', 'SFBool', 1),
+ 'orientation_changed':('orientation_changed', 'SFRotation', 1),
+ 'exitTime':('exitTime', 'SFTime', 1),
+ 'position_changed':('position_changed', 'SFVec3f', 1),
+ },
+)
+ScalarInterpolator = Prototype( "ScalarInterpolator",
+ {
+ 'key':('key', 'MFFloat', 1),
+ 'keyValue':('keyValue', 'MFFloat', 1),
+ },
+ {
+ 'key':[],
+ 'keyValue':[],
+ },
+ {
+ 'value_changed':('value_changed', 'SFFloat', 1),
+ 'set_fraction':('set_fraction', 'SFFloat', 0),
+ },
+)
+Shape = Prototype( "Shape",
+ {
+ 'appearance':('appearance', 'SFNode', 1),
+ 'geometry':('geometry', 'SFNode', 1),
+ },
+ {
+ 'appearance':NULL,
+ 'geometry':NULL,
+ },
+ {
+ },
+)
+Sound = Prototype( "Sound",
+ {
+ 'spatialize':('spatialize', 'SFBool', 0),
+ 'maxFront':('maxFront', 'SFFloat', 1),
+ 'minBack':('minBack', 'SFFloat', 1),
+ 'maxBack':('maxBack', 'SFFloat', 1),
+ 'minFront':('minFront', 'SFFloat', 1),
+ 'location':('location', 'SFVec3f', 1),
+ 'intensity':('intensity', 'SFFloat', 1),
+ 'direction':('direction', 'SFVec3f', 1),
+ 'source':('source', 'SFNode', 1),
+ 'priority':('priority', 'SFFloat', 1),
+ },
+ {
+ 'spatialize':1,
+ 'maxFront':10.0,
+ 'minBack':1.0,
+ 'maxBack':10.0,
+ 'minFront':1.0,
+ 'location':[0.0, 0.0, 0.0],
+ 'intensity':1.0,
+ 'direction':[0.0, 0.0, 1.0],
+ 'source':NULL,
+ 'priority':0.0,
+ },
+ {
+ },
+)
+Sphere = Prototype( "Sphere",
+ {
+ 'radius':('radius', 'SFFloat', 0),
+ },
+ {
+ 'radius':1.0,
+ },
+ {
+ },
+)
+SphereSensor = Prototype( "SphereSensor",
+ {
+ 'offset':('offset', 'SFRotation', 1),
+ 'autoOffset':('autoOffset', 'SFBool', 1),
+ 'enabled':('enabled', 'SFBool', 1),
+ },
+ {
+ 'offset':[0.0, 1.0, 0.0, 0.0],
+ 'autoOffset':1,
+ 'enabled':1,
+ },
+ {
+ 'rotation_changed':('rotation_changed', 'SFRotation', 1),
+ 'isActive':('isActive', 'SFBool', 1),
+ 'trackPoint_changed':('trackPoint_changed', 'SFVec3f', 1),
+ },
+)
+SpotLight = Prototype( "SpotLight",
+ {
+ 'attenuation':('attenuation', 'SFVec3f', 1),
+ 'ambientIntensity':('ambientIntensity', 'SFFloat', 1),
+ 'cutOffAngle':('cutOffAngle', 'SFFloat', 1),
+ 'direction':('direction', 'SFVec3f', 1),
+ 'color':('color', 'SFColor', 1),
+ 'location':('location', 'SFVec3f', 1),
+ 'radius':('radius', 'SFFloat', 1),
+ 'intensity':('intensity', 'SFFloat', 1),
+ 'beamWidth':('beamWidth', 'SFFloat', 1),
+ 'on':('on', 'SFBool', 1),
+ },
+ {
+ 'attenuation':[1.0, 0.0, 0.0],
+ 'ambientIntensity':0.0,
+ 'cutOffAngle':0.785398,
+ 'direction':[0.0, 0.0, -1.0],
+ 'color':[1.0, 1.0, 1.0],
+ 'location':[0.0, 0.0, 0.0],
+ 'radius':100.0,
+ 'intensity':1.0,
+ 'beamWidth':1.570796,
+ 'on':1,
+ },
+ {
+ },
+)
+Switch = Prototype( "Switch",
+ {
+ 'choice':('choice', 'MFNode', 1),
+ 'whichChoice':('whichChoice', 'SFInt32', 1),
+ },
+ {
+ 'choice':[],
+ 'whichChoice':-1,
+ },
+ {
+ },
+)
+Text = Prototype( "Text",
+ {
+ 'maxExtent':('maxExtent', 'SFFloat', 1),
+ 'string':('string', 'MFString', 1),
+ 'fontStyle':('fontStyle', 'SFNode', 1),
+ 'length':('length', 'MFFloat', 1),
+ },
+ {
+ 'maxExtent':0.0,
+ 'string':[],
+ 'fontStyle':NULL,
+ 'length':[],
+ },
+ {
+ },
+)
+TextureCoordinate = Prototype( "TextureCoordinate",
+ {
+ 'point':('point', 'MFVec2f', 1),
+ },
+ {
+ 'point':[],
+ },
+ {
+ },
+)
+TextureTransform = Prototype( "TextureTransform",
+ {
+ 'center':('center', 'SFVec2f', 1),
+ 'scale':('scale', 'SFVec2f', 1),
+ 'rotation':('rotation', 'SFFloat', 1),
+ 'translation':('translation', 'SFVec2f', 1),
+ },
+ {
+ 'center':[0.0, 0.0],
+ 'scale':[1.0, 1.0],
+ 'rotation':0.0,
+ 'translation':[0.0, 0.0],
+ },
+ {
+ },
+)
+TimeSensor = Prototype( "TimeSensor",
+ {
+ 'loop':('loop', 'SFBool', 1),
+ 'cycleInterval':('cycleInterval', 'SFTime', 1),
+ 'enabled':('enabled', 'SFBool', 1),
+ 'stopTime':('stopTime', 'SFTime', 1),
+ 'startTime':('startTime', 'SFTime', 1),
+ },
+ {
+ 'loop':0,
+ 'cycleInterval':1.0,
+ 'enabled':1,
+ 'stopTime':0.0,
+ 'startTime':0.0,
+ },
+ {
+ 'fraction_changed':('fraction_changed', 'SFFloat', 1),
+ 'isActive':('isActive', 'SFBool', 1),
+ 'time':('time', 'SFTime', 1),
+ 'cycleTime':('cycleTime', 'SFTime', 1),
+ },
+)
+TouchSensor = Prototype( "TouchSensor",
+ {
+ 'enabled':('enabled', 'SFBool', 1),
+ },
+ {
+ 'enabled':1,
+ },
+ {
+ 'hitNormal_changed':('hitNormal_changed', 'SFVec3f', 1),
+ 'hitPoint_changed':('hitPoint_changed', 'SFVec3f', 1),
+ 'touchTime':('touchTime', 'SFTime', 1),
+ 'hitTexCoord_changed':('hitTexCoord_changed', 'SFVec2f', 1),
+ 'isActive':('isActive', 'SFBool', 1),
+ 'isOver':('isOver', 'SFBool', 1),
+ },
+)
+Transform = Prototype( "Transform",
+ {
+ 'bboxSize':('bboxSize', 'SFVec3f', 0),
+ 'children':('children', 'MFNode', 1),
+ 'scaleOrientation':('scaleOrientation', 'SFRotation', 1),
+ 'rotation':('rotation', 'SFRotation', 1),
+ 'translation':('translation', 'SFVec3f', 1),
+ 'bboxCenter':('bboxCenter', 'SFVec3f', 0),
+ 'center':('center', 'SFVec3f', 1),
+ 'scale':('scale', 'SFVec3f', 1),
+ },
+ {
+ 'bboxSize':[-1.0, -1.0, -1.0],
+ 'children':[],
+ 'scaleOrientation':[0.0, 0.0, 1.0, 0.0],
+ 'rotation':[0.0, 0.0, 1.0, 0.0],
+ 'translation':[0.0, 0.0, 0.0],
+ 'bboxCenter':[0.0, 0.0, 0.0],
+ 'center':[0.0, 0.0, 0.0],
+ 'scale':[1.0, 1.0, 1.0],
+ },
+ {
+ 'addChildren':('addChildren', 'MFNode', 0),
+ 'removeChildren':('removeChildren', 'MFNode', 0),
+ },
+)
+Viewpoint = Prototype( "Viewpoint",
+ {
+ 'jump':('jump', 'SFBool', 1),
+ 'orientation':('orientation', 'SFRotation', 1),
+ 'fieldOfView':('fieldOfView', 'SFFloat', 1),
+ 'position':('position', 'SFVec3f', 1),
+ 'description':('description', 'SFString', 0),
+ },
+ {
+ 'jump':1,
+ 'orientation':[0.0, 0.0, 1.0, 0.0],
+ 'fieldOfView':0.785398,
+ 'position':[0.0, 0.0, 10.0],
+ 'description':'',
+ },
+ {
+ 'isBound':('isBound', 'SFBool', 1),
+ 'set_bind':('set_bind', 'SFBool', 0),
+ 'bindTime':('bindTime', 'SFTime', 1),
+ },
+)
+VisibilitySensor = Prototype( "VisibilitySensor",
+ {
+ 'size':('size', 'SFVec3f', 1),
+ 'center':('center', 'SFVec3f', 1),
+ 'enabled':('enabled', 'SFBool', 1),
+ },
+ {
+ 'size':[0.0, 0.0, 0.0],
+ 'center':[0.0, 0.0, 0.0],
+ 'enabled':1,
+ },
+ {
+ 'exitTime':('exitTime', 'SFTime', 1),
+ 'isActive':('isActive', 'SFBool', 1),
+ 'enterTime':('enterTime', 'SFTime', 1),
+ },
+)
+WorldInfo = Prototype( "WorldInfo",
+ {
+ 'title':('title', 'SFString', 0),
+ 'info':('info', 'MFString', 0),
+ },
+ {
+ 'title':'',
+ 'info':[],
+ },
+ {
+ },
+)
diff --git a/intern/python/modules/vrml/fieldcoercian.py b/intern/python/modules/vrml/fieldcoercian.py
new file mode 100644
index 00000000000..a90f1101b5a
--- /dev/null
+++ b/intern/python/modules/vrml/fieldcoercian.py
@@ -0,0 +1,310 @@
+'''
+Field coercian routines.
+
+To replace the field coercian routines, you must edit
+basenodes.py and node.py to import some other coercian
+routines. Basenodes.py is for use by the parser, node
+is used by each node as it checks the validity of its
+attributes.
+'''
+
+import types, sys, string
+from utils import typeclasses, collapse
+
class FieldCoercian:
    '''
    A Field Coercion class allows for creating new behaviours
    when dealing with the conversion of fields to-and-from
    particular field types. This allows the programmer to
    use alternate representations of fields (such as matrix arrays)

    (Note: the class name keeps the original "Coercian" spelling, as
    other modules import it under that name.)
    '''
    def SFString( self, someobj, targetType=types.StringType, targetName='SFString', convertfunc=str ):
        '''
        Coerce someobj to an SFString (a plain string).
        Allowable types:
            simple string -> unchanged
            instance ( an IS ) -> unchanged
            sequence of length == 1 where first element is a string -> returns first element
            sequence of length > 1 where all elements are strings -> returns string.join( someobj, '')
        Raises ValueError for anything else.
        '''
        t = type(someobj)
        if t is targetType:
            return someobj
        if t in typeclasses.SequenceTypes:
            if len( someobj) == 1 and type( someobj[0] ) is targetType:
                return someobj[0] #
            elif len(someobj) > 1:
                try:
                    return string.join( someobj, '')
                except:
                    pass # is not a sequence of strings...
        ### if we get here, then an incorrect value was passed
        raise ValueError, """Attempted to set value for an %s field which is not compatible: %s"""%( targetName, `someobj` )
+
    def MFString( self, someobj, targetType=types.StringType, targetName='SFString', convertfunc=str ):
        '''
        Coerce someobj to an MFString (a list of strings).
        Allowable Types:
            simple string -> wrapped in a list
            instance (an IS ) -> unchanged
            sequence of strings (of any length) -> equivalent list returned
        Raises ValueError for anything else.
        '''
        t = type(someobj)
        if t is targetType: # a bare string...
            return [someobj]
        elif t in typeclasses.SequenceTypes: # is a sequence
            # filter is empty only when every element's type is targetType
            if not filter( lambda x, t=targetType: x is not t, map( type, someobj) ): # are all strings...
                if t is not types.ListType:
                    return list( someobj )
                else:
                    return someobj
        ### if we get here, then an incorrect value was passed
        raise ValueError, """Attempted to set value for an %s field which is not compatible: %s"""%( targetName, `someobj` )

    def SFBool( self, someobj, targetType=types.IntType, targetName='SFBool', convertfunc=int):
        '''
        Coerce someobj to an SFBool (integer 0 or 1).
        Allowable Types:
            instance (an IS) -> unchanged (returned from inside a wrapping sequence)
            Any object which is testable for truth/falsehood -> 1 or 0 respectively
        SFBool should always succeed
        '''
        if (type(someobj) in typeclasses.SequenceTypes):
            try:
                # an IS node wrapped in a sequence is passed through untouched
                if hasattr( someobj[0], '__gi__'):
                    return someobj[0]
                else:
                    someobj = someobj[0]
            except IndexError: # is a null MFNode
                pass
        if someobj:
            return 1
        else:
            return 0
+
    def SFNode( self, someobj, targetType=types.InstanceType, targetName='SFNode', convertfunc=None):
        '''
        Coerce someobj to an SFNode (a single node instance).
        Allowable Types:
            instance of a Node -> unchanged
            instance (an IS or USE) -> unchanged
            sequence of length == 1 where first element is as above -> return first element
        Raises ValueError for anything else.
        '''
        # __gi__ is about the only marker available without requiring that
        # elements inherit from Node
        if hasattr( someobj, '__gi__'):
            return someobj
        elif (type(someobj) in typeclasses.SequenceTypes):
            try:
                if hasattr( someobj[0], '__gi__'):
                    return someobj[0]
            except IndexError: # is a null MFNode
                pass
        raise ValueError, """Attempted to set value for an %s field which is not compatible: %s"""%( targetName, `someobj` )

    def MFNode( self, someobj, targetType=types.InstanceType, targetName='MFNode', convertfunc=None):
        '''
        Coerce someobj to an MFNode (a list of node instances).
        Allowable Types:
            instance (an IS) -> unchanged
            instance of a Node -> wrapped with a list
            sequence where all elements are nodes -> returned as list of same
        Raises ValueError for anything else.
        '''
        if hasattr( someobj, '__gi__') and someobj.__gi__ != "IS":
            # is this a bare SFNode? wrap with a list and return
            return [someobj]
        elif hasattr( someobj, "__gi__"): # is this an IS node
            return someobj
        elif type(someobj) in typeclasses.SequenceTypes:
            try:
                # raises AttributeError if any element lacks __gi__
                map( getattr, someobj, ['__gi__']*len(someobj) )
                # is this an IS node wrapped in a list?
                if len(someobj) == 1 and someobj[0].__gi__ == "IS":
                    return someobj[0]
                # okay, assume is really nodes...
                if type(someobj) is types.ListType:
                    return someobj
                else:
                    return list(someobj)
            except AttributeError: # something isn't a node
                pass
        raise ValueError, """Attempted to set value for an %s field which is not compatible: %s"""%( targetName, `someobj` )
+
    def SFNumber( self, someobj, targetType, targetName, convertfunc=int ):
        '''
        Coerce someobj to a single number of targetType via convertfunc.
        Allowable Types:
            bare number -> numerically coerced to correct type
            instance ( an IS ) -> unchanged
            sequence of length == 1 whose first element is convertible -> converted first element
        Raises ValueError for anything else.
        '''
        t = type(someobj)
        if t is targetType or t is types.InstanceType:
            return someobj
        elif t in typeclasses.NumericTypes:
            return convertfunc( someobj)
        elif t in typeclasses.SequenceTypes:
            # NOTE(review): `type( someobj[0] )` is always truthy, so this
            # condition reduces to len == 1; probably a membership test
            # (e.g. `in typeclasses.NumericTypes`) was intended -- confirm.
            if len( someobj) == 1 and type( someobj[0] ):
                return convertfunc( someobj[0] ) #
        ### if we get here, then an incorrect value was passed
        raise ValueError, """Attempted to set value for an %s field which is not compatible: %s"""%( targetName, `someobj` )
+ def MFInt32 ( self, someobject ):
+ ''' Convert value into a MFInt32 field value (preferably an array, otherwise a list of integers) '''
+ t = type(someobject)
+ value = None
+ if t in typeclasses.SequenceTypes: # is a sequence
+ try:
+ value = map( int, someobject)
+ except:
+ try:
+ value = map( int, collapse.collapse2_safe( someobject) )
+ except:
+ pass
+ elif t in typeclasses.NumericTypes or t is types.StringType:
+ value = [int(someobject)]
+ if value is None:
+ ### if we get here, then an incorrect value was passed
+ raise ValueError, """Attempted to set value for an %s field which is not compatible: %s"""%( targetName, `someobj` )
+ return value
+ SFImage = MFInt32
+ def MFFloat( self, someobject ):
+ ''' Convert value into a MFFloat field value (preferably an array, otherwise a list of integers) '''
+ t = type(someobject)
+ value = None
+ if t in typeclasses.SequenceTypes: # is a sequence
+ try:
+ value = map( float, someobject)
+ except:
+ try:
+ value = map( float, collapse.collapse2_safe( someobject))
+ except:
+ pass
+ elif t in typeclasses.NumericTypes or t is types.StringType:
+ value = [float(someobj)]
+ if value is None:
+ ### if we get here, then an incorrect value was passed
+ raise ValueError, """Attempted to set value for an %s field which is not compatible: %s"""%( targetName, `someobj` )
+ return value
    def SFVec3f (self, value):
        ''' Create a new SFVec3f value from value

        Returns a list of three floats; accepts either a 3-sequence or a
        sequence whose first element is a 3-sequence.  Raises ValueError
        otherwise.
        '''
        t = type(value)
        # chained assignment: both value and (x,y,z) are bound to the
        # converted float list
        try:
            value = x,y,z = map (float, value)
        except ValueError:
            # maybe it was wrapped in an extra sequence level
            try:
                value = (x,y,z) = map( float, value[0] )
            except (IndexError, ValueError):
                raise ValueError (''' Invalid value for field type SFVec3f: %s'''%(value))
        return value
    def SFRotation(self, value):
        ''' Create a new SFRotation value from value

        Returns a list of four floats (axis x,y,z + angle); accepts a
        4-sequence or a sequence wrapping one.  Raises ValueError otherwise.
        '''
        t = type(value)
        try:
            value = x,y,z, a = map (float, value)
        except ValueError:
            try:
                value = (x,y,z, a) = map( float, value[0] )
            except (IndexError, ValueError):
                raise ValueError (''' Invalid value for field type SFRotation: %s'''%(value))
        # get the normalized vector for x,y,z (normalisation currently disabled)
##        length = (x*x+y*y+z*z)**.5 or 0.0000
##        value = (x/length,y/length,z/length, a)
        return value
    def SFVec2f (self, value):
        ''' Create a new SFVec2f value from value

        Returns a list of two floats; accepts a 2-sequence or a sequence
        wrapping one.  Raises ValueError otherwise.
        NOTE(review): the error message below says SFVec3f -- looks like a
        copy/paste slip; left unchanged here as it is runtime text.
        '''
        t = type(value)
        try:
            value = x,y = map (float, value)
        except ValueError:
            try:
                value = (x,y) = map( float, value[0] )
            except (IndexError, ValueError):
                raise ValueError (''' Invalid value for field type SFVec3f: %s'''%(value))
        return value
+ def SFColor(self, value):
+ ''' Create a new SFVec3f value from value '''
+ t = type(value)
+ try:
+ r,g,b = map (float, value)
+ except ValueError:
+ try:
+ r,g,b = map( float, value[0] )
+ except (IndexError, ValueError):
+ raise ValueError (''' Invalid value for field type SFColor: %s'''%(value))
+ r = max( (0.0, min((r,1.0))) )
+ g = max( (0.0, min((g,1.0))) )
+ b = max( (0.0, min((b,1.0))) )
+ return value
+
    def MFCompoundNumber( self, someobj, targetName='SFVec3f', convertfunc=float, type=type):
        '''
        Coerce someobj to a list of compound values (vec2f/vec3f/rotation/color),
        delegating each group to the single-value coercer named by targetName.
        Allowable Types:
            instance ( an IS ) -> unchanged
            # instance ( a matrix ) -> reshaped (eventually)
            list of lists, sub-sequences of proper length -> unchanged
            sequence of numeric types of proper length -> converted to list, diced
        Raises ValueError for anything else.
        '''
##        if targetName == 'SFColor':
##            import pdb
##            pdb.set_trace()
        converter = getattr( self, targetName )
        t = type( someobj)
        reporterror = 0
        if t is types.InstanceType:
            return someobj
        elif t in typeclasses.SequenceTypes:
            if not someobj:
                return []
            if type( someobj[0] ) is not types.StringType and type( someobj[0] ) in typeclasses.SequenceTypes:
                # already a sequence of sub-sequences: convert each one
                try:
                    return map( converter, someobj )
                except ValueError:
                    pass
            elif type( someobj[0] ) in typeclasses.NumericTypes or type( someobj[0] ) is types.StringType:
                # a single-level list?
                base = map( convertfunc, someobj )
                # if we get here, someobj is a list
                # group size inferred from the type-name suffix
                if targetName[-2:] == '2f': # vec2f
                    tlen = 2
                elif targetName[-2:] == 'on': # rotation
                    tlen = 4
                else:
                    tlen = 3
                value = []
                while base:
                    value.append( converter( base[:tlen]) )
                    del base[:tlen]
                return value
        raise ValueError, """Attempted to set value for an %s field which is not compatible: %s"""%( targetName, `someobj` )
    def __call__( self, someobj, targetName):
        '''Coerce someobj to the field type named targetName via algomap.

        IS instances pass through untouched; everything else is handed to
        the registered handler with its pre-bound extra arguments.
        '''
        func, args = self.algomap[targetName]
##        try:
##            if targetName == 'SFInt32':
##                import pdb
##                pdb.set_trace()
        if hasattr( someobj, "__gi__") and someobj.__gi__ == "IS":
            return someobj
        else:
            return apply( func, (self, someobj)+args )
##        except TypeError:
##            print someobj, targetName
##            print func, args
##            raise
+
    # Dispatch table: field-type name -> (handler, extra args appended by
    # __call__).  NOTE(review): 'SFNode' appears twice (harmless duplicate),
    # and 'SFTime' reuses SFNumber with the reporting name 'SFFloat'.
    algomap = { \
        'SFString': (SFString, (types.StringType, 'SFString', str)), \
        'MFString': (MFString, (types.StringType, 'MFString', str)), \
        'SFInt32': (SFNumber, (types.IntType, 'SFInt32', int)), \
        'SFFloat': (SFNumber, (types.FloatType, 'SFFloat', float)), \
        'SFTime': (SFNumber, (types.FloatType, 'SFFloat', float)), \
        'SFColor': (SFColor, ()), \
        'SFVec2f': (SFVec2f, ()), \
        'SFVec3f': (SFVec3f, ()), \
        'SFNode': (SFNode, (types.InstanceType, 'SFNode', None)), \
        'SFBool': (SFBool, (types.IntType, 'SFBool', int)), \
        'SFNode': (SFNode, (types.InstanceType, 'SFNode', None)), \
        'MFInt32': (MFInt32, ()), \
        'SFImage': (MFInt32, ()), \
        'MFTime': (MFFloat, ()), \
        'MFFloat': (MFFloat, ()), \
        'MFColor': (MFCompoundNumber, ('SFColor', float)), \
        'MFVec2f': (MFCompoundNumber, ('SFVec2f', float)), \
        'MFVec3f': (MFCompoundNumber, ('SFVec3f', float)), \
        'SFRotation': (SFRotation, ()), \
        'MFRotation': (MFCompoundNumber, ('SFRotation', float)), \
        'MFNode': (MFNode, (types.InstanceType, 'MFNode', None)) \
        }

# Shared module-level coercion instance, imported by node/basenodes.
FIELDCOERCE = FieldCoercian ()
diff --git a/intern/python/modules/vrml/loader.py b/intern/python/modules/vrml/loader.py
new file mode 100644
index 00000000000..dd53fe49fd3
--- /dev/null
+++ b/intern/python/modules/vrml/loader.py
@@ -0,0 +1,97 @@
+# The VRML loader
+# supports gzipped files
+#
+# TODO: better progress monitoring
+
+import parser
+
def quiet(txt):
    '''No-op logger: silently discard *txt* (default debug target).'''
    pass

# Module-level debug hook; rebind to debug1 for verbose output.
debug = quiet

def debug1(txt):
    '''Verbose logger: print *txt* prefixed with "Loader:".'''
    print "Loader:", txt

# NOTE(review): appears unused within this module -- confirm before removing.
g_last = 0
+
def getFileType(file):
    """Identify *file* by its first line: 'gzip', 'vrml', or '' if unknown.

    The stream position is always rewound to the start before returning.
    """
    file.seek(0)
    first_line = file.readline()
    file.seek(0)
    if first_line[:3] == '\037\213\010':
        return "gzip"
    if first_line[:10] == '#VRML V2.0':
        return "vrml"
    return ""
+
class Loader:
    '''Loads a (possibly gzip-compressed) VRML97 file and parses it.'''
    def __init__(self, url, progress = None):
        # url: local file path (despite the name -- see XXX in load())
        self.url = url
        # logging hooks; all default to the module-level quiet/debug function
        self.debug = debug
        self.fail = debug
        self.monitor = debug
        # optional callable(fraction) -> falsy to cancel parsing
        self.progress = progress
        self.nodes = 0 # number of nodes parsed

    def getGzipFile(self, file):
        '''Return gzip file (only called when gzip type is recognised)

        Returns None if the gzip library is unavailable or the open fails.
        '''
        # we now have the local filename and the headers
        # read the first few bytes, check for gzip magic number
        self.monitor( "gzip-encoded file... loading gzip library")
        try:
            import gzip
            file = gzip.open(file,"rb")
            return file
        except ImportError, value:
            self.fail("Gzip library unavailable, compressed file cannot be read")
        except:
            self.fail("Failed to open Gzip file")

        return None

    def load(self):
        '''Read, optionally decompress, and parse the file.

        Returns the parsed scenegraph, or None on open/read failure;
        side effect: self.nodes is set to the parser's progress count.
        '''
        self.debug("try: load file from %s" % self.url)
        url = self.url

        # XXX url is treated as a plain local path here
        try:
            file = open(url, 'rb')
        except IOError, val:
            self.debug("couldn't open file %s" % url)
            return None

        if getFileType(file) == 'gzip':
            file.close()
            file = self.getGzipFile(url)
        try:
            data = file.read()
        except MemoryError, value:
            self.fail("Insufficient memory to load file as string", value)
            return None
        except IOError, value:
            self.fail("I/O Error while reading data from file %s "% url)
        p = parser.Parser(data)
        if self.progress:
            scenegraph = p.parse(self.progress)
            print "progress"
        else:
            scenegraph = p.parse()

        self.nodes = p.progresscount # progress
        del p
        return scenegraph
+
+
def load(url, progress = None):
    """Convenience wrapper: build a Loader for *url* and run it."""
    return Loader(url, progress).load()
+
def test(name = None):
    """Quick manual check: load *name*, defaulting to /tmp/gna.wrl."""
    return load(name or '/tmp/gna.wrl')
diff --git a/intern/python/modules/vrml/parser.py b/intern/python/modules/vrml/parser.py
new file mode 100644
index 00000000000..1f238126550
--- /dev/null
+++ b/intern/python/modules/vrml/parser.py
@@ -0,0 +1,426 @@
+from TextTools import TextTools
+
+from simpleparse import generator
+
+import scenegraph as proto
+import strop as string
+
# Fraction of the total import time attributed to the parse phase
# (scales the progress callback values in Parser.parse).
IMPORT_PARSE_TIME = 0.4
# Nodes nested deeper than this are not counted toward progress totals.
PROGRESS_DEPTH = 5

class UnfinishedError(Exception):
    '''Raised when parsing is cancelled or the input cannot be fully consumed.'''
    pass
+
+class Parser:
    def __init__( self, data ):
        '''Initialise the parser over the raw VRML text *data*.

        Note: readHeader() is called here and raises ValueError if *data*
        has no VRML97 header.
        '''
        self.data = data
        self.position = 0
        # root sceneGraph; the final return value of parse()
        self.result = proto.sceneGraph()
        self.finalised = None
        # stacks tracking the current nesting while dispatching tag trees
        self.sceneGraphStack = [self.result]
        self.prototypeStack = []
        self.nodeStack = []
        self.fieldTypeStack = []
        self.readHeader()
        self.depth = 0
        self.progresscount = 0
    def _lines( self, index=None ):
        # line count up to *index* (defaults to current position);
        # used only for error reporting
        if index is None:
            index = self.position
        return TextTools.countlines (self.data[:index])
    def parse( self, progressCallback=None ):
        '''Parse the whole file, optionally reporting progress.

        progressCallback -- callable(fraction); returning a falsy value
        cancels parsing.  Raises UnfinishedError on cancel or if trailing
        unparseable data remains.  Returns the root sceneGraph.
        '''
        datalength = float( len( self.data ))
        while self.readNext():
            if progressCallback:
                if not progressCallback(IMPORT_PARSE_TIME * self.position/datalength ):
                    raise UnfinishedError(
                        "Did not complete parsing, cancelled by user. Stopped at line %s" %(self._lines())
                    )
        if self.position < len( self.data ):
            raise UnfinishedError(
                '''Unable to complete parsing of file, stopped at line %s:\n%s...'''%(self._lines(), self.data[self.position:self.position+120])
            )
        return self.result
    def readHeader( self ):
        '''Read the file header

        Side effect: caches len(self.data) in self.datalength.  Raises
        ValueError when no VRML97 header can be found.
        '''
        success, tags, next = TextTools.tag( self.data, HEADERPARSER, self.position )
        if success:
            self.datalength = len( self.data )
            #print "header ok"
            return success
        else:
            # maybe the data is compressed; decompress() is currently a no-op
            try:
                self.decompress()
                success, tags, next = TextTools.tag( self.data, HEADERPARSER, self.position )
                self.datalength = len( self.data )
                return success
            except:
                raise ValueError( "Could not find VRML97 header in file!" )
    def readNext( self):
        '''Read the next root-level construct

        Returns a truthy value when an item was consumed, None at end of
        data or on parse failure (parse() then checks for leftovers).
        '''
        success, tags, next = TextTools.tag( self.data, ROOTITEMPARSER, self.position )
##        print 'readnext', success
        if self.position >= self.datalength:
            print 'reached file end'
            return None
        if success:
#            print ' successful parse'
            self.position = next
            map (self.rootItem_Item, tags )
            return success
        else:
            return None
    def rootItem (self, (type, start, stop, (item,))):
        ''' Process a single root item '''
        self.rootItem_Item( item )
    def rootItem_Item( self, item ):
        '''Dispatch one root item; append any returned node to the scenegraph.'''
        result = self._dispatch(item)
        if result is not None:
##            print "non-null result"
##            print id( self.sceneGraphStack[-1] ), id(self.result )
            self.sceneGraphStack[-1].children.append( result )
    def _getString (self, (tag, start, stop, sublist)):
        ''' Return the raw string for a given interval in the data '''
        return self.data [start: stop]
+
    def _dispatch (self, (tag, left, right, sublist)):
        ''' Dispatch to the appropriate processing function based on tag value

        The grammar's production names double as method names on this
        class; unknown tags indicate a parser/grammar mismatch.
        '''
##        print "dispatch", tag
        self.depth += 1
        # only shallow items count toward the progress total
        if self.depth < PROGRESS_DEPTH:
            self.progresscount += 1
        try:
            meth = getattr (self, tag)
        except AttributeError:
            raise AttributeError("Unknown parse tag '%s' found! Check the parser definition!" % (tag))
        ret = meth( (tag, left, right, sublist) )
        self.depth -= 1
        return ret
+
    def Proto(self, (tag, start, stop, sublist)):
        ''' Create a new prototype in the current sceneGraph '''
        # first entry is always ID
        ID = self._getString ( sublist [0])
        print "PROTO",ID
        newNode = proto.Prototype (ID)
##        print "\t",newNode
        # register the prototype under its ID in the current graph's namespace
        setattr ( self.sceneGraphStack [-1].protoTypes, ID, newNode)
        self.prototypeStack.append( newNode )
        # process the rest of the entries with the given stack
        map ( self._dispatch, sublist [1:] )
        self.prototypeStack.pop( )
    def fieldDecl(self,(tag, left, right, (exposure, datatype, name, field))):
        ''' Create a new field declaration for the current prototype'''
        # get the definition in recognizable format
        exposure = self._getString (exposure) == "exposedField"
        datatype = self._getString (datatype)
        name = self._getString (name)
        # get the vrml value for the field; Field() consults fieldTypeStack
        self.fieldTypeStack.append( datatype )
        field = self._dispatch (field)
        self.fieldTypeStack.pop( )
        self.prototypeStack[-1].addField ((name, datatype, exposure), field)
    def eventDecl(self,(tag, left, right, (direction, datatype, name))):
        '''Add an eventIn/eventOut declaration to the current prototype.'''
        # get the definition in recognizable format
        direction = self._getString (direction) == "eventOut"
        datatype = self._getString (datatype)
        name = self._getString (name)
        # get the vrml value for the field
        self.prototypeStack[-1].addEvent((name, datatype, direction))
    def decompress( self ):
        # placeholder: in-place decompression is not implemented here
        pass
    def ExternProto( self, (tag, start, stop, sublist)):
        ''' Create a new external prototype from a tag list'''
        # first entry is always ID
        ID = self._getString ( sublist [0])
        newNode = proto.Prototype (ID)
        setattr ( self.sceneGraphStack [-1].protoTypes, ID, newNode)
        self.prototypeStack.append( newNode )
        # process the rest of the entries with the given stack
        map ( self._dispatch, sublist [1:] )
        self.prototypeStack.pop( )
    def ExtProtoURL( self, (tag, start, stop, sublist)):
        ''' add the url to the external prototype '''
##        print sublist
        values = self.MFString( sublist )
        self.prototypeStack[-1].url = values
        return values
    def extFieldDecl(self, (tag, start, stop, (exposure, datatype, name))):
        ''' An external field declaration, no default value '''
        # get the definition in recognizable format
        exposure = self._getString (exposure) == "exposedField"
        datatype = self._getString (datatype)
        name = self._getString (name)
        # get the vrml value for the field
        self.prototypeStack[-1].addField ((name, datatype, exposure))
    def ROUTE(self, (tag, start, stop, names )):
        ''' Create a new route object, add the current sceneGraph '''
        names = map(self._getString, names)
        self.sceneGraphStack [-1].addRoute( names )
    def Node (self, (tag, start, stop, sublist)):
        ''' Create new node, returning the value to the caller

        Returns None (with a console warning) when the node's prototype
        was never declared.
        '''
##        print 'node'

        # optional leading 'name' production comes from a DEF clause
        if sublist[0][0] == 'name':
            name = self._getString ( sublist [0])
            ID = self._getString ( sublist [1])
            rest = sublist [2:]
        else:
            name = ""
            ID = self._getString ( sublist [0])
            rest = sublist [1:]
        try:
            prototype = getattr ( self.sceneGraphStack [-1].protoTypes, ID)
        except AttributeError:
            #raise NameError ('''Prototype %s used without declaration! %s:%s'''%(ID, start, stop) )
            print ('''### Prototype %s used without declaration! %s:%s'''%(ID, start, stop) )

            return None
        newNode = prototype(name)
        if name:
            self.sceneGraphStack [-1].regDefName( name, newNode )
        self.nodeStack.append (newNode)
        map (self._dispatch, rest)
        self.nodeStack.pop ()
##        print 'node finished'
        return newNode
    def Attr(self, (tag, start, stop, (name, value))):
        ''' An attribute of a node or script '''
        name = self._getString ( name )
        # push the declared field type so Field() knows how to parse value
        self.fieldTypeStack.append( self.nodeStack[-1].PROTO.getField( name ).type )
        value = self._dispatch( value )
        self.fieldTypeStack.pop()
        if hasattr( self.nodeStack[-1], "__setattr__" ):
            # raw=1 skips re-coercion: value is already in parsed form
            self.nodeStack[-1].__setattr__( name, value, raw=1 )
        else:
            # use slower coercing versions...
            setattr( self.nodeStack[-1], name, value )
    def Script( self, (tag, start, stop, sublist)):
        ''' A script node (can be a root node)'''
        # what's the DEF name...
        if sublist and sublist[0][0] == 'name':
            name = self._getString ( sublist [0])
            rest = sublist [1:]
        else:
            name = ""
            rest = sublist
        # build the script node...
        newNode = proto.Script( name )
        # register with sceneGraph
        if name:
            self.sceneGraphStack [-1].regDefName( name, newNode )
        self.nodeStack.append (newNode)
        map( self._dispatch, rest )
        self.nodeStack.pop ()
        return newNode
    def ScriptEventDecl( self,(tag, left, right, sublist)):
        '''Add an event declaration (with optional IS mapping) to a Script.'''
        # get the definition in recognizable format
        direction, datatype, name = sublist[:3] # must have at least these...
        direction = self._getString (direction) == "eventOut"
        datatype = self._getString (datatype)
        name = self._getString (name)
        # get the vrml value for the field
        self.nodeStack[-1].PROTO.addEvent((name, datatype, direction))
        if sublist[3:]:
            # will this work???
            setattr( self.nodeStack[-1], name, self._dispatch( sublist[3] ) )
    def ScriptFieldDecl(self,(tag, left, right, (exposure, datatype, name, field))):
        ''' Create a new field declaration for the current prototype'''
        # get the definition in recognizable format
        exposure = self._getString (exposure) == "exposedField"
        datatype = self._getString (datatype)
        name = self._getString (name)
        # get the vrml value for the field
        self.fieldTypeStack.append( datatype )
        field = self._dispatch (field)
        self.fieldTypeStack.pop( )
        self.nodeStack[-1].PROTO.addField ((name, datatype, exposure))
        setattr( self.nodeStack[-1], name, field )
    def SFNull(self, tup):
        ''' Create a reference to the SFNull node '''
##        print 'hi'
        return proto.NULL
    def USE( self, (tag, start, stop, (nametuple,) )):
        ''' Create a reference to an already defined node'''
        name = self._getString (nametuple)
        if self.depth < PROGRESS_DEPTH:
            self.progresscount += 1
        try:
            node = self.sceneGraphStack [-1].defNames [name]
            return node
        except KeyError:
            raise NameError ('''USE without DEF for node %s %s:%s'''%(name, start, stop))
    def IS(self, (tag, start, stop, (nametuple,))):
        ''' Create a field reference '''
        name = self._getString (nametuple)
        if not self.prototypeStack [-1].getField (name):
            raise Exception (''' Attempt to create IS mapping of non-existent field %s %s:%s'''%(name, start, stop))
        return proto.IS(name)
+ def Field( self, (tag, start, stop, sublist)):
+ ''' A field value (of any type) '''
+
+ if sublist and sublist[0][0] in ('USE','Script','Node','SFNull'):
+ if self.fieldTypeStack[-1] == 'SFNode':
+ return self._dispatch( sublist[0] )
+ else:
+ return map( self._dispatch, sublist )
+ elif self.fieldTypeStack[-1] == 'MFNode':
+ return []
+ else:
+ # is a simple data type...
+ function = getattr( self, self.fieldTypeStack[-1] )
+ try:
+ return function( sublist )
+ except ValueError:
+ traceback.print_exc()
+ print sublist
+ raise
+
    # --- simple single-value field parsers: each receives the tag tuples
    # --- produced by the grammar and returns a plain Python value
    def SFBool( self, (tup,) ):
        '''Boolean, in Python tradition is either 0 or 1'''
        return self._getString(tup) == 'TRUE'
    def SFFloat( self, (x,) ):
        return string.atof( self._getString(x) )
    SFTime = SFFloat
    def SFInt32( self, (x,) ):
        return string.atoi( self._getString(x), 0 ) # allow for non-decimal numbers
    def SFVec3f( self, (x,y,z) ):
        return map( string.atof, map(self._getString, (x,y,z)) )
    def SFVec2f( self, (x,y) ):
        return map( string.atof, map(self._getString, (x,y)) )
    def SFColor( self, (r,g,b) ):
        return map( string.atof, map(self._getString, (r,g,b)) )
    def SFRotation( self, (x,y,z,a) ):
        return map( string.atof, map(self._getString, (x,y,z,a)) )

    # --- multi-value field parsers
    def MFInt32( self, tuples ):
        '''Integer list; accepts non-decimal literals via base 0.'''
        result = []
        # localisation
        atoi = string.atoi
        append = result.append
        data = self.data
        for tag, start, stop, children in tuples:
            append( atoi( data[start:stop], 0) )
        return result
    SFImage = MFInt32
    def MFFloat( self, tuples ):
        '''Float list.'''
        result = []
        # localisation
        atof = string.atof
        append = result.append
        data = self.data
        for tag, start, stop, children in tuples:
            append( atof( data[start:stop]) )
        return result
    MFTime = MFFloat
    def MFVec3f( self, tuples, length=3, typename='MFVec3f'):
        '''List of float groups of *length*; note: consumes *tuples* in place.'''
        result = []
        # localisation
        atof = string.atof
        data = self.data
        while tuples:
            newobj = []
            for tag, start, stop, children in tuples[:length]:
                newobj.append( atof(data[start:stop] ))
            if len(newobj) != length:
                raise ValueError(
                    '''Incorrect number of elements in %s field at line %s'''%(typename, self._lines(stop))
                )
            result.append( newobj )
            del tuples[:length]
        return result
    def MFVec2f( self, tuples):
        return self.MFVec3f( tuples, length=2, typename='MFVec2f')
    def MFRotation( self, tuples ):
        return self.MFVec3f( tuples, length=4, typename='MFRotation')
    def MFColor( self, tuples ):
        return self.MFVec3f( tuples, length=3, typename='MFColor')

    def MFString( self, tuples ):
        '''List of strings, reassembling each from its escape-aware pieces.'''
        bigresult = []
        for (tag, start, stop, sublist) in tuples:
            result = []
            for element in sublist:
                if element[0] == 'CHARNODBLQUOTE':
                    result.append( self.data[element[1]:element[2]] )
                elif element[0] == 'ESCAPEDCHAR':
                    # skip the leading backslash of the escape sequence
                    result.append( self.data[element[1]+1:element[2]] )
                elif element[0] == 'SIMPLEBACKSLASH':
                    result.append( '\\' )
            bigresult.append( string.join( result, "") )
        return bigresult
##        result = []
##        for tuple in tuples:
##            result.append( self.SFString( tuple) )
##        return result
    def SFString( self, tuples ):
        '''Return the (escaped) string as a simple Python string'''
        if tuples:
            (tag, start, stop, sublist) = tuples[0]
            if len( tuples ) > 1:
                print '''Warning: SFString field has more than one string value''', self.data[tuples[0][1]:tuples[-1][2]]
            result = []
            for element in sublist:
                if element[0] == 'CHARNODBLQUOTE':
                    result.append( self.data[element[1]:element[2]] )
                elif element[0] == 'ESCAPEDCHAR':
                    result.append( self.data[element[1]+1:element[2]] )
                elif element[0] == 'SIMPLEBACKSLASH':
                    result.append( '\\' )
            return string.join( result, "")
        else:
            raise ValueError( "NULL SFString parsed???!!!" )
    def vrmlScene( self, (tag, start, stop, sublist)):
        '''A (prototype's) vrml sceneGraph'''
        newNode = proto.sceneGraph (root=self.sceneGraphStack [-1])
        self.sceneGraphStack.append (newNode)
        #print 'setting proto sceneGraph', `newNode`
        self.prototypeStack[-1].sceneGraph = newNode
        results = filter (None, map (self._dispatch, sublist))
        if results:
            # items which are not auto-magically inserted into their parent
            for result in results:
                newNode.children.append( result)
        self.sceneGraphStack.pop()
+
# EBNF-style grammar (simpleparse declaration) for VRML97 files.  The raw
# string below is compiled by simpleparse's generator; whitespace, commas
# and character classes inside it are significant -- do not reformat.
PARSERDECLARATION = r'''header := -[\n]*
rootItem := ts,(Proto/ExternProto/ROUTE/('USE',ts,USE,ts)/Script/Node),ts
vrmlScene := rootItem*
Proto := 'PROTO',ts,nodegi,ts,'[',ts,(fieldDecl/eventDecl)*,']', ts, '{', ts, vrmlScene,ts, '}', ts
fieldDecl := fieldExposure,ts,dataType,ts,name,ts,Field,ts
fieldExposure := 'field'/'exposedField'
dataType := 'SFBool'/'SFString'/'SFFloat'/'SFTime'/'SFVec3f'/'SFVec2f'/'SFRotation'/'SFInt32'/'SFImage'/'SFColor'/'SFNode'/'MFBool'/'MFString'/'MFFloat'/'MFTime'/'MFVec3f'/'MFVec2f'/'MFRotation'/'MFInt32'/'MFColor'/'MFNode'
eventDecl := eventDirection, ts, dataType, ts, name, ts
eventDirection := 'eventIn'/'eventOut'
ExternProto := 'EXTERNPROTO',ts,nodegi,ts,'[',ts,(extFieldDecl/eventDecl)*,']', ts, ExtProtoURL
extFieldDecl := fieldExposure,ts,dataType,ts,name,ts
ExtProtoURL := '['?,(ts,SFString)*, ts, ']'?, ts # just an MFString by another name :)
ROUTE := 'ROUTE',ts, name,'.',name, ts, 'TO', ts, name,'.',name, ts
Node := ('DEF',ts,name,ts)?,nodegi,ts,'{',ts,(Proto/ExternProto/ROUTE/Attr)*,ts,'}', ts
Script := ('DEF',ts,name,ts)?,'Script',ts,'{',ts,(ScriptFieldDecl/ScriptEventDecl/Proto/ExternProto/ROUTE/Attr)*,ts,'}', ts
ScriptEventDecl := eventDirection, ts, dataType, ts, name, ts, ('IS', ts, IS,ts)?
ScriptFieldDecl := fieldExposure,ts,dataType,ts,name,ts,(('IS', ts,IS,ts)/Field),ts
SFNull := 'NULL', ts

# should really have an optimised way of declaring a different reporting name for the same production...
USE := name
IS := name
nodegi := name
Attr := name, ts, (('IS', ts,IS,ts)/Field), ts
Field := ( '[',ts,((SFNumber/SFBool/SFString/('USE',ts,USE,ts)/Script/Node),ts)*, ']', ts )/((SFNumber/SFBool/SFNull/SFString/('USE',ts,USE,ts)/Script/Node),ts)+

name := -[][0-9{}\000-\020"'#,.\\ ], -[][{}\000-\020"'#,.\\ ]*
SFNumber := [-+]*, ( ('0',[xX],[0-9]+) / ([0-9.]+,([eE],[-+0-9.]+)?))
SFBool := 'TRUE'/'FALSE'
SFString := '"',(CHARNODBLQUOTE/ESCAPEDCHAR/SIMPLEBACKSLASH)*,'"'
CHARNODBLQUOTE := -[\134"]+
SIMPLEBACKSLASH := '\134'
ESCAPEDCHAR := '\\"'/'\134\134'
<ts> := ( [ \011-\015,]+ / ('#',-'\012'*,'\n')+ )*
'''


# Compiled parser table plus the two entry-point productions used above.
PARSERTABLE = generator.buildParser( PARSERDECLARATION )
HEADERPARSER = PARSERTABLE.parserbyname( "header" )
ROOTITEMPARSER = PARSERTABLE.parserbyname( "rootItem" )
+
diff --git a/intern/python/modules/vrml/scenegraph.py b/intern/python/modules/vrml/scenegraph.py
new file mode 100644
index 00000000000..2f137b1e259
--- /dev/null
+++ b/intern/python/modules/vrml/scenegraph.py
@@ -0,0 +1,833 @@
+# VRML node prototype class (SGbuilder)
+# Wed Oct 31 16:18:35 CET 2001
+
+'''Prototype2 -- VRML 97 sceneGraph/Node/Script/ROUTE/IS implementations'''
+import copy, types # extern
+import strop as string # builtin
+from utils import typeclasses, err, namespace # XXX
+## TODO: namespace must go
+
+
class baseProto:
    '''Shared behaviour for Prototype and sceneGraph: VRML 97 linearisation
    and an optional link to a "target" (e.g. Blender-side) node.'''
    def __vrmlStr__( self, **namedargs ):
        '''Generate a VRML 97-syntax string representing this Prototype
        **namedargs -- key:value
            passed arguments for the linearisation object
            see lineariser4.Lineariser
        '''
        import lineariser4
        # apply() is the pre-*args calling convention: forward namedargs
        # to both the Lineariser constructor and its linear() method.
        lineariser = apply( lineariser4.Lineariser, (), namedargs )
        return apply( lineariser.linear, ( self, ), namedargs )

    toString = __vrmlStr__
    # added stuff for linking support for target scenegraph
    def setTargetnode(self, node):
        # stored via __dict__ to bypass subclasses' custom __setattr__
        self.__dict__['_targetnode'] = node
    def getTargetnode(self):
        try:
            return self.__dict__['_targetnode']
        except:
            # no target ever set -- treated as "not linked"
            return None
+
class Prototype(baseProto):
    ''' A VRML 97 Prototype object

    A Prototype is a callable object which produces Node instances
    the Node uses a pointer to its Prototype to provide much of the
    Node's standard functionality.

    Prototype's are often stored in a sceneGraph's protoTypes namespace,
    where you can access them as sceneGraph.protoTypes.nodeGI . They are
    also commonly found in Nodes' PROTO attributes.

    Attributes:
        __gi__ -- constant string "PROTO"
        nodeGI -- string gi
            The "generic identifier" of the node type, i.e. the name of the node
        fieldDictionary -- string name: (string name, string dataType, boolean exposed)
        defaultDictionary -- string name: object defaultValue
            Will be blank for EXTERNPROTO's and Script prototypes
        eventDictionary -- string name: (string name, string dataType, boolean eventOut)
        sceneGraph -- object sceneGraph
        MFNodeNames -- list of field name strings
            Allows for easy calculation of "children" nodes
        SFNodeNames -- list of field name strings
            Allows for easy calculation of "children" nodes
    '''
    __gi__ = "PROTO"
    def __init__(self, gi, fieldDict=None, defaultDict=None, eventDict=None, sGraph=None):
        '''
        gi -- string gi
            see attribute nodeGI
        fieldDict -- string name: (string name, string dataType, boolean exposed)
            see attribute fieldDictionary
        defaultDict -- string name: object defaultValue
            see attribute defaultDictionary
        eventDict -- string name: (string name, string dataType, boolean eventOut)
            see attribute eventDictionary
        sceneGraph -- object sceneGraph
            see attribute sceneGraph
        '''
        # checkName coerces gi to a legal VRML identifier
        self.nodeGI = checkName( gi )
        self.fieldDictionary = {}
        self.defaultDictionary = {}
        self.eventDictionary = {}
        self.SFNodeNames = []
        self.MFNodeNames = []
        self.sceneGraph = sGraph

        # setup the fields/events
        for definition in (fieldDict or {}).values():
            self.addField( definition, (defaultDict or {}).get( definition[0]))
        for definition in (eventDict or {}).values():
            self.addEvent( definition )

    def getSceneGraph( self ):
        ''' Retrieve the sceneGraph object (may be None object)
        see attribute sceneGraph'''
        return self.sceneGraph
    def setSceneGraph( self, sceneGraph ):
        ''' Set the sceneGraph object (may be None object)
        see attribute sceneGraph'''
        self.sceneGraph = sceneGraph
    def getChildren(self, includeSceneGraph=None, includeDefaults=1, *args, **namedargs):
        ''' Calculate the current children of the PROTO and return as a list of nodes
        if includeDefaults:
            include those default values which are node values
        if includeSceneGraph:
            include the sceneGraph object if it is not None

        see attribute MFNodeNames
        see attribute SFNodeNames
        see attribute sceneGraph
        '''
        temp = []
        if includeDefaults:
            # SFNode defaults are single nodes; MFNode defaults are lists
            for attrname in self.SFNodeNames:
                try:
                    temp.append( self.defaultDictionary[attrname] )
                except KeyError: # sceneGraph object is not copied...
                    pass
            for attrname in self.MFNodeNames:
                try:
                    temp[len(temp):] = self.defaultDictionary[attrname]
                except KeyError:
                    pass
        if includeSceneGraph and self.sceneGraph:
            temp.append( self.getSceneGraph() )
        return temp
    def addField (self, definition, default = None):
        ''' Add a single field definition to the Prototype
        definition -- (string name, string dataType, boolean exposed)
        default -- object defaultValue

        see attribute fieldDictionary
        see attribute defaultDictionary
        '''
        # a Field instance may be passed instead of a tuple
        if type (definition) == types.InstanceType:
            definition = definition.getDefinition()
            default = definition.getDefault ()
        self.removeField( definition[0] )
        self.fieldDictionary[definition [0]] = definition
        if default is not None:
            # NOTE(review): fieldcoercian is bound at module level only after
            # this class body executes; fine at call time, fails if addField
            # were called during module import -- confirm import order.
            default = fieldcoercian.FieldCoercian()( default, definition[1] )
            self.defaultDictionary [definition [0]] = default
        # index node-valued fields for fast children calculation
        if definition[1] == 'SFNode':
            self.SFNodeNames.append(definition[0])
        elif definition[1] == 'MFNode':
            self.MFNodeNames.append(definition[0])
    def removeField (self, key):
        ''' Remove a single field from the Prototype
        key -- string fieldName
            The name of the field to remove
        '''
        if self.fieldDictionary.has_key (key):
            del self.fieldDictionary [key]
        if self.defaultDictionary.has_key (key):
            del self.defaultDictionary [key]
        # drop every occurrence from the node-name indices
        for attribute in (self.SFNodeNames, self.MFNodeNames):
            while key in attribute:
                attribute.remove(key)
    def addEvent(self, definition):
        ''' Add a single event definition to the Prototype
        definition -- (string name, string dataType, boolean eventOut)

        see attribute eventDictionary
        '''
        if type (definition) == types.InstanceType:
            definition = definition.getDefinition()
        self.eventDictionary[definition [0]] = definition
    def removeEvent(self, key):
        ''' Remove a single event from the Prototype
        key -- string eventName
            The name of the event to remove
        '''
        if self.eventDictionary.has_key (key):
            del self.eventDictionary [key]
    def getField( self, key ):
        '''Return a Field or Event object representing a given name
        key -- string name
            The name of the field or event to retrieve
            will attempt to match key, key[4:], and key [:-8]
            corresponding to key, set_key and key_changed

        see class Field
        see class Event
        '''
#		print self.fieldDictionary, self.eventDictionary
        for tempkey in (key, key[4:], key[:-8]):
            if self.fieldDictionary.has_key( tempkey ):
                return Field( self.fieldDictionary[tempkey], self.defaultDictionary.get(tempkey) )
            elif self.eventDictionary.has_key( tempkey ):
                return Event( self.eventDictionary[tempkey] )
        raise AttributeError, key
    def getDefault( self, key ):
        '''Return the default value for the given field
        key -- string name
            The name of the field
            Will attempt to match key, key[4:], and key [:-8]
            corresponding to key, set_key and key_changed

        see attribute defaultDictionary
        '''
        for key in (key, key[4:], key[:-8]):
            if self.defaultDictionary.has_key( key ):
                val = self.defaultDictionary[key]
                # return a copy so callers cannot mutate the shared default
                if type(val) in typeclasses.MutableTypes:
                    val = copy.deepcopy( val )
                return val
            elif self.fieldDictionary.has_key( key ):
                '''We have the field, but we don't have a default, we are likely an EXTERNPROTO'''
                return None
        raise AttributeError, key
    def setDefault (self, key, value):
        '''Set the default value for the given field
        key -- string name
            The name of the field to set
        value -- object defaultValue
            The default value, will be checked for type and coerced if necessary
        '''
        field = self.getField (key)
        self.defaultDictionary [field.name]= field.coerce (value)
    def clone( self, children = 1, sceneGraph = 1 ):
        '''Return a copy of this Prototype
        children -- boolean
            if true, copy the children of the Prototype, otherwise exclude them
        sceneGraph -- boolean
            if true, copy the sceneGraph of the Prototype
        '''
        if sceneGraph:
            sceneGraph = self.sceneGraph
        else:
            sceneGraph = None
        # defaults should always be copied before modification, but this is still dangerous...
        defaultDictionary = self.defaultDictionary.copy()
        if not children:
            for attrname in self.SFNodeNames+self.MFNodeNames:
                try:
                    del defaultDictionary[attrname]
                except KeyError: # sceneGraph object is not copied...
                    pass
        # now make a copy
        if self.__gi__ == "PROTO":
            newNode = self.__class__(
                self.nodeGI,
                self.fieldDictionary,
                defaultDictionary,
                self.eventDictionary,
                sceneGraph,
            )
        else:
            # ExternalPrototype signature: (gi, url, fieldDict, eventDict)
            newNode = self.__class__(
                self.nodeGI,
                self.url,
                self.fieldDictionary,
                self.eventDictionary,
            )
        return newNode
    def __call__(self, *args, **namedargs):
        '''Create a new Node instance associated with this Prototype
        *args, **namedargs -- passed to the Node.__init__
        see class Node
        '''
        node = apply( Node, (self, )+args, namedargs )
        return node
    def __repr__ ( self ):
        '''Create a simple Python representation'''
        return '''%s( %s )'''%( self.__class__.__name__, self.nodeGI )
+
class ExternalPrototype( Prototype ):
    '''Sub-class of Prototype

    The ExternalPrototype is a minor sub-classing of the Prototype
    it does not have any defaults, nor a sceneGraph

    Attributes:
        __gi__ -- constant string "EXTERNPROTO"
        url -- string list urls
            implementation source for the ExternalPrototype
    '''
    __gi__ = "EXTERNPROTO"
    def __init__(self, gi, url=None, fieldDict=None, eventDict=None):
        '''
        gi -- string gi
            see attribute nodeGI
        url -- string list url
            MFString-compatible list of url's for EXTERNPROTO
        fieldDict -- string name: (string name, string dataType, boolean exposed)
            see attribute fieldDictionary
        eventDict -- string name: (string name, string dataType, boolean eventOut)
            see attribute eventDictionary
        '''
        # avoid the shared-mutable-default pitfall: fresh list per instance
        if url is None:
            url = []
        self.url = url
        # no defaultDict / sceneGraph: EXTERNPROTOs carry no defaults
        Prototype.__init__( self, gi, fieldDict=fieldDict, eventDict=eventDict)
+
+
+from vrml import fieldcoercian # XXX
class Field:
    ''' Representation of a Prototype Field

    The Field object is a simple wrapper to provide convenient
    access to field coercion and meta-information.
    '''
    def __init__( self, specification, default=None ):
        '''
        specification -- (string name, string dataType, boolean exposed)
        default -- object defaultValue or None
        '''
        self.name, self.type, self.exposure = specification
        self.default = default
    def getDefinition (self):
        '''Return the (name, dataType, exposed) specification tuple'''
        return self.name, self.type, self.exposure
    def getDefault (self):
        '''Return the default value (None when no default was given)'''
        return self.default
    def coerce( self, value ):
        ''' Coerce value to the appropriate dataType for this Field '''
        return fieldcoercian.FieldCoercian()( value,self.type, )
    def __repr__( self ):
        if hasattr (self, "default"):
            return '%s( (%s,%s,%s), %s)'%( self.__class__.__name__, self.name, self.type, self.exposure, self.default)
        else:
            return '%s( (%s,%s,%s),)'%( self.__class__.__name__, self.name, self.type, self.exposure)
    def __str__( self ):
        '''VRML 97 declaration syntax, e.g. "exposedField SFColor color 0 0 0"'''
        if self.exposure:
            exposed = "exposedField"
        else:
            # bug fix: was the bare name ``field`` (NameError at runtime);
            # the intended value is the VRML keyword string "field"
            exposed = "field"
        if hasattr (self, "default"):
            default = ' ' + str( self.default)
        else:
            default = ""
        return '%s %s %s%s'%(exposed, self.type, self.name, default)
+
class Event (Field):
    '''Field subclass representing an eventIn/eventOut declaration;
    the exposure flag distinguishes eventOut (true) from eventIn (false).'''
    def __str__( self ):
        if self.exposure:
            direction = "eventOut"
        else:
            direction = "eventIn"
        return '%s %s %s' % (direction, self.type, self.name)
+
+
### Translation strings for VRML node names...
# Two strop translation tables mapping characters that are illegal in a
# VRML 97 identifier to '_': the first also forbids leading digits, the
# second is applied to the remaining characters.
translationstring = '''][0123456789{}"'#,.\\ \000\001\002\003\004\005\006\007\010\011\012\013\014\015\016\017\020\021\022\023'''
NAMEFIRSTCHARTRANSLATOR = string.maketrans( translationstring, '_'*len(translationstring) )
translationstring = '''][{}"'#,.\\ \000\001\002\003\004\005\006\007\010\011\012\013\014\015\016\017\020\021\022\023'''
NAMERESTCHARTRANSLATOR = string.maketrans( translationstring, '_'*len(translationstring) )
del translationstring
def checkName( name ):
    '''Convert arbitrary string to a valid VRML id
    name -- string to sanitise (empty string passes through unchanged)
    raises TypeError for non-string input
    '''
    if type(name) is types.StringType:
        if not name:
            return name
        # first character uses the stricter table (no digits allowed)
        return string.translate( name[:1], NAMEFIRSTCHARTRANSLATOR) + string.translate( name[1:], NAMERESTCHARTRANSLATOR)
    else:
        raise TypeError, "VRML Node Name must be a string, was a %s: %s"%(type(name), name)
+
class Node(baseProto):
    ''' A VRML 97 Node object

    A Node object represents a VRML 97 node. Attributes of the Node
    can be set/retrieved with standard python setattr/getattr syntax.
    VRML 97 attributes may be passed to the constructor as named
    arguments.

    Attributes:
        __gi__ -- string PROTOname
        DEF -- string DEFName
            The DEF name of the node, will be coerced to be a valid
            identifier (with "" being considered valid)
        PROTO -- Prototype PROTO
            The node's Prototype object
        attributeDictionary -- string name: object value
            Dictionary in which VRML 97 attributes are stored
    '''
    DEF = '' # the default name for all nodes (arbitrary)
    def __init__(self, PROTO, name='', attrDict=None, *args, **namedargs):
        '''Normally this method is only called indirectly via the Prototype() interface
        PROTO -- Prototype PROTO
            see attribute PROTO
        name -- string DEFName
            see attribute DEF
        attrDict -- string name: object value
            see attribute attributeDictionary
        **namedargs -- string name: object value
            added to attrDict to create attributeDictionary
        '''
        # write through __dict__ to sidestep our own __setattr__
        self.__dict__["PROTO"] = PROTO
        self.DEF = name
        self.__dict__["attributeDictionary"] = {}
##		print attrDict, namedargs
        for dict in (attrDict or {}), namedargs:
            if dict:
                for key, value in dict.items ():
                    self.__setattr__( key, value, check=1 )

    def __setattr__( self, key, value, check=1, raw=0 ):
        '''Set attribute on Node
        key -- string attributeName
        value -- object attributeValue
        check -- boolean check
            if false, put values for unrecognized keys into __dict__
            otherwise, raise an AttributeError
        '''
        if key == "DEF":
            self.__dict__["DEF"] = checkName( value )
            return None
        elif key == "PROTO":
            # NOTE(review): unlike the DEF branch this does not return, so the
            # field lookup below still runs and (with check=1) raises
            # AttributeError even though PROTO was assigned -- confirm intended.
            self.__dict__["PROTO"] = value
        try:
            field = self.PROTO.getField( key )
            # IS references and raw values bypass type coercion
            if (hasattr( value, "__gi__") and value.__gi__ == "IS") or raw:
                self.attributeDictionary[ field.name] = value
            else:
                self.attributeDictionary[ field.name] = field.coerce( value )
        except ValueError, x:
            raise ValueError( "Could not coerce value %s into value of VRML type %s for %s node %s's field %s"%( value, field.type, self.__gi__, self.DEF, key), x.args)
        except (AttributeError), x:
            if check:
                raise AttributeError("%s is not a known field for node %s"%(key, repr(self)))
            else:
                self.__dict__[key] = value
    def __getattr__( self, key, default = 1 ):
        ''' Retrieve an attribute when standard lookup fails
        key -- string attributeName
        default -- boolean default
            if true, return the default value if the node does not have local value
            otherwise, raise AttributeError
        '''
        if key != "attributeDictionary":
            if self.__dict__.has_key( key):
                return self.__dict__[ key ]
            elif self.attributeDictionary.has_key( key):
                return self.attributeDictionary[key]
        if key != "PROTO":
            if key == "__gi__":
                # node type name is delegated to the prototype
                return self.PROTO.nodeGI
            elif default:
                try:
                    default = self.PROTO.getDefault( key )
                    if type( default ) in typeclasses.MutableTypes:
                        # we need a copy, not the original
                        default = copy.deepcopy( default )
                    # cache the copied default locally (raw=1: skip coercion)
                    self.__setattr__( key, default, check=0, raw=1 )
                    return default
                except AttributeError:
                    pass
        raise AttributeError, key
    def __delattr__( self, key ):
        ''' Delete an attribute from the Node
        key -- string attributeName
        '''
        # NOTE(review): falls through to the raise even after a successful
        # delete (no return), so deletion always raises AttributeError --
        # confirm whether callers rely on that.
        if key != "attributeDictionary":
            if self.attributeDictionary.has_key( key):
                del self.attributeDictionary[key]
            elif self.__dict__.has_key( key):
                del self.__dict__[ key ]
        raise AttributeError, key

    def __repr__(self):
        ''' Create simple python representation '''
        return '<%s(%s): %s>'%(self.__gi__, `self.DEF`, self.attributeDictionary.keys() )
    def getChildrenNames( self, current = 1, *args, **namedargs ):
        ''' Get the (current) children of Node
        returns two lists: MFNode children, SFNode children
        current -- boolean currentOnly
            if true, only return current children
            otherwise, include all potential children
        '''
        MFNODES, SFNODES = self.PROTO.MFNodeNames, self.PROTO.SFNodeNames
        mns, sns = [],[]
        for key in MFNODES:
            if current and self.attributeDictionary.has_key(key):
                mns.append(key)
            elif not current:
                mns.append(key)
        # NOTE(review): the SFNode loop does not test `current` in its first
        # branch, unlike the MFNode loop above -- confirm the asymmetry.
        for key in SFNODES:
            if self.attributeDictionary.has_key(key):
                sns.append(key)
            elif not current:
                sns.append(key)
        return mns,sns
    def calculateChildren(self, *args, **namedargs):
        '''Calculate the current children of the Node as list of Nodes
        '''
        MFNODES, SFNODES = self.getChildrenNames( )
        temp = []
        # default=0: only locally-set values, never prototype defaults
        for key in MFNODES:
            try:
                temp.extend( self.__getattr__( key, default=0 ) )
            except AttributeError:
                pass
        for key in SFNODES:
            try:
                temp.append( self.__getattr__(key, default = 0 ) )
            except AttributeError:
                pass
        return temp
    def clone(self, newclass=None, name=None, children=None, attrDeepCopy=1, *args, **namedargs):
        '''Return a copy of this Node
        newclass -- object newClass or None
            optionally use a different Prototype as base
        name -- string DEFName or None or 1
            if 1, copy from current
            elif None, set to ""
            else, set to passed value
        children -- boolean copyChildren
            if true, copy the children of this node
            otherwise, skip children
        attrDeepCopy -- boolean deepCopy
            if true, use deepcopy
            otherwise, use copy
        '''
        if attrDeepCopy:
            cpy = copy.deepcopy
        else:
            cpy = copy.copy
        newattrs = self.attributeDictionary.copy()
        if not children:
            mnames,snames = self.getChildrenNames( )
            for key in mnames+snames:
                try:
                    del(newattrs[key])
                except KeyError:
                    pass
        for key, val in newattrs.items():
            if type(val) in typeclasses.MutableTypes:
                newattrs[key] = cpy(val)
        # following is Node specific, won't work for sceneGraphs, scripts, etceteras
        if name == 1: # asked to copy the name
            name = self.DEF
        elif name is None: # asked to clear the name
            name = ''
        if not newclass:
            newclass = self.PROTO
        return newclass( name, newattrs )
    def __cmp__( self, other, stop=None ):
        ''' Compare this node to another object/node
        other -- object otherNode
        stop -- boolean stopIfFailure
            if true, failure to find comparison causes match failure (i.e. considered unequal)
        '''

        if hasattr( other, '__gi__') and other.__gi__ == self.__gi__:
            try:
                return cmp( self.DEF, other.DEF) or cmp( self.attributeDictionary, other.attributeDictionary )
            except:
                # comparison failed; give the other object one chance to
                # compare us (stop=1 prevents infinite mutual recursion)
                if not stop:
                    try:
                        return other.__cmp__( self , 1) # 1 being stop...
                    except:
                        pass
        return -1 # could be one, doesn't really matter
+
def Script( name="", attrDict=None, fieldDict=None, defaultDict=None, eventDict=None, **namedarguments):
    ''' Create a script node (and associated prototype)
    name -- string DEFName
    attrDict -- string name: object value
        see class Node.attributeDictionary
    fieldDict -- string name: (string name, string dataType, boolean exposure)
        see class Prototype.fieldDictionary
    defaultDict -- string name: object value
        see class Prototype.defaultDictionary
    eventDict -- string name: (string name, string dataType, boolean eventOut)
    returns the new Node instance (its PROTO is a fresh "Script" Prototype)
    '''
    # the three standard VRML 97 Script fields, plus caller additions
    fieldDictionary = {
        'directOutput':('directOutput', 'SFBool',0),
        'url':('url',"MFString",0),
        'mustEvaluate':('mustEvaluate', 'SFBool',0),
    }
    fieldDictionary.update( fieldDict or {})
    defaultDictionary = {
        "directOutput":0,
        "url":[],
        "mustEvaluate":0,
    }
    defaultDictionary.update( defaultDict or {})
    PROTO = Prototype(
        "Script",
        fieldDictionary,
        defaultDictionary ,
        eventDict = eventDict,
    )
    if attrDict is not None:
        attrDict.update( namedarguments )
    else:
        attrDict = namedarguments
    # calling the prototype instantiates the Node
    return PROTO( name, attrDict )
+
+
class NullNode:
    '''NULL SFNode value

    A single shared instance (NULL, below) represents VRML's NULL; it
    should, for all intents and purposes, just sit there inertly.
    '''
    __gi__ = 'NULL'
    DEF = ''
    # hacky signal to the walking engine not to reject this node as
    # already processed
    __walker_is_temporary_item__ = 1
    def __repr__(self):
        return '<NULL vrml SFNode>'
    def __vrmlStr__(self, *args, **namedargs):
        return ' NULL '
    toString = __vrmlStr__
    def __nonzero__(self):
        # NULL is always false
        return 0
    def __call__(self, *args, **namedargs):
        # "instantiating" NULL simply yields NULL again
        return self
    def __cmp__(self, other):
        # equal to anything that also claims the NULL gi
        if hasattr(other, '__gi__') and other.__gi__ == self.__gi__:
            return 0
        return -1  # could be one, doesn't really matter
    def clone(self):
        # singleton semantics: a clone is the very same object
        return self

NULL = NullNode()
+
class fieldRef:
    '''IS Prototype field reference

    Placeholder value linking a node attribute to a PROTO field via
    a VRML 97 "IS" declaration.
    '''
    __gi__ = 'IS'
    DEF = ''
    def __init__(self, declaredName):
        # name of the PROTO field this reference points at
        self.declaredName = declaredName
    def __repr__(self):
        return 'IS %s' % self.declaredName
    def __vrmlStr__(self, *args, **namedargs):
        return 'IS %s' % self.declaredName
    toString = __vrmlStr__
    def clone(self):
        # re-wrap the declared name in a fresh reference
        return self.__class__(self.declaredName)
    def __cmp__(self, other):
        if hasattr(other, '__gi__') and other.__gi__ == self.__gi__:
            return cmp(self.declaredName, other.declaredName)
        return -1  # could be one, doesn't really matter

IS = fieldRef
+
class ROUTE:
    ''' VRML 97 ROUTE object
    The ROUTE object keeps track of its source and destination nodes and attributes
    It generally lives in a sceneGraph's "routes" collection

    Supports 4-tuple style indexing:
        (fromNode, fromField, toNode, toField)
    '''
    __gi__ = 'ROUTE'
    def __init__( self, fromNode, fromField, toNode, toField ):
        # endpoints must be resolved Node objects, not DEF-name strings;
        # string resolution happens in sceneGraph.addRoute
        if type(fromNode) is types.StringType:
            raise TypeError( "String value for ROUTE fromNode",fromNode)
        if type(toNode) is types.StringType:
            raise TypeError( "String value for ROUTE toNode",toNode)
        self.fromNode = fromNode
        self.fromField = fromField
        self.toNode = toNode
        self.toField = toField
    def __getitem__( self, index ):
        return (self.fromNode, self.fromField, self.toNode, self.toField)[index]
    def __setitem__( self, index, value ):
        attribute = ("fromNode","fromField","toNode", "toField")[index]
        setattr( self, attribute, value )
    def __repr__( self ):
        # renders in VRML 97 syntax using the endpoints' DEF names
        return 'ROUTE %s.%s TO %s.%s'%( self.fromNode.DEF, self.fromField, self.toNode.DEF, self.toField )
    def clone( self ):
        # shallow copy: endpoints are shared, not copied
        return self.__class__(
            self.fromNode,
            self.fromField,
            self.toNode,
            self.toField,
        )
+
+
class sceneGraph(baseProto):
    ''' A VRML 97 sceneGraph
    Attributes:
        __gi__ -- constant string "sceneGraph"
        DEF -- constant string ""
        children -- Node list
            List of the root children of the sceneGraph, nodes/scripts only
        routes -- ROUTE list
            List of the routes within the sceneGraph
        defNames -- string DEFName: Node node
            Mapping of DEF names to their respective nodes
        protoTypes -- Namespace prototypes
            Namespace (with chaining lookup) collection of prototypes
            getattr( sceneGraph.protoTypes, 'nodeGI' ) retrieves a prototype
    '''
    __gi__ = 'sceneGraph'
    DEF = ''
    def __init__(self, root=None, protoTypes=None, routes=None, defNames=None, children=None, *args, **namedargs):
        '''
        root -- sceneGraph root or Dictionary root or Module root or None
            Base object for root of protoType namespace hierarchy
        protoTypes -- string nodeGI: Prototype PROTO
            Dictionary of prototype definitions
        routes -- ROUTE list or (string sourcenode, string sourceeventOut, string destinationnode, string destinationeventOut) list
            List of route objects or tuples to be added to the sceneGraph
            see attribute routes
        defNames -- string DEFName: Node node
            see attribute defNames
        children -- Node list
            see attribute children
        '''
        # fresh mutable containers per instance (None defaults avoid the
        # shared-mutable-default-argument pitfall)
        if children is None:
            self.children = []
        else:
            self.children = children
        if routes is None:
            self.routes = [] # how will we efficiently handle routes?
        else:
            self.routes = routes
        if defNames == None:
            self.defNames = {} # maps 'defName':Node
        else:
            self.defNames = defNames
        if protoTypes is None:
            protoTypes = {}
        if root is None:
            # no parent scene: chain lookups up to the standard VRML nodes
            from vrml import basenodes # XXX
            self.protoTypes = namespace.NameSpace(
                protoTypes,
                children = [namespace.NameSpace(basenodes)]
            )
        else: # there is a root file, so need to use it as the children instead of basenodes...
            if hasattr( root, "protoTypes"):
                self.protoTypes = namespace.NameSpace(
                    protoTypes,
                    children = [root.protoTypes]
                )
            else:
                self.protoTypes = namespace.NameSpace(
                    protoTypes,
                    children = [ namespace.NameSpace(root) ]
                )
    def __getinitargs__( self ):
        # we only copy our explicit protos, our routes, our defNames, and our children
        # inherited protos will be pulled along by their nodes...
        return None, self.protoTypes._base, self.routes, self.defNames, self.children
    def __getstate__( self ):
        # state lives entirely in __getinitargs__; nothing extra to pickle
        return {}
    def __setstate__( self, dict ):
        pass
    def __del__( self, id=id ):
        '''
        Need to clean up the namespace's mutual references,
        this can be done without affecting the cascade by just
        eliminating the key/value pairs. The namespaces will
        no longer contain the prototypes, but they will still
        chain up to the higher-level namespaces, and the nodes
        will have those prototypes still in use.
        '''
##		print 'del sceneGraph', id(self )
        try:
##			import pdb
##			pdb.set_trace()
##			self.protoTypes.__dict__.clear()
            self.protoTypes._base.clear()
            del self.protoTypes.__namespace_cascade__[:]
        except:
            print 'unable to free references'

    def addRoute(self, routeTuple, getNewNodes=0):
        ''' Add a single route to the sceneGraph
        routeTuple -- ROUTE route or (string sourcenode, string sourceeventOut, string destinationnode, string destinationeventOut)
        getNewNodes -- boolean getNewNodes
            if true, look up sourcenode and destinationnode within the current defNames to determine source/destination nodes
            otherwise, just use current if available
        returns 1 on success, 0 when an endpoint cannot be resolved
        '''
        # create and wire together the Routes here,
        # should just be a matter of pulling the events and passing the nodes...
##		import pdb
##		pdb.set_trace()
        if type( routeTuple) in ( types.TupleType, types.ListType):
            (fromNode, fromField, toNode, toField ) = routeTuple
            if type(fromNode) is types.StringType:
                # get the node instead of the string...
                if self.defNames.has_key( fromNode ):
                    fromNode = self.defNames[fromNode]
                else:
                    err.err( "ROUTE from an unknown node %s "%(routeTuple) )
                    return 0
            if type(toNode) is types.StringType:
                # get the node instead of the string...
                if self.defNames.has_key( toNode ):
                    toNode = self.defNames[toNode]
                else:
                    err.err( "ROUTE to an unknown node %s "%(routeTuple) )
                    return 0
            routeTuple = ROUTE( fromNode, fromField, toNode, toField)
        elif getNewNodes:
            # get the nodes with the same names...
            if self.defNames.has_key( routeTuple[0].DEF ):
                routeTuple[0] = self.defNames[routeTuple[0].DEF]
            else:
                err.err( "ROUTE from an unknown node %s "%(routeTuple) )
                return 0
            if self.defNames.has_key( routeTuple[2].DEF ):
                routeTuple[2] = self.defNames[routeTuple[2].DEF]
            else:
                err.err( "ROUTE to an unknown node %s "%(routeTuple) )
                return 0
        # should be a Route node now, append to our ROUTE list...
        self.routes.append(routeTuple)
        return 1
    def regDefName(self, defName, object):
        ''' Register a DEF name for a particular object
        defName -- string DEFName
        object -- Node node
        '''
        object.DEF = defName
        self.defNames[defName] = object
    def addProto(self, proto):
        '''Register a Prototype for this sceneGraph
        proto -- Prototype PROTO
        '''
        # NOTE(review): registers under proto.__gi__, which is the constant
        # "PROTO"/"EXTERNPROTO" -- the class docstring says protoTypes maps
        # nodeGI names; confirm whether this should be proto.nodeGI.
        setattr( self.protoTypes, proto.__gi__, proto )
    #toString = __vrmlStr__
    #__vrmlStr__ = toString
##	def __setattr__( self, key, value ):
##		if key == 'protoTypes' and type( value) is types.ListType:
##			import pdb
##			pdb.set_trace()
##			raise TypeError( "Invalid type for protoTypes attribute of sceneGraph %s"%(`value`) )
##		else:
##			self.__dict__[key] = value
+
# Fallback default value for each VRML 97 field dataType, used when a
# field declaration supplies no explicit default.
DEFAULTFIELDVALUES ={
    "SFBool": 0,
    "SFString": "",
    "SFFloat": 0,
    "SFTime": 0,
    "SFVec3f": (0, 0,0),
    "SFVec2f": (0,0),
    "SFRotation": (0, 1,0, 0),
    "SFInt32": 0,
    "SFImage": (0,0,0),
    "SFColor": (0,0, 0),
    "SFNode": NULL,
    # added: the parser grammar declares the MFBool dataType, but it had
    # no entry here, so MFBool fields would fail the default lookup
    "MFBool": [],
    "MFString": [],
    "MFFloat": [],
    "MFTime": [],
    "MFVec3f": [],
    "MFVec2f": [],
    "MFRotation": [],
    "MFInt32": [],
    "MFColor": [],
    "MFNode": [],
}
+
+
+
diff --git a/intern/python/modules/vrml/utils/__init__.py b/intern/python/modules/vrml/utils/__init__.py
new file mode 100644
index 00000000000..9d708a9084c
--- /dev/null
+++ b/intern/python/modules/vrml/utils/__init__.py
@@ -0,0 +1 @@
+"""utilities"""
diff --git a/intern/python/modules/vrml/utils/collapse.py b/intern/python/modules/vrml/utils/collapse.py
new file mode 100644
index 00000000000..25da50c2adb
--- /dev/null
+++ b/intern/python/modules/vrml/utils/collapse.py
@@ -0,0 +1,169 @@
+'''
+Destructive Functions for "collapsing" Sequences into single levels
+
+>>> from mcf.utils import collapse
+
+>>> collapse.test([[[1],[2,3]],[[]],[4],5,[6]])
+
+[1, 2, 3, 4, 5, 6] # note that is the same root list
+
+>>> collapse.collapse2([[[1],[2,3]],[[]],(4,()),(5,),[6]])
+
+[1, 2, 3, 4, 5, 6] # note is the same root list
+'''
+import copy, types, sys
+from types import ListType, TupleType # this now only supports the obsolete stuff...
+
def hyperCollapse( inlist, allowedmap, type=type, list=list, itype=types.InstanceType, maxint= sys.maxint):
    '''
    Destructively flatten a mixed hierarchy to a single level.
    Non-recursive, many speedups and obfuscations by Tim Peters :)

    inlist -- mutable sequence, modified (and returned) in place
    allowedmap -- dict whose keys are the types/classes to expand
    remaining args are locals-binding speed hacks; do not pass them
    '''
    try:
        # for every possible index
        for ind in xrange( maxint):
            # while that index currently holds a list
            expandable = 1
            while expandable:
                expandable = 0
                if allowedmap.has_key( type(inlist[ind]) ):
                    # expand that list into the index (and subsequent indicies)
                    inlist[ind:ind+1] = list( inlist[ind])
                    expandable = 1

                # alternately you could iterate through checking for isinstance on all possible
                # classes, but that would be very slow
                elif type( inlist[ind] ) is itype and allowedmap.has_key( inlist[ind].__class__ ):
                    # here figure out some way to generically expand that doesn't risk
                    # infinite loops...
                    templist = []
                    for x in inlist[ind]:
                        templist.append( x)
                    inlist[ind:ind+1] = templist
                    expandable = 1
    except IndexError:
        # ran off the end of the (shrinking/growing) list: done
        pass
    return inlist
+
+
def collapse(inlist, type=type, ltype=types.ListType, maxint= sys.maxint):
    '''
    Destructively flatten a list hierarchy to a single level.
    Non-recursive, and (as far as I can see, doesn't have any
    glaring loopholes).
    Further speedups and obfuscations by Tim Peters :)

    inlist -- list, modified (and returned) in place; only list
    children are expanded, tuples are left intact (see collapse2)
    '''
    try:
        # for every possible index
        for ind in xrange( maxint):
            # while that index currently holds a list
            while type(inlist[ind]) is ltype:
                # expand that list into the index (and subsequent indicies)
                inlist[ind:ind+1] = inlist[ind]
            #ind = ind+1
    except IndexError:
        # walked past the end of the list: flattening complete
        pass
    return inlist
+
def collapse_safe(inlist):
    '''
    Non-destructive variant of collapse: flattens a shallow copy,
    leaving the original list untouched.
    '''
    duplicate = inlist[:]
    return collapse(duplicate)
+
def collapse2(inlist, ltype=(types.ListType, types.TupleType), type=type, maxint= sys.maxint ):
    '''
    Destructively flatten a list hierarchy to a single level.
    Will expand tuple children as well, but will fail if the
    top level element is not a list.
    Non-recursive, and (as far as I can see, doesn't have any
    glaring loopholes).
    '''
    ind = 0
    try:
        while 1:
            while type(inlist[ind]) in ltype:
                try:
                    # lists splice directly...
                    inlist[ind:ind+1] = inlist[ind]
                except TypeError:
                    # ...tuples must be converted first
                    inlist[ind:ind+1] = list(inlist[ind])
            ind = ind+1
    except IndexError:
        # advanced past the last element: done
        pass
    return inlist
+
def collapse2_safe(inlist):
    '''
    As collapse2, but works on a copy of the inlist
    '''
    # bug fix: previously delegated to collapse(), which leaves tuple
    # children unexpanded -- the docstring (and the name) promise
    # collapse2 semantics, which expand tuples as well
    return collapse2( list(inlist) )
+
def old_buggy_collapse(inlist):
    '''Always return a one-level list of all the non-list elements in listin,
    rewritten to be non-recursive 96-12-28 Note that the new versions work
    on the original list, not a copy of the original.

    Retained for historical reference only -- known buggy (see the name);
    use collapse() instead.'''
    if type(inlist)==TupleType:
        inlist = list(inlist)
    elif type(inlist)!=ListType:
        return [inlist]
    x = 0
    while 1:
        try:
            y = inlist[x]
            if type(y) == ListType:
                ylen = len(y)
                if ylen == 1:
                    inlist[x] = y[0]
                    if type(inlist[x]) == ListType:
                        x = x - 1 # need to collapse that list...
                elif ylen == 0:
                    del(inlist[x])
                    x = x-1 # list has been shortened
                else:
                    inlist[x:x+1]=y
            x = x+1
        except IndexError:
            break
    return inlist
+
+
def old_buggy_collapse2(inlist):
    '''As collapse, but also collapse tuples, rewritten 96-12-28 to be non-recursive

    Retained for historical reference only -- known buggy (see the name);
    use collapse2() instead.'''
    if type(inlist)==TupleType:
        inlist = list(inlist)
    elif type(inlist)!=ListType:
        return [inlist]
    x = 0
    while 1:
        try:
            y = inlist[x]
            if type(y) in [ListType, TupleType]:
                ylen = len(y)
                if ylen == 1:
                    inlist[x] = y[0]
                    if type(inlist[x]) in [ListType,TupleType]:
                        x = x-1 #(to deal with that element)
                elif ylen == 0:
                    del(inlist[x])
                    x = x-1 # list has been shortened, will raise exception with tuples...
                else:
                    inlist[x:x+1]=list(y)
            x = x+1
        except IndexError:
            break
    return inlist
+
+
def oldest_buggy_collapse(listin):
    '''Always return a one-level list of all the non-list elements in listin

    Oldest (recursive, reduce-based) version; retained for historical
    reference only -- use collapse() instead.'''
    if type(listin) == ListType:
        return reduce(lambda x,y: x+y, map(collapse, listin), [])
    else: return [listin]
+
def oldest_buggy_collapse2(seqin):
    '''Recursive reduce-based flatten of lists and tuples; retained for
    historical reference only -- use collapse2() instead.'''
    if type(seqin) in [ListType, TupleType]:
        return reduce(lambda x,y: x+y, map(collapse2, seqin), [])
    else:
        return [seqin]
+
diff --git a/intern/python/modules/vrml/utils/err.py b/intern/python/modules/vrml/utils/err.py
new file mode 100644
index 00000000000..3c6591a6873
--- /dev/null
+++ b/intern/python/modules/vrml/utils/err.py
@@ -0,0 +1,37 @@
+'''
+err.py Encapsulated writing to sys.stderr
+
+The idea of this module is that, for a GUI system (or a more advanced UI),
+you can just import a different err module (or object) and keep
+your code the same. (For instance, you often want a status window
+which flashes warnings and info, and have error messages pop up an
+alert to get immediate attention.)
+'''
+
+import sys
+
+def err(message, Code=0):
+ '''
+ report an error, with an optional error code
+ '''
+ # Code doubles as a flag: any true value adds the "#<Code>" tag, so an
+ # explicit code of 0 is indistinguishable from "no code".
+ if Code:
+ sys.stderr.write('Error #%i: %s\n'%(Code,message))
+ else:
+ sys.stderr.write('Error: %s\n'%message)
+def warn(message, Code=0):
+ '''
+ report a warning, with an optional error code
+ '''
+ # Same convention as err(): a true Code adds the "#<Code>" tag.
+ if Code:
+ sys.stderr.write('Warning #%i: %s\n'%(Code,message))
+ else:
+ sys.stderr.write('Warning: %s\n'%message)
+def info(message, Code=0):
+ '''
+ report information/status, with an optional error code
+ '''
+ # Same convention as err(): a true Code adds the "#<Code>" tag.
+ if Code:
+ sys.stderr.write('Info #%i: %s\n'%(Code,message))
+ else:
+ sys.stderr.write('Info: %s\n'%message)
+
diff --git a/intern/python/modules/vrml/utils/namespace.py b/intern/python/modules/vrml/utils/namespace.py
new file mode 100644
index 00000000000..dd9f0b7dea6
--- /dev/null
+++ b/intern/python/modules/vrml/utils/namespace.py
@@ -0,0 +1,225 @@
+'''
+NameSpace v0.04:
+
+A "NameSpace" is an object wrapper around a _base dictionary
+which allows chaining searches for an 'attribute' within that
+dictionary, or any other namespace which is defined as part
+of the search path (depending on the downcascade variable, is
+either the hier-parents or the hier-children).
+
+You can assign attributes to the namespace normally, and read
+them normally. (setattr, getattr, a.this = that, a.this)
+
+I use namespaces for writing parsing systems, where I want to
+differentiate between sources (have multiple sources that I can
+swap into or out of the namespace), but want to be able to get
+at them through a single interface. There is a test function
+which gives you an idea how to use the system.
+
+In general, call NameSpace(someobj), where someobj is a dictionary,
+a module, or another NameSpace, and it will return a NameSpace which
+wraps up the keys of someobj. To add a namespace to the NameSpace,
+just call the append (or hier_addchild) method of the parent namespace
+with the child as argument.
+
+### NOTE: if you pass a module (or anything else with a dict attribute),
+names which start with '__' will be removed. You can avoid this by
+pre-copying the dict of the object and passing it as the arg to the
+__init__ method.
+
+### NOTE: to properly pickle and/or copy module-based namespaces you
+will likely want to do: from mcf.utils import extpkl, copy_extend
+
+### Changes:
+ 97.05.04 -- Altered to use standard hierobj interface, cleaned up
+ interface by removing the "addparent" function, which is reachable
+ by simply appending to the __parent__ attribute, though normally
+ you would want to use the hier_addchild or append functions, since
+ they let both objects know about the addition (and therefore the
+ relationship will be restored if the objects are stored and unstored)
+
+ 97.06.26 -- Altered the getattr function to reduce the number of
+ situations in which infinite lookup loops could be created
+ (unfortunately, the cost is rather high). Made the downcascade
+ variable harden (resolve) at init, instead of checking for every
+ lookup. (see next note)
+
+ 97.08.29 -- Discovered some _very_ weird behaviour when storing
+ namespaces in mcf.store dbases. Resolved it by storing the
+ __namespace_cascade__ attribute as a normal attribute instead of
+ using the __unstore__ mechanism... There was really no need to
+ use the __unstore__, but figuring out how a functions saying
+ self.__dict__['__namespace_cascade__'] = something
+ print `self.__dict__['__namespace_cascade__']` can print nothing
+ is a bit beyond me. (without causing an exception, mind you)
+
+ 97.11.15 Found yet more errors, decided to make two different
+ classes of namespace. Those based on modules now act similar
+ to dummy objects, that is, they let you modify the original
+ instead of keeping a copy of the original and modifying that.
+
+ 98.03.15 -- Eliminated custom pickling methods as they are no longer
+ needed for use with Python 1.5final
+
+ 98.03.15 -- Fixed bug in items, values, etceteras with module-type
+ base objects.
+'''
+import copy, types, string
+
+from mcf.utils import hierobj
+
+class NameSpace(hierobj.Hierobj):
+ '''
+ An hierarchic NameSpace, allows specification of upward or downward
+ chaining search for resolving names
+ '''
+ # NOTE(review): children=[] is a mutable default argument shared by all
+ # calls that omit it -- whether Hierobj.__init__ copies it is not
+ # visible here; confirm against hierobj.
+ def __init__(self, val = None, parents=None, downcascade=1,children=[]):
+ '''
+ A NameSpace can be initialised with a dictionary, a dummied
+ dictionary, another namespace, or something which has a __dict__
+ attribute.
+ Note that downcascade is hardened (resolved) at init, not at
+ lookup time.
+ '''
+ hierobj.Hierobj.__init__(self, parents, children)
+ # direct __dict__ writes are required here: __setattr__ below would
+ # otherwise redirect these assignments into _base
+ self.__dict__['__downcascade__'] = downcascade # boolean
+ if val is None:
+ self.__dict__['_base'] = {}
+ else:
+ if type( val ) == types.StringType:
+ # this is a reference to a module which has been pickled
+ val = __import__( val, {},{}, string.split( val, '.') )
+ try:
+ # See if val's a dummy-style object which has a _base
+ self.__dict__['_base']=copy.copy(val._base)
+ except (AttributeError,KeyError):
+ # not a dummy-style object... see if it has a dict attribute...
+ try:
+ # modules are wrapped directly so edits affect the original;
+ # other objects get a shallow copy of their __dict__
+ if type(val) != types.ModuleType:
+ val = copy.copy(val.__dict__)
+ except (AttributeError, KeyError):
+ pass
+ # whatever val is now, it's going to become our _base...
+ self.__dict__['_base']=val
+ # harden (resolve) the reference to downcascade to speed attribute lookups
+ if downcascade: self.__dict__['__namespace_cascade__'] = self.__childlist__
+ else: self.__dict__['__namespace_cascade__'] = self.__parent__
+ def __setattr__(self, var, val):
+ '''
+ An attempt to set an attribute should place the attribute in the _base
+ dictionary through a setitem call.
+ '''
+ # Note that we use standard attribute access to allow ObStore loading if the
+ # ._base isn't yet available.
+ try:
+ self._base[var] = val
+ except TypeError:
+ # _base isn't dict-like (e.g. a wrapped module): set normally
+ setattr(self._base, var, val)
+ def __getattr__(self,var):
+## print '__getattr__', var
+ # only reached when normal instance/class lookup fails
+ return self.__safe_getattr__(var, {}) # the {} is a stopdict
+
+ def __safe_getattr__(self, var,stopdict):
+ '''
+ We have a lot to do in this function, if the attribute is an unloaded
+ but stored attribute, we need to load it. If it's not in the stored
+ attributes, then we need to load the _base, then see if it's in the
+ _base.
+ If it's not found by then, then we need to check our resource namespaces
+ and see if it's in them.
+ '''
+ # stopdict, keyed by id(), records namespaces already visited so the
+ # cascade walk cannot loop forever through cyclic relationships.
+ # we don't have a __storedattr__ or it doesn't have this key...
+ if var != '_base':
+ try:
+ return self._base[var]
+ except (KeyError,TypeError), x:
+ # x is bound but unused (old-style except syntax)
+ try:
+ return getattr(self._base, var)
+ except AttributeError:
+ pass
+ try: # with pickle, it tries to get the __setstate__ before restoration is complete
+ for cas in self.__dict__['__namespace_cascade__']:
+ try:
+ stopdict[id(cas)] # if succeeds, we've already tried this child
+ # no need to do anything, if none of the children succeeds we will
+ # raise an AttributeError
+ except KeyError:
+ stopdict[id(cas)] = None
+ return cas.__safe_getattr__(var,stopdict)
+ except (KeyError,AttributeError):
+ pass
+ raise AttributeError, var
+ # The four dict-style accessors below try the mapping interface first,
+ # then fall back to the wrapped object's __dict__.
+ # NOTE(review): if both attempts fail they fall through and implicitly
+ # return None rather than raising -- callers must tolerate that.
+ def items(self):
+ try:
+ return self._base.items()
+ except AttributeError:
+ pass
+ try:
+ return self._base.__dict__.items()
+ except AttributeError:
+ pass
+ def keys(self):
+ try:
+ return self._base.keys()
+ except AttributeError:
+ pass
+ try:
+ return self._base.__dict__.keys()
+ except AttributeError:
+ pass
+ def has_key( self, key ):
+ try:
+ return self._base.has_key( key)
+ except AttributeError:
+ pass
+ try:
+ return self._base.__dict__.has_key( key)
+ except AttributeError:
+ pass
+ def values(self):
+ try:
+ return self._base.values()
+ except AttributeError:
+ pass
+ try:
+ return self._base.__dict__.values()
+ except AttributeError:
+ pass
+
+ def __getinitargs__(self):
+ # pickle support: module-based namespaces are stored by module name
+ # (re-imported on unpickle by __init__'s StringType branch)
+ if type( self._base ) is types.ModuleType:
+ base = self._base.__name__
+ else:
+ base = self._base
+ return (base, self.__parent__, self.__downcascade__, self.__childlist__)
+ def __getstate__(self):
+ # all state is restored via __getinitargs__; nothing extra to pickle
+ return None
+ def __setstate__(self,*args):
+ pass
+ def __deepcopy__(self, memo=None):
+ d = id(self)
+ if memo is None:
+ memo = {}
+ elif memo.has_key(d):
+ return memo[d]
+ # NOTE(review): memo[d] is read above but never assigned, so repeated
+ # references to this object are not shared in the copy.
+ if type(self._base) == types.ModuleType:
+ rest = tuple(map( copy.deepcopy, (self.__parent__, self.__downcascade__, self.__childlist__) ))
+ new = apply(self.__class__, (self._base,)+rest )
+ else:
+ # NOTE(review): unlike the module branch, this returns the TUPLE of
+ # deep-copied constructor arguments instead of constructing a new
+ # NameSpace from them -- looks like a bug.
+ new = tuple(map( copy.deepcopy, (self._base, self.__parent__, self.__downcascade__, self.__childlist__) ))
+ return new
+## def __del__( self, id=id ):
+## print 'del namespace', id( self )
+
+
+def test():
+ # Demonstration: wrap the string module in a NameSpace, then chain in a
+ # plain-dictionary namespace and the math module so attribute lookups
+ # cascade across all three sources.
+ import string
+ a = NameSpace(string)
+ # the NameSpace keeps its own reference; the local name can go
+ del(string)
+ a.append(NameSpace({'a':23,'b':42}))
+ import math
+ a.append(NameSpace(math))
+ print 'The returned object should allow access to the attributes of the string,\nand math modules, and two simple variables "a" and "b" (== 23 and42 respectively)'
+ return a
+
+
diff --git a/intern/python/modules/vrml/utils/typeclasses.py b/intern/python/modules/vrml/utils/typeclasses.py
new file mode 100644
index 00000000000..ed798dfe3da
--- /dev/null
+++ b/intern/python/modules/vrml/utils/typeclasses.py
@@ -0,0 +1,50 @@
+'''
+Classes of Types
+
+Often you want to be able to say:
+ if type(obj) in MutableTypes:
+ yada
+
+This module is intended to make that easier.
+Just import and use :)
+'''
+import types
+
+# Type-membership lists intended for "type(obj) in SomeTypes" tests.
+# regarray()/regnumpy() below may append further entries at import time.
+MutableTypes = [ types.ListType, types.DictType, types.InstanceType ]
+MutableSequenceTypes = [ types.ListType ]
+SequenceTypes = [ types.ListType, types.StringType, types.TupleType ]
+NumericTypes = [ types.IntType, types.FloatType, types.LongType, types.ComplexType ]
+MappingTypes = [ types.DictType ]
+
+def regarray():
+ '''Register array.ArrayType in the sequence/mutable type lists.
+ Returns 1 on success (or when already registered), 0 when the
+ array module cannot be imported.'''
+ # NOTE(review): "import array" below binds a function-local name, so
+ # this globals() guard never fires from this function's own import --
+ # repeated calls will append duplicate entries to the lists.
+ if globals().has_key('array'):
+ return 1
+ try:
+ import array
+ SequenceTypes.append( array.ArrayType )
+ MutableTypes.append( array.ArrayType )
+ MutableSequenceTypes.append( array.ArrayType )
+ return 1
+ except ImportError:
+ return 0
+
+def regnumpy():
+ '''
+ Call if you want to register numpy arrays
+ according to their types.
+ '''
+ # Despite the docstring's "numpy", this registers the older Numeric
+ # package's ArrayType.
+ # NOTE(review): same guard flaw as regarray() -- "import Numeric" is
+ # function-local, so the globals() check never fires from here and
+ # repeated calls append duplicate entries.
+ if globals().has_key('Numeric'):
+ return 1
+ try:
+ import Numeric
+ SequenceTypes.append( Numeric.ArrayType )
+ MutableTypes.append( Numeric.ArrayType )
+ MutableSequenceTypes.append( Numeric.ArrayType )
+ return 1
+ except ImportError:
+ return 0
+
+# for now, I'm going to always register these, if the module becomes part of the base distribution
+# it might be better to leave it out so numpy isn't always getting loaded...
+# (import-time side effect: mutates the type lists above when the
+# array/Numeric modules are available; failures are silently ignored)
+regarray()
+regnumpy() \ No newline at end of file