git.blender.org/blender.git
author     Campbell Barton <ideasman42@gmail.com>    2008-12-22 18:05:07 +0300
committer  Campbell Barton <ideasman42@gmail.com>    2008-12-22 18:05:07 +0300
commit     61587627975c4528f0d61e9f30f3d7589c6d6c35 (patch)
tree       6dd39755a0ea0eb52f555abe5ec2fbe974c7fd72 /release
parent     601fb5e6fab8ab47863201654403851fb723f69b (diff)

WIP - support for VRML PROTO's

Diffstat (limited to 'release')
-rwxr-xr-x  release/scripts/import_web3d.py  |  569
1 files changed, 371 insertions(+), 198 deletions(-)
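
The patch extends the importer's line scanner and the vrmlNode parser to recognise VRML97 PROTO declarations and to resolve 'IS' field references when a PROTO is instanced. Below is a minimal sketch of the construct being parsed; the node and field names are illustrative only, mirroring the examples quoted in comments within the diff ('field SFColor legColor .8 .4 .7', 'diffuseColor IS legColor', 'legColor 1 0 0').

# A hand-written VRML97 fragment of the kind the new code paths handle; it is not
# taken from any file in the commit.
SAMPLE_VRML_PROTO = """
PROTO SeatProto [
    field SFColor seatColor .6 .6 .1
    field SFColor legColor  .8 .4 .7
] {
    Shape {
        appearance Appearance {
            material Material { diffuseColor IS legColor }
        }
        geometry Box { size 2 2 2 }
    }
}

SeatProto { legColor 1 0 0 }
"""
# At import time the PROTO declaration is registered under its name, the instance
# 'SeatProto { legColor 1 0 0 }' looks it up, and 'diffuseColor IS legColor' resolves
# to the instance value '1 0 0' (falling back to the declared default '.8 .4 .7').
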
diff --git a/release/scripts/import_web3d.py b/release/scripts/import_web3d.py
index 06cde898ef2..a4b367b6df8 100755
--- a/release/scripts/import_web3d.py
+++ b/release/scripts/import_web3d.py
@@ -201,6 +201,7 @@ def vrmlFormat(data):
NODE_NORMAL = 1 # {}
NODE_ARRAY = 2 # []
NODE_REFERENCE = 3 # USE foobar
+# NODE_PROTO = 4 #
lines = []
@@ -211,6 +212,10 @@ def getNodePreText(i, words):
if i>=len(lines):
break
+ '''
+ elif lines[i].startswith('PROTO'):
+ return NODE_PROTO, i+1
+ '''
elif lines[i]=='{':
# words.append(lines[i]) # no need
# print "OK"
@@ -239,11 +244,45 @@ def getNodePreText(i, words):
# print "error value!!!", words
return 0, -1
-def is_nodeline(i, words):
+def is_protoline(i, words, proto_field_defs):
+
+ node_type = NODE_NORMAL
+ # print "PRPTOTOOOO ---- test", i, lines[i]
+ if not lines[i].startswith('PROTO' ):
+ # print 'not a proto'
+ return node_type, -1
+
+ words[:] = lines[i].split() # TODO - maybe multiline proto's exist?
+
+ i+=1
+ if lines[i].startswith('['): # ']\n'
+ while not lines[i].startswith(']'):
+ proto_field_defs.append(lines[i].split())
+ i+=1
+ i+=1 # advance past the ']\n'
+
+ if lines[i].startswith('{'):
+ node_type = NODE_NORMAL
+ i+=1 # advance past the '{\n'
+
+ elif lines[i].startswith('['): # does this even exist?? added just in case
+ node_type = NODE_ARRAY
+ i+=1 # advance past the '[\n'
+
+ print "IS PROTO", i, node_type
+ return node_type, i
+
+def is_nodeline(i, words, proto_field_defs):
if not lines[i][0].isalpha():
return 0, 0
+ # Is this a prototype??
+ proto_type, new_i = is_protoline(i, words, proto_field_defs)
+ if new_i != -1:
+ return proto_type, new_i
+
+
# Simple "var [" type
if lines[i+1] == '[':
if lines[i].count('"') % 2 == 0:
@@ -253,6 +292,7 @@ def is_nodeline(i, words):
node_type, new_i = getNodePreText(i, words)
if not node_type:
+ print "not node_type", lines[i]
return 0, 0
# Ok, we have a { after some values
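
A simplified, standalone sketch of the scan is_protoline() performs over the pre-tokenised lines list: read the 'PROTO ...' header words, collect the '[ ... ]' block of field definitions, then step past the bracket that opens the PROTO body. The helper name and the explicit skip of the '[' line are assumptions of the sketch, not code from the patch.

def scan_proto_header(lines, i):
    # Returns (header_words, next_index, field_defs), or (None, -1, []) when the
    # line at index i is not a PROTO declaration.
    if not lines[i].startswith('PROTO'):
        return None, -1, []
    words = lines[i].split()
    field_defs = []
    i += 1
    if lines[i].startswith('['):
        i += 1                                   # step past the '['
        while not lines[i].startswith(']'):
            field_defs.append(lines[i].split())  # e.g. ['field', 'SFColor', 'legColor', '.8', '.4', '.7']
            i += 1
        i += 1                                   # step past the ']'
    if lines[i].startswith('{') or lines[i].startswith('['):
        i += 1                                   # step past the bracket opening the body
    return words, i, field_defs
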
@@ -311,10 +351,10 @@ def is_numline(i):
return True
except:
return False
-
+
class vrmlNode(object):
- __slots__ = 'id', 'fields', 'node_type', 'parent', 'children', 'parent', 'array_data', 'reference', 'lineno', 'filename', 'blendObject', 'DEF_NAMESPACE', 'ROUTE_IPO_NAMESPACE', 'FIELD_NAMESPACE', 'x3dNode'
+ __slots__ = 'id', 'fields', 'proto_field_defs', 'proto_fields', 'node_type', 'parent', 'children', 'parent', 'array_data', 'reference', 'lineno', 'filename', 'blendObject', 'DEF_NAMESPACE', 'ROUTE_IPO_NAMESPACE', 'PROTO_NAMESPACE', 'x3dNode'
def __init__(self, parent, node_type, lineno):
self.id = None
self.node_type = node_type
@@ -333,7 +373,12 @@ class vrmlNode(object):
# Store in the root node because each inline file needs its own root node and its own namespace
self.DEF_NAMESPACE = None
self.ROUTE_IPO_NAMESPACE = None
+ '''
self.FIELD_NAMESPACE = None
+ '''
+
+
+ self.PROTO_NAMESPACE = None
self.reference = None
@@ -343,16 +388,25 @@ class vrmlNode(object):
return
self.fields = [] # fields have no order, in some cases rool level values are not unique so dont use a dict
+ self.proto_field_defs = [] # proto field definition eg: "field SFColor seatColor .6 .6 .1"
+ self.proto_fields = [] # proto field usage "diffuseColor IS seatColor"
self.children = []
self.array_data = [] # use for arrays of data - should only be for NODE_ARRAY types
# Only available from the root node
+ '''
def getFieldDict(self):
if self.FIELD_NAMESPACE != None:
return self.FIELD_NAMESPACE
else:
return self.parent.getFieldDict()
+ '''
+ def getProtoDict(self):
+ if self.PROTO_NAMESPACE != None:
+ return self.PROTO_NAMESPACE
+ else:
+ return self.parent.getProtoDict()
def getDefDict(self):
if self.DEF_NAMESPACE != None:
@@ -368,9 +422,10 @@ class vrmlNode(object):
def setRoot(self, filename):
self.filename = filename
- self.FIELD_NAMESPACE = {}
+ # self.FIELD_NAMESPACE = {}
self.DEF_NAMESPACE = {}
self.ROUTE_IPO_NAMESPACE = {}
+ self.PROTO_NAMESPACE = {}
def isRoot(self):
if self.filename == None:
@@ -404,15 +459,17 @@ class vrmlNode(object):
return self.id[0]
return None
- def getDefName(self):
+ def getSpecialTypeName(self, typename):
self_real = self.getRealNode()
-
- if 'DEF' in self_real.id:
- # print self_real.id
- return self_real.id[ list(self_real.id).index('DEF')+1 ]
- else:
- return None
-
+ try: return self_real.id[ list(self_real.id).index(typename)+1 ]
+ except: return None
+
+
+ def getDefName(self):
+ return self.getSpecialTypeName('DEF')
+
+ def getProtoName(self):
+ return self.getSpecialTypeName('PROTO')
def getChildrenBySpec(self, node_spec): # spec could be Transform, Shape, Appearance
self_real = self.getRealNode()
@@ -449,7 +506,21 @@ class vrmlNode(object):
ancestry.append(self)
for child in self.getRealNode().children:
if child not in ancestry:
- child.getSerialized(results, ancestry)
+ # We don't want to load PROTOs, they are only references
+ # We could enforce this elsewhere
+
+ # Only add this in a very special case
+ # where the parent of this object is not the real parent
+ # - In this case we have added the proto as a child to a node instancing it.
+ # This is a bit arbitrary, but it's how PROTOs are handled by this importer.
+ if child.getProtoName() == None:
+ child.getSerialized(results, ancestry)
+ else:
+ print "\n\n\n\nTEST!!!!!!!!!!!!!"
+ print child.getProtoName(), self.getSpec()
+ if child.getProtoName()==self.getSpec():
+ print "PROTO!!!!!!!!!!!!!!!!\n"
+ child.getSerialized(results, ancestry)
return results
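
The condition above keeps PROTO definition nodes out of the serialized scene except in the one case where the current node is an instance of that PROTO (its spec equals the child's PROTO name), since __parse() attaches the definition node as a child of each instance. A compact restatement of the rule, with a helper name invented for the sketch:

def should_serialize_child(parent_spec, child_proto_name):
    if child_proto_name is None:
        return True                         # ordinary child node: always descend
    return child_proto_name == parent_spec  # PROTO definition: only under its own instance
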
@@ -462,7 +533,7 @@ class vrmlNode(object):
child.searchNodeTypeID(node_spec, results)
return results
- def getFieldName(self, field):
+ def getFieldName(self, field, ancestry):
self_real = self.getRealNode() # incase we're an instance
for f in self_real.fields:
@@ -470,14 +541,51 @@ class vrmlNode(object):
if f and f[0] == field:
# print '\tfound field', f
- return f[1:]
+ if len(f)>=3 and f[1] == 'IS': # eg: 'diffuseColor IS legColor'
+ field_id = f[2]
+
+ # print "\n\n\n\n\n\nFOND IS!!!"
+ f_proto_lookup = None
+ i = len(ancestry)
+ while i:
+ i -= 1
+ node = ancestry[i]
+
+ # Get the default value from the proto; this can be overridden by the proto instance
+ # 'field SFColor legColor .8 .4 .7'
+ for f_def in node.proto_field_defs:
+ if len(f_def) >= 4:
+ if f_def[0]=='field' and f_def[2]==field_id:
+ f_proto_lookup = f_def[3:]
+
+ # Node instance: will be one level up from the proto node in the ancestry list, but NOT its parent.
+ # This is the setting as defined by the instance; including this setting is optional,
+ # and it will override the default PROTO value
+ # eg: 'legColor 1 0 0'
+ for f_def in node.fields:
+ # print "\n\n\n\n\nASSSSSSSS", f_def
+ if len(f_def) >= 2:
+ if f_def[0]==field_id:
+ f_proto_lookup = f_def[1:]
+ # print "\n\n\n\nWOOOOHOOO", field, f_proto_lookup
+
+ return f_proto_lookup
+ else:
+ # Not using a proto
+ return f[1:]
# print '\tfield not found', field
+
+
+ # See if this is a proto name
+
+
+
return None
- def getFieldAsInt(self, field, default):
+ def getFieldAsInt(self, field, default, ancestry):
self_real = self.getRealNode() # incase we're an instance
- f = self_real.getFieldName(field)
+ f = self_real.getFieldName(field, ancestry)
if f==None: return default
if ',' in f: f = f[:f.index(',')] # strip after the comma
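
The new 'IS' branch in getFieldName() is the heart of the patch, and the reason the getFieldAs*() helpers throughout the file now take an extra ancestry argument. Below is a simplified, standalone version of that lookup, assuming the same token-list data shapes used above; it mirrors the patch's loop order but is not the importer's exact code.

def resolve_is_field(field_id, ancestry):
    value = None
    for node in reversed(ancestry):  # walk from the nearest ancestor outwards, as the patch does
        # Default from the PROTO declaration, e.g. ['field', 'SFColor', 'legColor', '.8', '.4', '.7']
        for f_def in getattr(node, 'proto_field_defs', []):
            if len(f_def) >= 4 and f_def[0] == 'field' and f_def[2] == field_id:
                value = f_def[3:]
        # Optional override set on the PROTO instance, e.g. ['legColor', '1', '0', '0']
        for f_def in getattr(node, 'fields', []):
            if len(f_def) >= 2 and f_def[0] == field_id:
                value = f_def[1:]
    return value
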
@@ -491,10 +599,10 @@ class vrmlNode(object):
print '\tvalue "%s" could not be used as an int for field "%s"' % (f[0], field)
return default
- def getFieldAsFloat(self, field, default):
+ def getFieldAsFloat(self, field, default, ancestry):
self_real = self.getRealNode() # incase we're an instance
- f = self_real.getFieldName(field)
+ f = self_real.getFieldName(field, ancestry)
if f==None: return default
if ',' in f: f = f[:f.index(',')] # strip after the comma
@@ -508,10 +616,10 @@ class vrmlNode(object):
print '\tvalue "%s" could not be used as a float for field "%s"' % (f[0], field)
return default
- def getFieldAsFloatTuple(self, field, default):
+ def getFieldAsFloatTuple(self, field, default, ancestry):
self_real = self.getRealNode() # incase we're an instance
- f = self_real.getFieldName(field)
+ f = self_real.getFieldName(field, ancestry)
if f==None: return default
# if ',' in f: f = f[:f.index(',')] # strip after the comma
@@ -532,10 +640,10 @@ class vrmlNode(object):
print '\tvalue "%s" could not be used as a float tuple for field "%s"' % (f, field)
return default
- def getFieldAsBool(self, field, default):
+ def getFieldAsBool(self, field, default, ancestry):
self_real = self.getRealNode() # incase we're an instance
- f = self_real.getFieldName(field)
+ f = self_real.getFieldName(field, ancestry)
if f==None: return default
if ',' in f: f = f[:f.index(',')] # strip after the comma
@@ -551,10 +659,10 @@ class vrmlNode(object):
print '\t"%s" could not be used as a bool for field "%s"' % (f[1], field)
return default
- def getFieldAsString(self, field, default=None):
+ def getFieldAsString(self, field, default, ancestry):
self_real = self.getRealNode() # incase we're an instance
- f = self_real.getFieldName(field)
+ f = self_real.getFieldName(field, ancestry)
if f==None: return default
if len(f) < 1:
print '\t"%s" wrong length for string conversion for field "%s"' % (f, field)
@@ -576,7 +684,7 @@ class vrmlNode(object):
print '\tvalue "%s" could not be used as a string for field "%s"' % (f[0], field)
return default
- def getFieldAsArray(self, field, group):
+ def getFieldAsArray(self, field, group, ancestry):
'''
For this parser arrays are children
'''
@@ -593,7 +701,7 @@ class vrmlNode(object):
# For x3d, should work ok with vrml too
# for x3d arrays are fields, vrml they are nodes, annoying but not tooo bad.
- data_split = self.getFieldName(field)
+ data_split = self.getFieldName(field, ancestry)
if not data_split:
return []
array_data = ' '.join(data_split)
@@ -660,7 +768,7 @@ class vrmlNode(object):
return new_array
- def getFieldAsStringArray(self, field):
+ def getFieldAsStringArray(self, field, ancestry):
'''
Get a list of strings
'''
@@ -698,7 +806,7 @@ class vrmlNode(object):
def __repr__(self):
level = self.getLevel()
ind = ' ' * level
-
+ print self.id
if self.node_type==NODE_REFERENCE:
brackets = ''
elif self.node_type==NODE_NORMAL:
@@ -717,7 +825,7 @@ class vrmlNode(object):
text += ind + "(reference node)\n"
return text
- text += ind + 'FIELDS:\n'
+ text += ind + 'FIELDS:' + str(len(self.fields)) + '\n'
for i,item in enumerate(self.fields):
text += ind + 'FIELD:\n'
@@ -743,7 +851,10 @@ class vrmlNode(object):
# If we were an inline then try load the file
if self.node_type == NODE_NORMAL and self.getSpec() == 'Inline':
- url = self.getFieldAsString('url', None)
+
+ ancestry = [] # Warning! - PROTOs using this won't work at all.
+ url = self.getFieldAsString('url', None, ancestry)
+ del ancestry
if url != None:
urls = []
@@ -786,7 +897,7 @@ class vrmlNode(object):
lines.insert(0, 'root_node____')
lines.append('}')
'''
- ff = open('/test.txt', 'w')
+ ff = open('/root/test.txt', 'w')
ff.writelines([l+'\n' for l in lines])
'''
@@ -801,8 +912,10 @@ class vrmlNode(object):
return new_i
def __parse(self, i):
- # print 'parsing at', i,
- # print i, self.id, self.lineno
+ '''
+ print 'parsing at', i,
+ print i, self.id, self.lineno
+ '''
l = lines[i]
if l=='[':
@@ -811,9 +924,12 @@ class vrmlNode(object):
i+=1
else:
words = []
- node_type, new_i = is_nodeline(i, words)
+ proto_field_defs = []
+
+ node_type, new_i = is_nodeline(i, words, proto_field_defs)
if not node_type: # fail for parsing new node.
- raise "error"
+ print "Failed to parse new node"
+ raise ValueError
if self.node_type==NODE_REFERENCE:
# Only assign the reference and quit
@@ -824,38 +940,55 @@ class vrmlNode(object):
return new_i
self.id = tuple(words)
+ self.proto_field_defs[:] = proto_field_defs
# fill in DEF/USE
key = self.getDefName()
-
if key != None:
self.getDefDict()[ key ] = self
+ key = self.getProtoName()
+ proto_dict = self.getProtoDict()
+ if key != None:
+ proto_dict[ key ] = self
+
+ else: # If we're a proto instance, add the proto node as our child.
+ spec = self.getSpec()
+ try:
+ self.children.append( proto_dict[spec] )
+ #pass
+ except:
+ pass
+
+ del spec
+
+ del proto_dict, key
+
i = new_i
# print self.id
ok = True
while ok:
l = lines[i]
- # print '\t', i, l
+ # print '\tDEBUG:', i, self.node_type, l
if l=='':
i+=1
continue
if l=='}':
- if self.node_type != NODE_NORMAL:
- print 'wrong node ending, expected an } ' + str(i)
- raise ""
+ if self.node_type != NODE_NORMAL: # also ends proto nodes, we may want a type for these too.
+ print 'wrong node ending, expected an } ' + str(i) + ' ' + str(self.node_type)
+ raise ValueError
### print "returning", i
return i+1
if l==']':
if self.node_type != NODE_ARRAY:
- print 'wrong node ending, expected a ] ' + str(i)
- raise ""
+ print 'wrong node ending, expected a ] ' + str(i) + ' ' + str(self.node_type)
+ raise ValueError
### print "returning", i
return i+1
- node_type, new_i = is_nodeline(i, [])
+ node_type, new_i = is_nodeline(i, [], [])
if node_type: # check text\n{
### print '\t\tgroup', i
child = vrmlNode(self, node_type, i)
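
A minimal sketch of the registration step added to __parse() above: a PROTO declaration is stored in the root's PROTO_NAMESPACE under its name, and any later node whose spec matches a registered PROTO gets the definition node appended to its children, which is what getSerialized() and the 'IS' lookup rely on. The helper name and the explicit dict argument are assumptions of the sketch.

def register_or_instance(node, proto_dict):
    name = node.getProtoName()
    if name is not None:
        proto_dict[name] = node              # 'PROTO Foo [ ... ] { ... }': remember the definition
    else:
        proto = proto_dict.get(node.getSpec())
        if proto is not None:
            node.children.append(proto)      # 'Foo { ... }': attach the definition to its instance
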
@@ -943,6 +1076,7 @@ class vrmlNode(object):
# this IS a key but the previous value was not a key, ot it was a defined field.
if (not iskey(field_context[-1])) or ((len(field_context)==3 and field_context[1]=='IS')):
field_list.append(field_context)
+
field_context = [value[j]]
else:
# The last item was not a value, multiple keys are needed in some cases.
@@ -965,17 +1099,26 @@ class vrmlNode(object):
if value[0]=='field':
# field SFFloat creaseAngle 4
+ ''' #oldproto
self.getFieldDict()[value[2]] = value[3:] # skip the first 3 values
+ '''
+ #self.proto_field_defs.append(value)
+ self.proto_field_defs.append(value)
else:
# Get referenced field
if len(value) >= 3 and value[1]=='IS':
+ self.fields.append(value)
+
+ '''
try:
value = [ value[0] ] + self.getFieldDict()[ value[2] ]
except:
print '\tWarning, field could not be found:', value, 'TODO add support for exposedField'
print '\t', self.getFieldDict()
self.fields.append(value)
+ '''
else:
+ print "ADDING '%s'" % value
self.fields.append(value)
i+=1
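
With the change above, declarations such as 'field SFFloat creaseAngle 4' now land in proto_field_defs instead of the old shared FIELD_NAMESPACE, while references such as 'diffuseColor IS legColor' are stored verbatim in fields so getFieldName() can resolve them later against the ancestry. A compact restatement, with a function name invented for the sketch:

def classify_parsed_field(value, fields, proto_field_defs):
    if value[0] == 'field':
        proto_field_defs.append(value)   # e.g. ['field', 'SFFloat', 'creaseAngle', '4']
    else:
        fields.append(value)             # ordinary fields and 'IS' references alike
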
@@ -1014,13 +1157,14 @@ def vrml_parse(path):
lines.append('}')
# Use for testing our parsed output, so we can check on line numbers.
- '''
- ff = open('/test.txt', 'w')
+
+ ff = open('/root/test.txt', 'w')
ff.writelines([l+'\n' for l in lines])
- '''
+ ff.close()
+
# Now evaluate it
- node_type, new_i = is_nodeline(0, [])
+ node_type, new_i = is_nodeline(0, [], [])
if not node_type:
return None, 'Error: VRML file has no starting Node'
@@ -1195,12 +1339,12 @@ def translateScale(sca):
mat[2][2] = sca[2]
return mat
-def translateTransform(node):
- cent = node.getFieldAsFloatTuple('center', None) # (0.0, 0.0, 0.0)
- rot = node.getFieldAsFloatTuple('rotation', None) # (0.0, 0.0, 1.0, 0.0)
- sca = node.getFieldAsFloatTuple('scale', None) # (1.0, 1.0, 1.0)
- scaori = node.getFieldAsFloatTuple('scaleOrientation', None) # (0.0, 0.0, 1.0, 0.0)
- tx = node.getFieldAsFloatTuple('translation', None) # (0.0, 0.0, 0.0)
+def translateTransform(node, ancestry):
+ cent = node.getFieldAsFloatTuple('center', None, ancestry) # (0.0, 0.0, 0.0)
+ rot = node.getFieldAsFloatTuple('rotation', None, ancestry) # (0.0, 0.0, 1.0, 0.0)
+ sca = node.getFieldAsFloatTuple('scale', None, ancestry) # (1.0, 1.0, 1.0)
+ scaori = node.getFieldAsFloatTuple('scaleOrientation', None, ancestry) # (0.0, 0.0, 1.0, 0.0)
+ tx = node.getFieldAsFloatTuple('translation', None, ancestry) # (0.0, 0.0, 0.0)
if cent:
cent_mat = TranslationMatrix(Vector(cent)).resize4x4()
@@ -1232,11 +1376,11 @@ def translateTransform(node):
return new_mat
-def translateTexTransform(node):
- cent = node.getFieldAsFloatTuple('center', None) # (0.0, 0.0)
- rot = node.getFieldAsFloat('rotation', None) # 0.0
- sca = node.getFieldAsFloatTuple('scale', None) # (1.0, 1.0)
- tx = node.getFieldAsFloatTuple('translation', None) # (0.0, 0.0)
+def translateTexTransform(node, ancestry):
+ cent = node.getFieldAsFloatTuple('center', None, ancestry) # (0.0, 0.0)
+ rot = node.getFieldAsFloat('rotation', None, ancestry) # 0.0
+ sca = node.getFieldAsFloatTuple('scale', None, ancestry) # (1.0, 1.0)
+ tx = node.getFieldAsFloatTuple('translation', None, ancestry) # (0.0, 0.0)
if cent:
@@ -1279,17 +1423,17 @@ def getFinalMatrix(node, mtx, ancestry):
mtx = Matrix()
for node_tx in transform_nodes:
- mat = translateTransform(node_tx)
+ mat = translateTransform(node_tx, ancestry)
mtx = mtx * mat
return mtx
-def importMesh_IndexedFaceSet(geom, bpyima):
+def importMesh_IndexedFaceSet(geom, bpyima, ancestry):
# print geom.lineno, geom.id, vrmlNode.DEF_NAMESPACE.keys()
- ccw = geom.getFieldAsBool('ccw', True)
- ifs_colorPerVertex = geom.getFieldAsBool('colorPerVertex', True) # per vertex or per face
- ifs_normalPerVertex = geom.getFieldAsBool('normalPerVertex', True)
+ ccw = geom.getFieldAsBool('ccw', True, ancestry)
+ ifs_colorPerVertex = geom.getFieldAsBool('colorPerVertex', True, ancestry) # per vertex or per face
+ ifs_normalPerVertex = geom.getFieldAsBool('normalPerVertex', True, ancestry)
# This is odd how point is inside Coordinate
@@ -1298,14 +1442,14 @@ def importMesh_IndexedFaceSet(geom, bpyima):
coord = geom.getChildBySpec('Coordinate') # works for x3d and vrml
- if coord: ifs_points = coord.getFieldAsArray('point', 3)
+ if coord: ifs_points = coord.getFieldAsArray('point', 3, ancestry)
else: coord = []
if not coord:
print '\tWarnint: IndexedFaceSet has no points'
return None, ccw
- ifs_faces = geom.getFieldAsArray('coordIndex', 0)
+ ifs_faces = geom.getFieldAsArray('coordIndex', 0, ancestry)
coords_tex = None
if ifs_faces: # In rare cases this causes problems - no faces but UVs???
@@ -1315,8 +1459,8 @@ def importMesh_IndexedFaceSet(geom, bpyima):
coords_tex = geom.getChildBySpec('TextureCoordinate')
if coords_tex:
- ifs_texpoints = coords_tex.getFieldAsArray('point', 2)
- ifs_texfaces = geom.getFieldAsArray('texCoordIndex', 0)
+ ifs_texpoints = coords_tex.getFieldAsArray('point', 2, ancestry)
+ ifs_texfaces = geom.getFieldAsArray('texCoordIndex', 0, ancestry)
if not ifs_texpoints:
# IF we have no coords, then dont bother
@@ -1329,11 +1473,11 @@ def importMesh_IndexedFaceSet(geom, bpyima):
vcolor_spot = None # spot color when we dont have an array of colors
if vcolor:
# float to char
- ifs_vcol = [[int(c*256) for c in col] for col in vcolor.getFieldAsArray('color', 3)]
- ifs_color_index = geom.getFieldAsArray('colorIndex', 0)
+ ifs_vcol = [[int(c*256) for c in col] for col in vcolor.getFieldAsArray('color', 3, ancestry)]
+ ifs_color_index = geom.getFieldAsArray('colorIndex', 0, ancestry)
if not ifs_vcol:
- vcolor_spot = [int(c*256) for c in vcolor.getFieldAsFloatTuple('color', [])]
+ vcolor_spot = [int(c*256) for c in vcolor.getFieldAsFloatTuple('color', [], ancestry)]
# Convert faces into somthing blender can use
edges = []
@@ -1546,18 +1690,18 @@ def importMesh_IndexedFaceSet(geom, bpyima):
return bpymesh, ccw
-def importMesh_IndexedLineSet(geom):
+def importMesh_IndexedLineSet(geom, ancestry):
# VRML not x3d
#coord = geom.getChildByName('coord') # 'Coordinate'
coord = geom.getChildBySpec('Coordinate') # works for x3d and vrml
- if coord: points = coord.getFieldAsArray('point', 3)
+ if coord: points = coord.getFieldAsArray('point', 3, ancestry)
else: points = []
if not points:
print '\tWarning: IndexedLineSet had no points'
return None
- ils_lines = geom.getFieldAsArray('coordIndex', 0)
+ ils_lines = geom.getFieldAsArray('coordIndex', 0, ancestry)
lines = []
line = []
@@ -1596,11 +1740,11 @@ def importMesh_IndexedLineSet(geom):
return bpycurve
-def importMesh_PointSet(geom):
+def importMesh_PointSet(geom, ancestry):
# VRML not x3d
#coord = geom.getChildByName('coord') # 'Coordinate'
coord = geom.getChildBySpec('Coordinate') # works for x3d and vrml
- if coord: points = coord.getFieldAsArray('point', 3)
+ if coord: points = coord.getFieldAsArray('point', 3, ancestry)
else: points = []
# vcolor = geom.getChildByName('color') # blender dosnt have per vertex color
@@ -1614,26 +1758,26 @@ GLOBALS['CIRCLE_DETAIL'] = 12
MATRIX_Z_TO_Y = RotationMatrix(90, 4, 'x')
-def importMesh_Sphere(geom):
+def importMesh_Sphere(geom, ancestry):
# bpymesh = bpy.data.meshes.new()
- diameter = geom.getFieldAsFloat('radius', 0.5) * 2 # * 2 for the diameter
+ diameter = geom.getFieldAsFloat('radius', 0.5, ancestry) * 2 # * 2 for the diameter
bpymesh = Mesh.Primitives.UVsphere(GLOBALS['CIRCLE_DETAIL'], GLOBALS['CIRCLE_DETAIL'], diameter)
bpymesh.transform(MATRIX_Z_TO_Y)
return bpymesh
-def importMesh_Cylinder(geom):
+def importMesh_Cylinder(geom, ancestry):
# bpymesh = bpy.data.meshes.new()
- diameter = geom.getFieldAsFloat('radius', 1.0) * 2 # * 2 for the diameter
- height = geom.getFieldAsFloat('height', 2)
+ diameter = geom.getFieldAsFloat('radius', 1.0, ancestry) * 2 # * 2 for the diameter
+ height = geom.getFieldAsFloat('height', 2, ancestry)
bpymesh = Mesh.Primitives.Cylinder(GLOBALS['CIRCLE_DETAIL'], diameter, height)
bpymesh.transform(MATRIX_Z_TO_Y)
# Warning - Rely in the order Blender adds verts
# not nice design but wont change soon.
- bottom = geom.getFieldAsBool('bottom', True)
- side = geom.getFieldAsBool('side', True)
- top = geom.getFieldAsBool('top', True)
+ bottom = geom.getFieldAsBool('bottom', True, ancestry)
+ side = geom.getFieldAsBool('side', True, ancestry)
+ top = geom.getFieldAsBool('top', True, ancestry)
if not top: # last vert is top center of tri fan.
bpymesh.verts.delete([(GLOBALS['CIRCLE_DETAIL']+GLOBALS['CIRCLE_DETAIL'])+1])
@@ -1647,18 +1791,18 @@ def importMesh_Cylinder(geom):
return bpymesh
-def importMesh_Cone(geom):
+def importMesh_Cone(geom, ancestry):
# bpymesh = bpy.data.meshes.new()
- diameter = geom.getFieldAsFloat('bottomRadius', 1.0) * 2 # * 2 for the diameter
- height = geom.getFieldAsFloat('height', 2)
+ diameter = geom.getFieldAsFloat('bottomRadius', 1.0, ancestry) * 2 # * 2 for the diameter
+ height = geom.getFieldAsFloat('height', 2, ancestry)
bpymesh = Mesh.Primitives.Cone(GLOBALS['CIRCLE_DETAIL'], diameter, height)
bpymesh.transform(MATRIX_Z_TO_Y)
# Warning - Rely in the order Blender adds verts
# not nice design but wont change soon.
- bottom = geom.getFieldAsBool('bottom', True)
- side = geom.getFieldAsBool('side', True)
+ bottom = geom.getFieldAsBool('bottom', True, ancestry)
+ side = geom.getFieldAsBool('side', True, ancestry)
if not bottom: # last vert is on the bottom
bpymesh.verts.delete([GLOBALS['CIRCLE_DETAIL']+1])
@@ -1667,10 +1811,10 @@ def importMesh_Cone(geom):
return bpymesh
-def importMesh_Box(geom):
+def importMesh_Box(geom, ancestry):
# bpymesh = bpy.data.meshes.new()
- size = geom.getFieldAsFloatTuple('size', (2.0, 2.0, 2.0))
+ size = geom.getFieldAsFloatTuple('size', (2.0, 2.0, 2.0), ancestry)
bpymesh = Mesh.Primitives.Cube(1.0)
# Scale the box to the size set
@@ -1714,7 +1858,7 @@ def importShape(node, ancestry):
textx = appr.getChildBySpec('TextureTransform')
if textx:
- texmtx = translateTexTransform(textx)
+ texmtx = translateTexTransform(textx, ancestry)
@@ -1726,28 +1870,28 @@ def importShape(node, ancestry):
# all values between 0.0 and 1.0, defaults from VRML docs
bpymat = bpy.data.materials.new()
- bpymat.amb = mat.getFieldAsFloat('ambientIntensity', 0.2)
- bpymat.rgbCol = mat.getFieldAsFloatTuple('diffuseColor', [0.8, 0.8, 0.8])
+ bpymat.amb = mat.getFieldAsFloat('ambientIntensity', 0.2, ancestry)
+ bpymat.rgbCol = mat.getFieldAsFloatTuple('diffuseColor', [0.8, 0.8, 0.8], ancestry)
# NOTE - blender dosnt support emmisive color
# Store in mirror color and approximate with emit.
- emit = mat.getFieldAsFloatTuple('emissiveColor', [0.0, 0.0, 0.0])
+ emit = mat.getFieldAsFloatTuple('emissiveColor', [0.0, 0.0, 0.0], ancestry)
bpymat.mirCol = emit
bpymat.emit = (emit[0]+emit[1]+emit[2])/3.0
- bpymat.hard = int(1+(510*mat.getFieldAsFloat('shininess', 0.2))) # 0-1 -> 1-511
- bpymat.specCol = mat.getFieldAsFloatTuple('specularColor', [0.0, 0.0, 0.0])
- bpymat.alpha = 1.0 - mat.getFieldAsFloat('transparency', 0.0)
+ bpymat.hard = int(1+(510*mat.getFieldAsFloat('shininess', 0.2, ancestry))) # 0-1 -> 1-511
+ bpymat.specCol = mat.getFieldAsFloatTuple('specularColor', [0.0, 0.0, 0.0], ancestry)
+ bpymat.alpha = 1.0 - mat.getFieldAsFloat('transparency', 0.0, ancestry)
if bpymat.alpha < 0.999:
bpymat.mode |= Material.Modes.ZTRANSP
if ima:
- ima_url = ima.getFieldAsString('url')
+ ima_url = ima.getFieldAsString('url', None, ancestry)
if ima_url==None:
- try: ima_url = ima.getFieldAsStringArray('url')[0] # in some cases we get a list of images.
+ try: ima_url = ima.getFieldAsStringArray('url', None, ancestry)[0] # in some cases we get a list of images.
except: ima_url = None
if ima_url==None:
@@ -1772,8 +1916,8 @@ def importShape(node, ancestry):
else:
bpymat.setTexture(0, texture, Texture.TexCo.UV, Texture.MapTo.COL)
- ima_repS = ima.getFieldAsBool('repeatS', True)
- ima_repT = ima.getFieldAsBool('repeatT', True)
+ ima_repS = ima.getFieldAsBool('repeatS', True, ancestry)
+ ima_repT = ima.getFieldAsBool('repeatT', True, ancestry)
# To make this work properly we'd need to scale the UV's too, better to ignore th
# texture.repeat = max(1, ima_repS * 512), max(1, ima_repT * 512)
@@ -1785,19 +1929,19 @@ def importShape(node, ancestry):
geom_spec = geom.getSpec()
ccw = True
if geom_spec == 'IndexedFaceSet':
- bpydata, ccw = importMesh_IndexedFaceSet(geom, bpyima)
+ bpydata, ccw = importMesh_IndexedFaceSet(geom, bpyima, ancestry)
elif geom_spec == 'IndexedLineSet':
- bpydata = importMesh_IndexedLineSet(geom)
+ bpydata = importMesh_IndexedLineSet(geom, ancestry)
elif geom_spec == 'PointSet':
- bpydata = importMesh_PointSet(geom)
+ bpydata = importMesh_PointSet(geom, ancestry)
elif geom_spec == 'Sphere':
- bpydata = importMesh_Sphere(geom)
+ bpydata = importMesh_Sphere(geom, ancestry)
elif geom_spec == 'Box':
- bpydata = importMesh_Box(geom)
+ bpydata = importMesh_Box(geom, ancestry)
elif geom_spec == 'Cylinder':
- bpydata = importMesh_Cylinder(geom)
+ bpydata = importMesh_Cylinder(geom, ancestry)
elif geom_spec == 'Cone':
- bpydata = importMesh_Cone(geom)
+ bpydata = importMesh_Cone(geom, ancestry)
else:
print '\tWarning: unsupported type "%s"' % geom_spec
return
@@ -1810,8 +1954,8 @@ def importShape(node, ancestry):
bpyob = node.blendObject = bpy.data.scenes.active.objects.new(bpydata)
if type(bpydata) == Types.MeshType:
- is_solid = geom.getFieldAsBool('solid', True)
- creaseAngle = geom.getFieldAsFloat('creaseAngle', None)
+ is_solid = geom.getFieldAsBool('solid', True, ancestry)
+ creaseAngle = geom.getFieldAsFloat('creaseAngle', None, ancestry)
if creaseAngle != None:
bpydata.maxSmoothAngle = 1+int(min(79, creaseAngle * RAD_TO_DEG))
@@ -1852,17 +1996,17 @@ def importShape(node, ancestry):
bpyob.setMatrix( getFinalMatrix(node, None, ancestry) )
-def importLamp_PointLight(node):
+def importLamp_PointLight(node, ancestry):
vrmlname = node.getDefName()
if not vrmlname: vrmlname = 'PointLight'
- # ambientIntensity = node.getFieldAsFloat('ambientIntensity', 0.0) # TODO
- # attenuation = node.getFieldAsFloatTuple('attenuation', (1.0, 0.0, 0.0)) # TODO
- color = node.getFieldAsFloatTuple('color', (1.0, 1.0, 1.0))
- intensity = node.getFieldAsFloat('intensity', 1.0) # max is documented to be 1.0 but some files have higher.
- location = node.getFieldAsFloatTuple('location', (0.0, 0.0, 0.0))
- # is_on = node.getFieldAsBool('on', True) # TODO
- radius = node.getFieldAsFloat('radius', 100.0)
+ # ambientIntensity = node.getFieldAsFloat('ambientIntensity', 0.0, ancestry) # TODO
+ # attenuation = node.getFieldAsFloatTuple('attenuation', (1.0, 0.0, 0.0), ancestry) # TODO
+ color = node.getFieldAsFloatTuple('color', (1.0, 1.0, 1.0), ancestry)
+ intensity = node.getFieldAsFloat('intensity', 1.0, ancestry) # max is documented to be 1.0 but some files have higher.
+ location = node.getFieldAsFloatTuple('location', (0.0, 0.0, 0.0), ancestry)
+ # is_on = node.getFieldAsBool('on', True, ancestry) # TODO
+ radius = node.getFieldAsFloat('radius', 100.0, ancestry)
bpylamp = bpy.data.lamps.new()
bpylamp.setType('Lamp')
@@ -1874,15 +2018,15 @@ def importLamp_PointLight(node):
return bpylamp, mtx
-def importLamp_DirectionalLight(node):
+def importLamp_DirectionalLight(node, ancestry):
vrmlname = node.getDefName()
if not vrmlname: vrmlname = 'DirectLight'
# ambientIntensity = node.getFieldAsFloat('ambientIntensity', 0.0) # TODO
- color = node.getFieldAsFloatTuple('color', (1.0, 1.0, 1.0))
- direction = node.getFieldAsFloatTuple('direction', (0.0, 0.0, -1.0))
- intensity = node.getFieldAsFloat('intensity', 1.0) # max is documented to be 1.0 but some files have higher.
- # is_on = node.getFieldAsBool('on', True) # TODO
+ color = node.getFieldAsFloatTuple('color', (1.0, 1.0, 1.0), ancestry)
+ direction = node.getFieldAsFloatTuple('direction', (0.0, 0.0, -1.0), ancestry)
+ intensity = node.getFieldAsFloat('intensity', 1.0, ancestry) # max is documented to be 1.0 but some files have higher.
+ # is_on = node.getFieldAsBool('on', True, ancestry) # TODO
bpylamp = bpy.data.lamps.new(vrmlname)
bpylamp.setType('Sun')
@@ -1896,20 +2040,20 @@ def importLamp_DirectionalLight(node):
# looks like default values for beamWidth and cutOffAngle were swapped in VRML docs.
-def importLamp_SpotLight(node):
+def importLamp_SpotLight(node, ancestry):
vrmlname = node.getDefName()
if not vrmlname: vrmlname = 'SpotLight'
- # ambientIntensity = geom.getFieldAsFloat('ambientIntensity', 0.0) # TODO
- # attenuation = geom.getFieldAsFloatTuple('attenuation', (1.0, 0.0, 0.0)) # TODO
- beamWidth = node.getFieldAsFloat('beamWidth', 1.570796) * RAD_TO_DEG # max is documented to be 1.0 but some files have higher.
- color = node.getFieldAsFloatTuple('color', (1.0, 1.0, 1.0))
- cutOffAngle = node.getFieldAsFloat('cutOffAngle', 0.785398) * RAD_TO_DEG # max is documented to be 1.0 but some files have higher.
- direction = node.getFieldAsFloatTuple('direction', (0.0, 0.0, -1.0))
- intensity = node.getFieldAsFloat('intensity', 1.0) # max is documented to be 1.0 but some files have higher.
- location = node.getFieldAsFloatTuple('location', (0.0, 0.0, 0.0))
- # is_on = node.getFieldAsBool('on', True) # TODO
- radius = node.getFieldAsFloat('radius', 100.0)
+ # ambientIntensity = geom.getFieldAsFloat('ambientIntensity', 0.0, ancestry) # TODO
+ # attenuation = geom.getFieldAsFloatTuple('attenuation', (1.0, 0.0, 0.0), ancestry) # TODO
+ beamWidth = node.getFieldAsFloat('beamWidth', 1.570796, ancestry) * RAD_TO_DEG # max is documented to be 1.0 but some files have higher.
+ color = node.getFieldAsFloatTuple('color', (1.0, 1.0, 1.0), ancestry)
+ cutOffAngle = node.getFieldAsFloat('cutOffAngle', 0.785398, ancestry) * RAD_TO_DEG # max is documented to be 1.0 but some files have higher.
+ direction = node.getFieldAsFloatTuple('direction', (0.0, 0.0, -1.0), ancestry)
+ intensity = node.getFieldAsFloat('intensity', 1.0, ancestry) # max is documented to be 1.0 but some files have higher.
+ location = node.getFieldAsFloatTuple('location', (0.0, 0.0, 0.0), ancestry)
+ # is_on = node.getFieldAsBool('on', True, ancestry) # TODO
+ radius = node.getFieldAsFloat('radius', 100.0, ancestry)
bpylamp = bpy.data.lamps.new(vrmlname)
bpylamp.setType('Spot')
@@ -1935,14 +2079,14 @@ def importLamp_SpotLight(node):
def importLamp(node, spec, ancestry):
if spec=='PointLight':
- bpylamp,mtx = importLamp_PointLight(node)
+ bpylamp,mtx = importLamp_PointLight(node, ancestry)
elif spec=='DirectionalLight':
- bpylamp,mtx = importLamp_DirectionalLight(node)
+ bpylamp,mtx = importLamp_DirectionalLight(node, ancestry)
elif spec=='SpotLight':
- bpylamp,mtx = importLamp_SpotLight(node)
+ bpylamp,mtx = importLamp_SpotLight(node, ancestry)
else:
print "Error, not a lamp"
- raise ""
+ raise ValueError
bpyob = node.blendObject = bpy.data.scenes.active.objects.new(bpylamp)
bpyob.setMatrix( getFinalMatrix(node, mtx, ancestry) )
@@ -1952,11 +2096,11 @@ def importViewpoint(node, ancestry):
name = node.getDefName()
if not name: name = 'Viewpoint'
- fieldOfView = node.getFieldAsFloat('fieldOfView', 0.785398) * RAD_TO_DEG # max is documented to be 1.0 but some files have higher.
- # jump = node.getFieldAsBool('jump', True)
- orientation = node.getFieldAsFloatTuple('orientation', (0.0, 0.0, 1.0, 0.0))
- position = node.getFieldAsFloatTuple('position', (0.0, 0.0, 0.0))
- description = node.getFieldAsString('description', '')
+ fieldOfView = node.getFieldAsFloat('fieldOfView', 0.785398, ancestry) * RAD_TO_DEG # max is documented to be 1.0 but some files have higher.
+ # jump = node.getFieldAsBool('jump', True, ancestry)
+ orientation = node.getFieldAsFloatTuple('orientation', (0.0, 0.0, 1.0, 0.0), ancestry)
+ position = node.getFieldAsFloatTuple('position', (0.0, 0.0, 0.0), ancestry)
+ description = node.getFieldAsString('description', '', ancestry)
bpycam = bpy.data.cameras.new(name)
@@ -1980,59 +2124,72 @@ def importTransform(node, ancestry):
#def importTimeSensor(node):
-def translatePositionInterpolator(node, ipo):
- key = node.getFieldAsArray('key', 0)
- keyValue = node.getFieldAsArray('keyValue', 3)
-
- loc_x = ipo.addCurve('LocX')
- loc_y = ipo.addCurve('LocY')
- loc_z = ipo.addCurve('LocZ')
+def translatePositionInterpolator(node, ipo, ancestry):
+ key = node.getFieldAsArray('key', 0, ancestry)
+ keyValue = node.getFieldAsArray('keyValue', 3, ancestry)
+ try:
+ loc_x = ipo.addCurve('LocX')
+ loc_y = ipo.addCurve('LocY')
+ loc_z = ipo.addCurve('LocZ')
+ except ValueError:
+ return
+
loc_x.interpolation = loc_y.interpolation = loc_z.interpolation = Blender.IpoCurve.InterpTypes.LINEAR
for i, time in enumerate(key):
- x,y,z = keyValue[i]
+ try: x,y,z = keyValue[i]
+ except: continue
loc_x.append((time,x))
loc_y.append((time,y))
loc_z.append((time,z))
-def translateOrientationInterpolator(node, ipo):
- key = node.getFieldAsArray('key', 0)
- keyValue = node.getFieldAsArray('keyValue', 4)
+def translateOrientationInterpolator(node, ipo, ancestry):
+ key = node.getFieldAsArray('key', 0, ancestry)
+ keyValue = node.getFieldAsArray('keyValue', 4, ancestry)
- rot_x = ipo.addCurve('RotX')
- rot_y = ipo.addCurve('RotY')
- rot_z = ipo.addCurve('RotZ')
+ try:
+ rot_x = ipo.addCurve('RotX')
+ rot_y = ipo.addCurve('RotY')
+ rot_z = ipo.addCurve('RotZ')
+ except ValueError:
+ return
rot_x.interpolation = rot_y.interpolation = rot_z.interpolation = Blender.IpoCurve.InterpTypes.LINEAR
for i, time in enumerate(key):
+ try: x,y,z,w = keyValue[i]
+ except: continue
- mtx = translateRotation(keyValue[i])
+ mtx = translateRotation((x,y,z,w))
eul = mtx.toEuler()
rot_x.append((time,eul.x/10.0))
rot_y.append((time,eul.y/10.0))
rot_z.append((time,eul.z/10.0))
# Untested!
-def translateScalarInterpolator(node, ipo):
- key = node.getFieldAsArray('key', 0)
- keyValue = node.getFieldAsArray('keyValue', 4)
+def translateScalarInterpolator(node, ipo, ancestry):
+ key = node.getFieldAsArray('key', 0, ancestry)
+ keyValue = node.getFieldAsArray('keyValue', 4, ancestry)
- sca_x = ipo.addCurve('SizeX')
- sca_y = ipo.addCurve('SizeY')
- sca_z = ipo.addCurve('SizeZ')
+ try:
+ sca_x = ipo.addCurve('ScaleX')
+ sca_y = ipo.addCurve('ScaleY')
+ sca_z = ipo.addCurve('ScaleZ')
+ except ValueError:
+ return
sca_x.interpolation = sca_y.interpolation = sca_z.interpolation = Blender.IpoCurve.InterpTypes.LINEAR
for i, time in enumerate(key):
- x,y,z = keyValue[i]
+ try: x,y,z = keyValue[i]
+ except: continue
sca_x.append((time,x/10.0))
sca_y.append((time,y/10.0))
sca_z.append((time,z/10.0))
-def translateTimeSensor(node, ipo):
+def translateTimeSensor(node, ipo, ancestry):
'''
Apply a time sensor to an IPO, VRML has many combinations of loop/start/stop/cycle times
to give different results, for now just do the basics
@@ -2041,15 +2198,15 @@ def translateTimeSensor(node, ipo):
time_cu = ipo.addCurve('Time')
time_cu.interpolation = Blender.IpoCurve.InterpTypes.LINEAR
- cycleInterval = node.getFieldAsFloat('cycleInterval', None)
+ cycleInterval = node.getFieldAsFloat('cycleInterval', None, ancestry)
- startTime = node.getFieldAsFloat('startTime', 0.0)
- stopTime = node.getFieldAsFloat('stopTime', 250.0)
+ startTime = node.getFieldAsFloat('startTime', 0.0, ancestry)
+ stopTime = node.getFieldAsFloat('stopTime', 250.0, ancestry)
if cycleInterval != None:
stopTime = startTime+cycleInterval
- loop = node.getFieldAsBool('loop', False)
+ loop = node.getFieldAsBool('loop', False, ancestry)
time_cu.append((1+startTime, 0.0))
time_cu.append((1+stopTime, 1.0/10.0))# anoying, the UI uses /10
@@ -2059,11 +2216,14 @@ def translateTimeSensor(node, ipo):
time_cu.extend = Blender.IpoCurve.ExtendTypes.CYCLIC # or - EXTRAP, CYCLIC_EXTRAP, CONST,
-def importRoute(node):
+def importRoute(node, ancestry):
'''
Animation route only at the moment
'''
+ if not hasattr(node, 'fields'):
+ return
+
routeIpoDict = node.getRouteIpoDict()
def getIpo(id):
@@ -2100,22 +2260,22 @@ ROUTE champFly001.bindTime TO vpTs.set_startTime
if to_type == 'set_position':
ipo = getIpo(to_id)
set_data_from_node = defDict[from_id]
- translatePositionInterpolator(set_data_from_node, ipo)
+ translatePositionInterpolator(set_data_from_node, ipo, ancestry)
- if to_type == 'set_orientation':
+ if to_type in ('set_orientation', 'rotation'):
ipo = getIpo(to_id)
set_data_from_node = defDict[from_id]
- translateOrientationInterpolator(set_data_from_node, ipo)
+ translateOrientationInterpolator(set_data_from_node, ipo, ancestry)
if to_type == 'set_scale':
ipo = getIpo(to_id)
set_data_from_node = defDict[from_id]
- translateScalarInterpolator(set_data_from_node, ipo)
+ translateScalarInterpolator(set_data_from_node, ipo, ancestry)
- elif from_type == 'bindTime':
+ elif from_type =='bindTime':
ipo = getIpo(from_id)
time_node = defDict[to_id]
- translateTimeSensor(time_node, ipo)
+ translateTimeSensor(time_node, ipo, ancestry)
@@ -2147,6 +2307,12 @@ def load_web3d(path, PREF_FLAT=False, PREF_CIRCLE_DIV=16, HELPER_FUNC = None):
# continue
spec = node.getSpec()
+ '''
+ prefix = node.getPrefix()
+ if prefix=='PROTO':
+ pass
+ el
+ '''
if spec=='Shape':
importShape(node, ancestry)
elif spec in ('PointLight', 'DirectionalLight', 'SpotLight'):
@@ -2174,7 +2340,7 @@ def load_web3d(path, PREF_FLAT=False, PREF_CIRCLE_DIV=16, HELPER_FUNC = None):
# After we import all nodes, route events - anim paths
for node, ancestry in all_nodes:
- importRoute(node)
+ importRoute(node, ancestry)
for node, ancestry in all_nodes:
if node.isRoot():
@@ -2289,24 +2455,31 @@ if __name__ == '__main__':
# load_web3d('/fe/x3d/www.web3d.org/x3d/content/examples/Basic/StudentProjects/PlayRoom.x3d') # invalid UVs
-'''
-import os
-# files = os.popen('find /fe/wrl -iname "*.wrl"').readlines()
-# files = os.popen('find /fe/x3d -iname "*.x3d"').readlines()
-files = os.popen('find /fe/x3d/X3dExamplesSavage -iname "*.x3d"').readlines()
-files.sort()
-tot = len(files)
-for i, f in enumerate(files):
- if i < 12803 or i > 1000000:
- continue
- #if i != 12686:
- # continue
+
+def test():
+ import os
- f = f.strip()
- print f, i, tot
- sce = bpy.data.scenes.new(f.split('/')[-1])
- bpy.data.scenes.active = sce
- # Window.
- load_web3d(f, PREF_FLAT=True)
-'''
\ No newline at end of file
+ files = os.popen('find /fe/wrl -iname "*.wrl"').readlines()
+ # files = os.popen('find /fe/x3d -iname "*.x3d"').readlines()
+ # files = os.popen('find /fe/x3d/X3dExamplesSavage -iname "*.x3d"').readlines()
+
+ files.sort()
+ tot = len(files)
+ for i, f in enumerate(files):
+ #if i < 126 or i > 1000000:
+ # continue
+
+ if i < 181 or i > 1000000:
+ continue
+
+ #if i != 12686:
+ # continue
+
+ f = f.strip()
+ print f, i, tot
+ sce = bpy.data.scenes.new(str(i) + '_' + f.split('/')[-1])
+ bpy.data.scenes.active = sce
+ # Window.
+ load_web3d(f, PREF_FLAT=True)
+
\ No newline at end of file