diff options
Diffstat (limited to 'io_scene_x3d')
-rw-r--r-- | io_scene_x3d/__init__.py | 167 | ||||
-rw-r--r-- | io_scene_x3d/export_x3d.py | 1346 | ||||
-rw-r--r-- | io_scene_x3d/import_x3d.py | 2656 |
3 files changed, 4169 insertions, 0 deletions
# ##### BEGIN GPL LICENSE BLOCK #####
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software Foundation,
# Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
#
# ##### END GPL LICENSE BLOCK #####

# <pep8 compliant>

bl_info = {
    "name": "Web3D X3D/VRML format",
    "author": "Campbell Barton, Bart",
    "blender": (2, 5, 7),
    "api": 35622,
    "location": "File > Import-Export",
    "description": "Import-Export X3D, Import VRML",
    "warning": "",
    "wiki_url": "http://wiki.blender.org/index.php/Extensions:2.5/Py/"\
        "Scripts/Import-Export/Web3D",
    "tracker_url": "",
    "support": 'OFFICIAL',
    "category": "Import-Export"}

# To support reload properly, try to access a package var; if it's there,
# reload everything.
# FIX: the original shim only reloaded export_x3d, so "Reload Scripts" never
# picked up changes to the importer — reload import_x3d as well.
if "bpy" in locals():
    import imp
    if "import_x3d" in locals():
        imp.reload(import_x3d)
    if "export_x3d" in locals():
        imp.reload(export_x3d)


import bpy
from bpy.props import StringProperty, BoolProperty, EnumProperty
from bpy_extras.io_utils import ImportHelper, ExportHelper, axis_conversion, path_reference_mode


class ImportX3D(bpy.types.Operator, ImportHelper):
    '''Import an X3D or VRML file'''
    # FIX: tooltip typo "Import and X3D" -> "Import an X3D"
    bl_idname = "import_scene.x3d"
    bl_label = "Import X3D/VRML"

    filename_ext = ".x3d"
    filter_glob = StringProperty(default="*.x3d;*.wrl", options={'HIDDEN'})

    axis_forward = EnumProperty(
            name="Forward",
            items=(('X', "X Forward", ""),
                   ('Y', "Y Forward", ""),
                   ('Z', "Z Forward", ""),
                   ('-X', "-X Forward", ""),
                   ('-Y', "-Y Forward", ""),
                   ('-Z', "-Z Forward", ""),
                   ),
            default='Z',
            )

    axis_up = EnumProperty(
            name="Up",
            items=(('X', "X Up", ""),
                   ('Y', "Y Up", ""),
                   ('Z', "Z Up", ""),
                   ('-X', "-X Up", ""),
                   ('-Y', "-Y Up", ""),
                   ('-Z', "-Z Up", ""),
                   ),
            default='Y',
            )

    def execute(self, context):
        """Build the axis-conversion matrix and hand off to the importer."""
        from . import import_x3d

        keywords = self.as_keywords(ignore=("axis_forward", "axis_up", "filter_glob"))
        global_matrix = axis_conversion(from_forward=self.axis_forward, from_up=self.axis_up).to_4x4()
        keywords["global_matrix"] = global_matrix

        return import_x3d.load(self, context, **keywords)


class ExportX3D(bpy.types.Operator, ExportHelper):
    '''Export selection to Extensible 3D file (.x3d)'''
    bl_idname = "export_scene.x3d"
    bl_label = 'Export X3D'

    filename_ext = ".x3d"
    filter_glob = StringProperty(default="*.x3d", options={'HIDDEN'})

    use_selection = BoolProperty(name="Selection Only", description="Export selected objects only", default=False)
    use_apply_modifiers = BoolProperty(name="Apply Modifiers", description="Use transformed mesh data from each object", default=True)
    use_triangulate = BoolProperty(name="Triangulate", description="Write quads into 'IndexedTriangleSet'", default=True)
    use_normals = BoolProperty(name="Normals", description="Write normals with geometry", default=False)
    use_compress = BoolProperty(name="Compress", description="GZip the resulting file, requires a full python install", default=False)
    use_hierarchy = BoolProperty(name="Hierarchy", description="Export parent child relationships", default=True)
    use_h3d = BoolProperty(name="H3D Extensions", description="Export shaders for H3D", default=False)

    axis_forward = EnumProperty(
            name="Forward",
            items=(('X', "X Forward", ""),
                   ('Y', "Y Forward", ""),
                   ('Z', "Z Forward", ""),
                   ('-X', "-X Forward", ""),
                   ('-Y', "-Y Forward", ""),
                   ('-Z', "-Z Forward", ""),
                   ),
            default='Z',
            )

    axis_up = EnumProperty(
            name="Up",
            items=(('X', "X Up", ""),
                   ('Y', "Y Up", ""),
                   ('Z', "Z Up", ""),
                   ('-X', "-X Up", ""),
                   ('-Y', "-Y Up", ""),
                   ('-Z', "-Z Up", ""),
                   ),
            default='Y',
            )

    path_mode = path_reference_mode

    def execute(self, context):
        """Build the axis-conversion matrix and hand off to the exporter."""
        # FIX: removed unused local import 'from mathutils import Matrix'
        from . import export_x3d

        keywords = self.as_keywords(ignore=("axis_forward", "axis_up", "check_existing", "filter_glob"))
        global_matrix = axis_conversion(to_forward=self.axis_forward, to_up=self.axis_up).to_4x4()
        keywords["global_matrix"] = global_matrix

        return export_x3d.save(self, context, **keywords)


def menu_func_import(self, context):
    self.layout.operator(ImportX3D.bl_idname, text="X3D Extensible 3D (.x3d/.wrl)")


def menu_func_export(self, context):
    self.layout.operator(ExportX3D.bl_idname, text="X3D Extensible 3D (.x3d)")


def register():
    bpy.utils.register_module(__name__)

    bpy.types.INFO_MT_file_import.append(menu_func_import)
    bpy.types.INFO_MT_file_export.append(menu_func_export)


def unregister():
    bpy.utils.unregister_module(__name__)

    bpy.types.INFO_MT_file_import.remove(menu_func_import)
    bpy.types.INFO_MT_file_export.remove(menu_func_export)

# NOTES
# - blender version is hardcoded

if __name__ == "__main__":
    register()
+# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with this program; if not, write to the Free Software Foundation, +# Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA. +# +# ##### END GPL LICENSE BLOCK ##### + +# <pep8 compliant> + +# Contributors: bart:neeneenee*de, http://www.neeneenee.de/vrml, Campbell Barton + +""" +This script exports to X3D format. + +Usage: +Run this script from "File->Export" menu. A pop-up will ask whether you +want to export only selected or all relevant objects. + +Known issues: + Doesn't handle multiple materials (don't use material indices);<br> + Doesn't handle multiple UV textures on a single mesh (create a mesh for each texture);<br> + Can't get the texture array associated with material * not the UV ones; +""" + +import math +import os + +import bpy +import mathutils + +from bpy_extras.io_utils import create_derived_objects, free_derived_objects + +x3d_names_reserved = {'Anchor', 'Appearance', 'Arc2D', 'ArcClose2D', 'AudioClip', 'Background', 'Billboard', + 'BooleanFilter', 'BooleanSequencer', 'BooleanToggle', 'BooleanTrigger', 'Box', 'Circle2D', + 'Collision', 'Color', 'ColorInterpolator', 'ColorRGBA', 'component', 'Cone', 'connect', + 'Contour2D', 'ContourPolyline2D', 'Coordinate', 'CoordinateDouble', 'CoordinateInterpolator', + 'CoordinateInterpolator2D', 'Cylinder', 'CylinderSensor', 'DirectionalLight', 'Disk2D', + 'ElevationGrid', 'EspduTransform', 'EXPORT', 'ExternProtoDeclare', 'Extrusion', 'field', + 'fieldValue', 'FillProperties', 'Fog', 'FontStyle', 'GeoCoordinate', 'GeoElevationGrid', + 'GeoLocationLocation', 'GeoLOD', 'GeoMetadata', 'GeoOrigin', 'GeoPositionInterpolator', + 'GeoTouchSensor', 'GeoViewpoint', 'Group', 
'HAnimDisplacer', 'HAnimHumanoid', 'HAnimJoint', + 'HAnimSegment', 'HAnimSite', 'head', 'ImageTexture', 'IMPORT', 'IndexedFaceSet', + 'IndexedLineSet', 'IndexedTriangleFanSet', 'IndexedTriangleSet', 'IndexedTriangleStripSet', + 'Inline', 'IntegerSequencer', 'IntegerTrigger', 'IS', 'KeySensor', 'LineProperties', 'LineSet', + 'LoadSensor', 'LOD', 'Material', 'meta', 'MetadataDouble', 'MetadataFloat', 'MetadataInteger', + 'MetadataSet', 'MetadataString', 'MovieTexture', 'MultiTexture', 'MultiTextureCoordinate', + 'MultiTextureTransform', 'NavigationInfo', 'Normal', 'NormalInterpolator', 'NurbsCurve', + 'NurbsCurve2D', 'NurbsOrientationInterpolator', 'NurbsPatchSurface', + 'NurbsPositionInterpolator', 'NurbsSet', 'NurbsSurfaceInterpolator', 'NurbsSweptSurface', + 'NurbsSwungSurface', 'NurbsTextureCoordinate', 'NurbsTrimmedSurface', 'OrientationInterpolator', + 'PixelTexture', 'PlaneSensor', 'PointLight', 'PointSet', 'Polyline2D', 'Polypoint2D', + 'PositionInterpolator', 'PositionInterpolator2D', 'ProtoBody', 'ProtoDeclare', 'ProtoInstance', + 'ProtoInterface', 'ProximitySensor', 'ReceiverPdu', 'Rectangle2D', 'ROUTE', 'ScalarInterpolator', + 'Scene', 'Script', 'Shape', 'SignalPdu', 'Sound', 'Sphere', 'SphereSensor', 'SpotLight', 'StaticGroup', + 'StringSensor', 'Switch', 'Text', 'TextureBackground', 'TextureCoordinate', 'TextureCoordinateGenerator', + 'TextureTransform', 'TimeSensor', 'TimeTrigger', 'TouchSensor', 'Transform', 'TransmitterPdu', + 'TriangleFanSet', 'TriangleSet', 'TriangleSet2D', 'TriangleStripSet', 'Viewpoint', 'VisibilitySensor', + 'WorldInfo', 'X3D', 'XvlShell', 'VertexShader', 'FragmentShader', 'MultiShaderAppearance', 'ShaderAppearance'} + + +def clamp_color(col): + return tuple([max(min(c, 1.0), 0.0) for c in col]) + + +def matrix_direction_neg_z(matrix): + return (mathutils.Vector((0.0, 0.0, -1.0)) * matrix.to_3x3()).normalized()[:] + + +def prefix_quoted_str(value, prefix): + return value[0] + prefix + value[1:] + + +def build_hierarchy(objects): 
+ """ returns parent child relationships, skipping + """ + objects_set = set(objects) + par_lookup = {} + + def test_parent(parent): + while (parent is not None) and (parent not in objects_set): + parent = parent.parent + return parent + + for obj in objects: + par_lookup.setdefault(test_parent(obj.parent), []).append((obj, [])) + + for parent, children in par_lookup.items(): + for obj, subchildren in children: + subchildren[:] = par_lookup.get(obj, []) + + return par_lookup[None] + + +# ----------------------------------------------------------------------------- +# H3D Functions +# ----------------------------------------------------------------------------- +def h3d_shader_glsl_frag_patch(filepath): + h3d_file = open(filepath, 'r') + lines = [] + for l in h3d_file: + l = l.replace("uniform mat4 unfinvviewmat;", "") + l = l.replace("unfinvviewmat", "gl_ModelViewMatrixInverse") + + ''' + l = l.replace("varying vec3 varposition;", "") + l = l.replace("varposition", "gl_Vertex") # not needed int H3D + ''' + + #l = l.replace("varying vec3 varnormal;", "") + #l = l.replace("varnormal", "gl_Normal") # view normal + #l = l.replace("varnormal", "normalize(-(gl_ModelViewMatrix * gl_Vertex).xyz)") # view normal + # l = l.replace("varnormal", "gl_NormalMatrix * gl_Normal") # view normal + lines.append(l) + + h3d_file.close() + + h3d_file = open(filepath, 'w') + h3d_file.writelines(lines) + h3d_file.close() + + +# ----------------------------------------------------------------------------- +# Functions for writing output file +# ----------------------------------------------------------------------------- + +def export(file, + global_matrix, + scene, + use_apply_modifiers=False, + use_selection=True, + use_triangulate=False, + use_normals=False, + use_hierarchy=True, + use_h3d=False, + path_mode='AUTO', + ): + + # ------------------------------------------------------------------------- + # Global Setup + # 
------------------------------------------------------------------------- + import bpy_extras + from bpy_extras.io_utils import unique_name + from xml.sax.saxutils import quoteattr + + uuid_cache_object = {} # object + uuid_cache_lamp = {} # 'LA_' + object.name + uuid_cache_view = {} # object, different namespace + uuid_cache_mesh = {} # mesh + uuid_cache_material = {} # material + uuid_cache_image = {} # image + uuid_cache_world = {} # world + + # store files to copy + copy_set = set() + + fw = file.write + base_src = os.path.dirname(bpy.data.filepath) + base_dst = os.path.dirname(file.name) + filename_strip = os.path.splitext(os.path.basename(file.name))[0] + gpu_shader_cache = {} + + if use_h3d: + import gpu + gpu_shader_dummy_mat = bpy.data.materials.new('X3D_DYMMY_MAT') + gpu_shader_cache[None] = gpu.export_shader(scene, gpu_shader_dummy_mat) + + # ------------------------------------------------------------------------- + # File Writing Functions + # ------------------------------------------------------------------------- + + def writeHeader(ident): + filepath_quoted = quoteattr(os.path.basename(file.name)) + blender_ver_quoted = quoteattr('Blender %s' % bpy.app.version_string) + + fw('%s<?xml version="1.0" encoding="UTF-8"?>\n' % ident) + if use_h3d: + fw('%s<X3D profile="H3DAPI" version="1.4">\n' % ident) + else: + fw('%s<!DOCTYPE X3D PUBLIC "ISO//Web3D//DTD X3D 3.0//EN" "http://www.web3d.org/specifications/x3d-3.0.dtd">\n' % ident) + fw('%s<X3D version="3.0" profile="Immersive" xmlns:xsd="http://www.w3.org/2001/XMLSchema-instance" xsd:noNamespaceSchemaLocation="http://www.web3d.org/specifications/x3d-3.0.xsd">\n' % ident) + + ident += '\t' + fw('%s<head>\n' % ident) + ident += '\t' + fw('%s<meta name="filename" content=%s />\n' % (ident, filepath_quoted)) + fw('%s<meta name="generator" content=%s />\n' % (ident, blender_ver_quoted)) + # this info was never updated, so blender version should be enough + # fw('%s<meta name="translator" content="X3D exporter 
v1.55 (2006/01/17)" />\n' % ident) + ident = ident[:-1] + fw('%s</head>\n' % ident) + fw('%s<Scene>\n' % ident) + ident += '\t' + return ident + + def writeFooter(ident): + ident = ident[:-1] + fw('%s</Scene>\n' % ident) + ident = ident[:-1] + fw('%s</X3D>' % ident) + return ident + + def writeViewpoint(ident, obj, matrix, scene): + view_id = unique_name(obj, 'CA_' + obj.name, uuid_cache_view, clean_func=quoteattr) + + loc, quat, scale = matrix.decompose() + + ident_step = ident + (' ' * (-len(ident) + \ + fw('%s<Viewpoint ' % ident))) + fw('DEF=%s\n' % view_id) + fw(ident_step + 'centerOfRotation="0 0 0"\n') + fw(ident_step + 'position="%3.2f %3.2f %3.2f"\n' % loc[:]) + fw(ident_step + 'orientation="%3.2f %3.2f %3.2f %3.2f"\n' % (quat.axis[:] + (quat.angle, ))) + fw(ident_step + 'fieldOfView="%.3g"\n' % obj.data.angle) + fw(ident_step + '/>\n') + + def writeFog(ident, world): + if world: + mtype = world.mist_settings.falloff + mparam = world.mist_settings + else: + return + + if mparam.use_mist: + ident_step = ident + (' ' * (-len(ident) + \ + fw('%s<Fog ' % ident))) + fw('fogType="%s"\n' % ('LINEAR' if (mtype == 'LINEAR') else 'EXPONENTIAL')) + fw(ident_step + 'color="%.3g %.3g %.3g"\n' % clamp_color(world.horizon_color)) + fw(ident_step + 'visibilityRange="%.3g"\n' % mparam.depth) + fw(ident_step + '/>\n') + else: + return + + def writeNavigationInfo(ident, scene): + ident_step = ident + (' ' * (-len(ident) + \ + fw('%s<NavigationInfo ' % ident))) + fw('headlight="false"\n') + fw(ident_step + 'visibilityLimit="0.0"\n') + fw(ident_step + 'type=\'"EXAMINE", "ANY"\'\n') + fw(ident_step + 'avatarSize="0.25, 1.75, 0.75"\n') + fw(ident_step + '/>\n') + + def writeTransform_begin(ident, matrix, def_id): + ident_step = ident + (' ' * (-len(ident) + \ + fw('%s<Transform ' % ident))) + if def_id is not None: + fw('DEF=%s\n' % def_id) + else: + fw('\n') + + loc, quat, sca = matrix.decompose() + + fw(ident_step + 'translation="%.6g %.6g %.6g"\n' % loc[:]) + # fw(ident_step 
+ 'center="%.6g %.6g %.6g"\n' % (0, 0, 0)) + fw(ident_step + 'scale="%.6g %.6g %.6g"\n' % sca[:]) + fw(ident_step + 'rotation="%.6g %.6g %.6g %.6g"\n' % (quat.axis[:] + (quat.angle, ))) + fw(ident_step + '>\n') + ident += '\t' + return ident + + def writeTransform_end(ident): + ident = ident[:-1] + fw('%s</Transform>\n' % ident) + return ident + + def writeSpotLight(ident, obj, matrix, lamp, world): + # note, lamp_id is not re-used + lamp_id = unique_name(obj, 'LA_' + obj.name, uuid_cache_lamp, clean_func=quoteattr) + + if world: + ambi = world.ambient_color + amb_intensity = ((ambi[0] + ambi[1] + ambi[2]) / 3.0) / 2.5 + del ambi + else: + amb_intensity = 0.0 + + # compute cutoff and beamwidth + intensity = min(lamp.energy / 1.75, 1.0) + beamWidth = lamp.spot_size * 0.37 + # beamWidth=((lamp.spotSize*math.pi)/180.0)*.37 + cutOffAngle = beamWidth * 1.3 + + orientation = matrix_direction_neg_z(matrix) + + location = matrix.to_translation()[:] + + radius = lamp.distance * math.cos(beamWidth) + # radius = lamp.dist*math.cos(beamWidth) + ident_step = ident + (' ' * (-len(ident) + \ + fw('%s<SpotLight ' % ident))) + fw('DEF=%s\n' % lamp_id) + fw(ident_step + 'radius="%.4g"\n' % radius) + fw(ident_step + 'ambientIntensity="%.4g"\n' % amb_intensity) + fw(ident_step + 'intensity="%.4g"\n' % intensity) + fw(ident_step + 'color="%.4g %.4g %.4g"\n' % clamp_color(lamp.color)) + fw(ident_step + 'beamWidth="%.4g"\n' % beamWidth) + fw(ident_step + 'cutOffAngle="%.4g"\n' % cutOffAngle) + fw(ident_step + 'direction="%.4g %.4g %.4g"\n' % orientation) + fw(ident_step + 'location="%.4g %.4g %.4g"\n' % location) + fw(ident_step + '/>\n') + + def writeDirectionalLight(ident, obj, matrix, lamp, world): + # note, lamp_id is not re-used + lamp_id = unique_name(obj, 'LA_' + obj.name, uuid_cache_lamp, clean_func=quoteattr) + + if world: + ambi = world.ambient_color + # ambi = world.amb + amb_intensity = ((float(ambi[0] + ambi[1] + ambi[2])) / 3.0) / 2.5 + else: + ambi = 0 + amb_intensity = 
0.0 + + intensity = min(lamp.energy / 1.75, 1.0) + + orientation = matrix_direction_neg_z(matrix) + + ident_step = ident + (' ' * (-len(ident) + \ + fw('%s<DirectionalLight ' % ident))) + fw('DEF=%s\n' % lamp_id) + fw(ident_step + 'ambientIntensity="%.4g"\n' % amb_intensity) + fw(ident_step + 'color="%.4g %.4g %.4g"\n' % clamp_color(lamp.color)) + fw(ident_step + 'intensity="%.4g"\n' % intensity) + fw(ident_step + 'direction="%.4g %.4g %.4g"\n' % orientation) + fw(ident_step + '/>\n') + + def writePointLight(ident, obj, matrix, lamp, world): + # note, lamp_id is not re-used + lamp_id = unique_name(obj, 'LA_' + obj.name, uuid_cache_lamp, clean_func=quoteattr) + + if world: + ambi = world.ambient_color + # ambi = world.amb + amb_intensity = ((float(ambi[0] + ambi[1] + ambi[2])) / 3.0) / 2.5 + else: + ambi = 0.0 + amb_intensity = 0.0 + + intensity = min(lamp.energy / 1.75, 1.0) + location = matrix.to_translation()[:] + + ident_step = ident + (' ' * (-len(ident) + \ + fw('%s<PointLight ' % ident))) + fw('DEF=%s\n' % lamp_id) + fw(ident_step + 'ambientIntensity="%.4g"\n' % amb_intensity) + fw(ident_step + 'color="%.4g %.4g %.4g"\n' % clamp_color(lamp.color)) + + fw(ident_step + 'intensity="%.4g"\n' % intensity) + fw(ident_step + 'radius="%.4g" \n' % lamp.distance) + fw(ident_step + 'location="%.4g %.4g %.4g"\n' % location) + fw(ident_step + '/>\n') + + def writeIndexedFaceSet(ident, obj, mesh, matrix, world): + obj_id = unique_name(obj, 'OB_' + obj.name, uuid_cache_object, clean_func=quoteattr) + mesh_id = unique_name(mesh, 'ME_' + mesh.name, uuid_cache_mesh, clean_func=quoteattr) + mesh_id_group = prefix_quoted_str(mesh_id, 'group_') + mesh_id_coords = prefix_quoted_str(mesh_id, 'coords_') + mesh_id_normals = prefix_quoted_str(mesh_id, 'normals_') + + if not mesh.faces: + return + + texface_use_halo = 0 + texface_use_billboard = 0 + texface_use_collision = 0 + + use_halonode = False + use_billnode = False + use_collnode = False + + if mesh.uv_textures.active: # if 
mesh.faceUV: + for face in mesh.uv_textures.active.data: # for face in mesh.faces: + texface_use_halo |= face.use_halo + texface_use_billboard |= face.use_billboard + texface_use_collision |= face.use_collision + # texface_use_object_color |= face.use_object_color + + if texface_use_halo: + fw('%s<Billboard axisOfRotation="0 0 0">\n' % ident) + use_halonode = True + ident += '\t' + elif texface_use_billboard: + fw('%s<Billboard axisOfRotation="0 1 0">\n' % ident) + use_billnode = True + ident += '\t' + elif texface_use_collision: + fw('%s<Collision enabled="false">\n' % ident) + use_collnode = True + ident += '\t' + + del texface_use_halo + del texface_use_billboard + del texface_use_collision + # del texface_use_object_color + + ident = writeTransform_begin(ident, matrix, None) + + if mesh.tag: + fw('%s<Group USE=%s />\n' % (ident, mesh_id_group)) + else: + mesh.tag = True + + fw('%s<Group DEF=%s>\n' % (ident, mesh_id_group)) + ident += '\t' + + is_uv = bool(mesh.uv_textures.active) + # is_col, defined for each material + + is_coords_written = False + + mesh_materials = mesh.materials[:] + if not mesh_materials: + mesh_materials = [None] + + mesh_material_tex = [None] * len(mesh_materials) + mesh_material_mtex = [None] * len(mesh_materials) + mesh_material_images = [None] * len(mesh_materials) + + for i, material in enumerate(mesh_materials): + if material: + for mtex in material.texture_slots: + if mtex: + tex = mtex.texture + if tex and tex.type == 'IMAGE': + image = tex.image + if image: + mesh_material_tex[i] = tex + mesh_material_mtex[i] = mtex + mesh_material_images[i] = image + break + + mesh_materials_use_face_texture = [getattr(material, 'use_face_texture', True) for material in mesh_materials] + + # fast access! 
+ mesh_vertices = mesh.vertices[:] + mesh_faces = mesh.faces[:] + mesh_faces_materials = [f.material_index for f in mesh_faces] + mesh_faces_vertices = [f.vertices[:] for f in mesh_faces] + + if is_uv and True in mesh_materials_use_face_texture: + mesh_faces_image = [(fuv.image if (mesh_materials_use_face_texture[mesh_faces_materials[i]] and fuv.use_image) else mesh_material_images[mesh_faces_materials[i]]) for i, fuv in enumerate(mesh.uv_textures.active.data)] + mesh_faces_image_unique = set(mesh_faces_image) + elif len(set(mesh_material_images) | {None}) > 1: # make sure there is at least one image + mesh_faces_image = [mesh_material_images[material_index] for material_index in mesh_faces_materials] + mesh_faces_image_unique = set(mesh_faces_image) + else: + mesh_faces_image = [None] * len(mesh_faces) + mesh_faces_image_unique = {None} + + # group faces + face_groups = {} + for material_index in range(len(mesh_materials)): + for image in mesh_faces_image_unique: + face_groups[material_index, image] = [] + del mesh_faces_image_unique + + for i, (material_index, image) in enumerate(zip(mesh_faces_materials, mesh_faces_image)): + face_groups[material_index, image].append(i) + + # same as face_groups.items() but sorted so we can get predictable output. + face_groups_items = list(face_groups.items()) + face_groups_items.sort(key=lambda m: (m[0][0], getattr(m[0][1], 'name', ''))) + + for (material_index, image), face_group in face_groups_items: # face_groups.items() + if face_group: + material = mesh_materials[material_index] + + fw('%s<Shape>\n' % ident) + ident += '\t' + + is_smooth = False + is_col = (mesh.vertex_colors.active and (material is None or material.use_vertex_color_paint)) + + # kludge but as good as it gets! + for i in face_group: + if mesh_faces[i].use_smooth: + is_smooth = True + break + + # UV's and VCols split verts off which effects smoothing + # force writing normals in this case. 
+ is_force_normals = use_triangulate and is_smooth and (is_uv or is_col) + + if use_h3d: + gpu_shader = gpu_shader_cache.get(material) # material can be 'None', uses dummy cache + if gpu_shader is None: + gpu_shader = gpu_shader_cache[material] = gpu.export_shader(scene, material) + + if 1: # XXX DEBUG + gpu_shader_tmp = gpu.export_shader(scene, material) + import pprint + print('\nWRITING MATERIAL:', material.name) + del gpu_shader_tmp['fragment'] + del gpu_shader_tmp['vertex'] + pprint.pprint(gpu_shader_tmp, width=120) + #pprint.pprint(val['vertex']) + del gpu_shader_tmp + + fw('%s<Appearance>\n' % ident) + ident += '\t' + + if image and not use_h3d: + writeImageTexture(ident, image) + + if mesh_materials_use_face_texture[material_index]: + if image.use_tiles: + fw('%s<TextureTransform scale="%s %s" />\n' % (ident, image.tiles_x, image.tiles_y)) + else: + # transform by mtex + loc = mesh_material_mtex[material_index].offset[:2] + + # mtex_scale * tex_repeat + sca_x, sca_y = mesh_material_mtex[material_index].scale[:2] + + sca_x *= mesh_material_tex[material_index].repeat_x + sca_y *= mesh_material_tex[material_index].repeat_y + + # flip x/y is a sampling feature, convert to transform + if mesh_material_tex[material_index].use_flip_axis: + rot = math.pi / -2.0 + sca_x, sca_y = sca_y, -sca_x + else: + rot = 0.0 + + ident_step = ident + (' ' * (-len(ident) + \ + fw('%s<TextureTransform ' % ident))) + # fw('center="%.6g %.6g" ' % (0.0, 0.0)) + fw(ident_step + 'translation="%.6g %.6g"\n' % loc) + fw(ident_step + 'scale="%.6g %.6g"\n' % (sca_x, sca_y)) + fw(ident_step + 'rotation="%.6g"\n' % rot) + fw(ident_step + '/>\n') + + if use_h3d: + mat_tmp = material if material else gpu_shader_dummy_mat + writeMaterialH3D(ident, mat_tmp, world, + obj, gpu_shader) + del mat_tmp + else: + if material: + writeMaterial(ident, material, world) + + ident = ident[:-1] + fw('%s</Appearance>\n' % ident) + + mesh_faces_col = mesh.vertex_colors.active.data if is_col else None + 
mesh_faces_uv = mesh.uv_textures.active.data if is_uv else None + + #-- IndexedFaceSet or IndexedLineSet + if use_triangulate: + ident_step = ident + (' ' * (-len(ident) + \ + fw('%s<IndexedTriangleSet ' % ident))) + + # --- Write IndexedTriangleSet Attributes (same as IndexedFaceSet) + fw('solid="%s"\n' % ('true' if mesh.show_double_sided else 'false')) + + # creaseAngle unsupported for IndexedTriangleSet's + + if use_normals or is_force_normals: + # currently not optional, could be made so: + fw(ident_step + 'normalPerVertex="true"\n') + + slot_uv = None + slot_col = None + + if is_uv and is_col: + slot_uv = 0 + slot_col = 1 + + def vertex_key(fidx, f_cnr_idx): + return ( + mesh_faces_uv[fidx].uv[f_cnr_idx][:], + getattr(mesh_faces_col[fidx], "color%d" % (f_cnr_idx + 1))[:], + ) + elif is_uv: + slot_uv = 0 + + def vertex_key(fidx, f_cnr_idx): + return ( + mesh_faces_uv[fidx].uv[f_cnr_idx][:], + ) + elif is_col: + slot_col = 0 + + def vertex_key(fidx, f_cnr_idx): + return ( + getattr(mesh_faces_col[fidx], "color%d" % (f_cnr_idx + 1))[:], + ) + else: + # ack, not especially efficient in this case + def vertex_key(fidx, f_cnr_idx): + return None + + # build a mesh mapping dict + vertex_hash = [{} for i in range(len(mesh.vertices))] + # worst case every face is a quad + face_tri_list = [[None, None, None] for i in range(len(mesh.faces) * 2)] + vert_tri_list = [] + totvert = 0 + totface = 0 + temp_face = [None] * 4 + for i in face_group: + fv = mesh_faces_vertices[i] + for j, v_idx in enumerate(fv): + key = vertex_key(i, j) + vh = vertex_hash[v_idx] + x3d_v = vh.get(key) + if x3d_v is None: + x3d_v = key, v_idx, totvert + vh[key] = x3d_v + # key / original_vertex / new_vertex + vert_tri_list.append(x3d_v) + totvert += 1 + temp_face[j] = x3d_v + + if len(fv) == 4: + f_iter = ((0, 1, 2), (0, 2, 3)) + else: + f_iter = ((0, 1, 2), ) + + for f_it in f_iter: + # loop over a quad as 2 tris + f_tri = face_tri_list[totface] + for ji, j in enumerate(f_it): + f_tri[ji] = 
temp_face[j] + # quads run this twice + totface += 1 + + # clear unused faces + face_tri_list[totface:] = [] + + fw(ident_step + 'index="') + for x3d_f in face_tri_list: + fw('%i %i %i ' % (x3d_f[0][2], x3d_f[1][2], x3d_f[2][2])) + fw('"\n') + + # close IndexedTriangleSet + fw(ident_step + '>\n') + ident += '\t' + + fw('%s<Coordinate ' % ident) + fw('point="') + for x3d_v in vert_tri_list: + fw('%.6g %.6g %.6g ' % mesh_vertices[x3d_v[1]].co[:]) + fw('" />\n') + + if use_normals or is_force_normals: + fw('%s<Normal ' % ident) + fw('vector="') + for x3d_v in vert_tri_list: + fw('%.6g %.6g %.6g ' % mesh_vertices[x3d_v[1]].normal[:]) + fw('" />\n') + + if is_uv: + fw('%s<TextureCoordinate point="' % ident) + for x3d_v in vert_tri_list: + fw('%.4g %.4g ' % x3d_v[0][slot_uv]) + fw('" />\n') + + if is_col: + fw('%s<Color color="' % ident) + for x3d_v in vert_tri_list: + fw('%.3g %.3g %.3g ' % x3d_v[0][slot_col]) + fw('" />\n') + + if use_h3d: + # write attributes + for gpu_attr in gpu_shader['attributes']: + + # UVs + if gpu_attr['type'] == gpu.CD_MTFACE: + if gpu_attr['datatype'] == gpu.GPU_DATA_2F: + fw('%s<FloatVertexAttribute ' % ident) + fw('name="%s" ' % gpu_attr['varname']) + fw('numComponents="2" ') + fw('value="') + for x3d_v in vert_tri_list: + fw('%.4g %.4g ' % x3d_v[0][slot_uv]) + fw('" />\n') + else: + assert(0) + + elif gpu_attr['type'] == gpu.CD_MCOL: + if gpu_attr['datatype'] == gpu.GPU_DATA_4UB: + pass # XXX, H3D can't do + else: + assert(0) + + ident = ident[:-1] + + fw('%s</IndexedTriangleSet>\n' % ident) + + else: + ident_step = ident + (' ' * (-len(ident) + \ + fw('%s<IndexedFaceSet ' % ident))) + + # --- Write IndexedFaceSet Attributes (same as IndexedTriangleSet) + fw('solid="%s"\n' % ('true' if mesh.show_double_sided else 'false')) + if is_smooth: + fw(ident_step + 'creaseAngle="%.4g"\n' % mesh.auto_smooth_angle) + + if use_normals: + # currently not optional, could be made so: + fw(ident_step + 'normalPerVertex="true"\n') + + # IndexedTriangleSet 
assumes true + if is_col: + fw(ident_step + 'colorPerVertex="false"\n') + + # for IndexedTriangleSet we use a uv per vertex so this isnt needed. + if is_uv: + fw(ident_step + 'texCoordIndex="') + + j = 0 + for i in face_group: + if len(mesh_faces_vertices[i]) == 4: + fw('%d %d %d %d -1 ' % (j, j + 1, j + 2, j + 3)) + j += 4 + else: + fw('%d %d %d -1 ' % (j, j + 1, j + 2)) + j += 3 + fw('"\n') + # --- end texCoordIndex + + if True: + fw(ident_step + 'coordIndex="') + for i in face_group: + fv = mesh_faces_vertices[i] + if len(fv) == 3: + fw('%i %i %i -1 ' % fv) + else: + fw('%i %i %i %i -1 ' % fv) + + fw('"\n') + # --- end coordIndex + + # close IndexedFaceSet + fw(ident_step + '>\n') + ident += '\t' + + # --- Write IndexedFaceSet Elements + if True: + if is_coords_written: + fw('%s<Coordinate USE=%s />\n' % (ident, mesh_id_coords)) + if use_normals: + fw('%s<Normal USE=%s />\n' % (ident, mesh_id_normals)) + else: + ident_step = ident + (' ' * (-len(ident) + \ + fw('%s<Coordinate ' % ident))) + fw('DEF=%s\n' % mesh_id_coords) + fw(ident_step + 'point="') + for v in mesh.vertices: + fw('%.6g %.6g %.6g ' % v.co[:]) + fw('"\n') + fw(ident_step + '/>\n') + + is_coords_written = True + + if use_normals: + ident_step = ident + (' ' * (-len(ident) + \ + fw('%s<Normal ' % ident))) + fw('DEF=%s\n' % mesh_id_normals) + fw(ident_step + 'vector="') + for v in mesh.vertices: + fw('%.6g %.6g %.6g ' % v.normal[:]) + fw('"\n') + fw(ident_step + '/>\n') + + if is_uv: + fw('%s<TextureCoordinate point="' % ident) + for i in face_group: + for uv in mesh_faces_uv[i].uv: + fw('%.4g %.4g ' % uv[:]) + del mesh_faces_uv + fw('" />\n') + + if is_col: + fw('%s<Color color="' % ident) + # XXX, 1 color per face, only + for i in face_group: + fw('%.3g %.3g %.3g ' % mesh_faces_col[i].color1[:]) + fw('" />\n') + + #--- output vertexColors + + #--- output closing braces + ident = ident[:-1] + + fw('%s</IndexedFaceSet>\n' % ident) + + ident = ident[:-1] + fw('%s</Shape>\n' % ident) + + # XXX + + 
#fw('%s<PythonScript DEF="PS" url="object.py" >\n' % ident) + #fw('%s <ShaderProgram USE="MA_Material.005" containerField="references"/>\n' % ident) + #fw('%s</PythonScript>\n' % ident) + + ident = ident[:-1] + fw('%s</Group>\n' % ident) + + ident = writeTransform_end(ident) + + if use_halonode: + ident = ident[:-1] + fw('%s</Billboard>\n' % ident) + elif use_billnode: + ident = ident[:-1] + fw('%s</Billboard>\n' % ident) + elif use_collnode: + ident = ident[:-1] + fw('%s</Collision>\n' % ident) + + def writeMaterial(ident, material, world): + material_id = unique_name(material, 'MA_' + material.name, uuid_cache_material, clean_func=quoteattr) + + # look up material name, use it if available + if material.tag: + fw('%s<Material USE=%s />\n' % (ident, material_id)) + else: + material.tag = True + + emit = material.emit + ambient = material.ambient / 3.0 + diffuseColor = material.diffuse_color[:] + if world: + ambiColor = ((material.ambient * 2.0) * world.ambient_color)[:] + else: + ambiColor = 0.0, 0.0, 0.0 + + emitColor = tuple(((c * emit) + ambiColor[i]) / 2.0 for i, c in enumerate(diffuseColor)) + shininess = material.specular_hardness / 512.0 + specColor = tuple((c + 0.001) / (1.25 / (material.specular_intensity + 0.001)) for c in material.specular_color) + transp = 1.0 - material.alpha + + if material.use_shadeless: + ambient = 1.0 + shininess = 0.0 + specColor = emitColor = diffuseColor + + ident_step = ident + (' ' * (-len(ident) + \ + fw('%s<Material ' % ident))) + fw('DEF=%s\n' % material_id) + fw(ident_step + 'diffuseColor="%.3g %.3g %.3g"\n' % clamp_color(diffuseColor)) + fw(ident_step + 'specularColor="%.3g %.3g %.3g"\n' % clamp_color(specColor)) + fw(ident_step + 'emissiveColor="%.3g %.3g %.3g"\n' % clamp_color(emitColor)) + fw(ident_step + 'ambientIntensity="%.3g"\n' % ambient) + fw(ident_step + 'shininess="%.3g"\n' % shininess) + fw(ident_step + 'transparency="%s"\n' % transp) + fw(ident_step + '/>\n') + + def writeMaterialH3D(ident, material, world, 
+ obj, gpu_shader): + material_id = unique_name(material, 'MA_' + material.name, uuid_cache_material, clean_func=quoteattr) + + fw('%s<Material />\n' % ident) + if material.tag: + fw('%s<ComposedShader USE=%s />\n' % (ident, material_id)) + else: + material.tag = True + + # GPU_material_bind_uniforms + # GPU_begin_object_materials + + #~ CD_MCOL 6 + #~ CD_MTFACE 5 + #~ CD_ORCO 14 + #~ CD_TANGENT 18 + #~ GPU_DATA_16F 7 + #~ GPU_DATA_1F 2 + #~ GPU_DATA_1I 1 + #~ GPU_DATA_2F 3 + #~ GPU_DATA_3F 4 + #~ GPU_DATA_4F 5 + #~ GPU_DATA_4UB 8 + #~ GPU_DATA_9F 6 + #~ GPU_DYNAMIC_LAMP_DYNCO 7 + #~ GPU_DYNAMIC_LAMP_DYNCOL 11 + #~ GPU_DYNAMIC_LAMP_DYNENERGY 10 + #~ GPU_DYNAMIC_LAMP_DYNIMAT 8 + #~ GPU_DYNAMIC_LAMP_DYNPERSMAT 9 + #~ GPU_DYNAMIC_LAMP_DYNVEC 6 + #~ GPU_DYNAMIC_OBJECT_COLOR 5 + #~ GPU_DYNAMIC_OBJECT_IMAT 4 + #~ GPU_DYNAMIC_OBJECT_MAT 2 + #~ GPU_DYNAMIC_OBJECT_VIEWIMAT 3 + #~ GPU_DYNAMIC_OBJECT_VIEWMAT 1 + #~ GPU_DYNAMIC_SAMPLER_2DBUFFER 12 + #~ GPU_DYNAMIC_SAMPLER_2DIMAGE 13 + #~ GPU_DYNAMIC_SAMPLER_2DSHADOW 14 + + ''' + inline const char* typeToString( X3DType t ) { + switch( t ) { + case SFFLOAT: return "SFFloat"; + case MFFLOAT: return "MFFloat"; + case SFDOUBLE: return "SFDouble"; + case MFDOUBLE: return "MFDouble"; + case SFTIME: return "SFTime"; + case MFTIME: return "MFTime"; + case SFINT32: return "SFInt32"; + case MFINT32: return "MFInt32"; + case SFVEC2F: return "SFVec2f"; + case MFVEC2F: return "MFVec2f"; + case SFVEC2D: return "SFVec2d"; + case MFVEC2D: return "MFVec2d"; + case SFVEC3F: return "SFVec3f"; + case MFVEC3F: return "MFVec3f"; + case SFVEC3D: return "SFVec3d"; + case MFVEC3D: return "MFVec3d"; + case SFVEC4F: return "SFVec4f"; + case MFVEC4F: return "MFVec4f"; + case SFVEC4D: return "SFVec4d"; + case MFVEC4D: return "MFVec4d"; + case SFBOOL: return "SFBool"; + case MFBOOL: return "MFBool"; + case SFSTRING: return "SFString"; + case MFSTRING: return "MFString"; + case SFNODE: return "SFNode"; + case MFNODE: return "MFNode"; + case SFCOLOR: return 
"SFColor"; + case MFCOLOR: return "MFColor"; + case SFCOLORRGBA: return "SFColorRGBA"; + case MFCOLORRGBA: return "MFColorRGBA"; + case SFROTATION: return "SFRotation"; + case MFROTATION: return "MFRotation"; + case SFQUATERNION: return "SFQuaternion"; + case MFQUATERNION: return "MFQuaternion"; + case SFMATRIX3F: return "SFMatrix3f"; + case MFMATRIX3F: return "MFMatrix3f"; + case SFMATRIX4F: return "SFMatrix4f"; + case MFMATRIX4F: return "MFMatrix4f"; + case SFMATRIX3D: return "SFMatrix3d"; + case MFMATRIX3D: return "MFMatrix3d"; + case SFMATRIX4D: return "SFMatrix4d"; + case MFMATRIX4D: return "MFMatrix4d"; + case UNKNOWN_X3D_TYPE: + default:return "UNKNOWN_X3D_TYPE"; + ''' + import gpu + + fw('%s<ComposedShader DEF=%s language="GLSL" >\n' % (ident, material_id)) + ident += '\t' + + shader_url_frag = 'shaders/%s_%s.frag' % (filename_strip, material_id[1:-1]) + shader_url_vert = 'shaders/%s_%s.vert' % (filename_strip, material_id[1:-1]) + + # write files + shader_dir = os.path.join(base_dst, 'shaders') + if not os.path.isdir(shader_dir): + os.mkdir(shader_dir) + + for uniform in gpu_shader['uniforms']: + if uniform['type'] == gpu.GPU_DYNAMIC_SAMPLER_2DIMAGE: + fw('%s<field name="%s" type="SFNode" accessType="inputOutput">\n' % (ident, uniform['varname'])) + writeImageTexture(ident + '\t', bpy.data.images[uniform['image']]) + fw('%s</field>\n' % ident) + + elif uniform['type'] == gpu.GPU_DYNAMIC_LAMP_DYNCO: + if uniform['datatype'] == gpu.GPU_DATA_3F: # should always be true! + value = '%.6g %.6g %.6g' % (global_matrix * bpy.data.objects[uniform['lamp']].matrix_world).to_translation()[:] + fw('%s<field name="%s" type="SFVec3f" accessType="inputOutput" value="%s" />\n' % (ident, uniform['varname'], value)) + else: + assert(0) + + elif uniform['type'] == gpu.GPU_DYNAMIC_LAMP_DYNCOL: + # odd we have both 3, 4 types. 
+ lamp = bpy.data.objects[uniform['lamp']].data + value = '%.6g %.6g %.6g' % (lamp.color * lamp.energy)[:] + if uniform['datatype'] == gpu.GPU_DATA_3F: + fw('%s<field name="%s" type="SFVec3f" accessType="inputOutput" value="%s" />\n' % (ident, uniform['varname'], value)) + elif uniform['datatype'] == gpu.GPU_DATA_4F: + fw('%s<field name="%s" type="SFVec4f" accessType="inputOutput" value="%s 1.0" />\n' % (ident, uniform['varname'], value)) + else: + assert(0) + + elif uniform['type'] == gpu.GPU_DYNAMIC_LAMP_DYNENERGY: + # not used ? + assert(0) + + elif uniform['type'] == gpu.GPU_DYNAMIC_LAMP_DYNVEC: + if uniform['datatype'] == gpu.GPU_DATA_3F: + value = '%.6g %.6g %.6g' % (mathutils.Vector((0.0, 0.0, 1.0)) * (global_matrix * bpy.data.objects[uniform['lamp']].matrix_world).to_quaternion()).normalized()[:] + fw('%s<field name="%s" type="SFVec3f" accessType="inputOutput" value="%s" />\n' % (ident, uniform['varname'], value)) + else: + assert(0) + + elif uniform['type'] == gpu.GPU_DYNAMIC_OBJECT_VIEWIMAT: + if uniform['datatype'] == gpu.GPU_DATA_16F: + # must be updated dynamically + # TODO, write out 'viewpointMatrices.py' + value = ' '.join(['%.6f' % f for v in mathutils.Matrix() for f in v]) + fw('%s<field name="%s" type="SFMatrix4f" accessType="inputOutput" value="%s" />\n' % (ident, uniform['varname'], value)) + else: + assert(0) + + elif uniform['type'] == gpu.GPU_DYNAMIC_OBJECT_IMAT: + if uniform['datatype'] == gpu.GPU_DATA_16F: + value = ' '.join(['%.6f' % f for v in (global_matrix * obj.matrix_world).inverted() for f in v]) + fw('%s<field name="%s" type="SFMatrix4f" accessType="inputOutput" value="%s" />\n' % (ident, uniform['varname'], value)) + else: + assert(0) + + elif uniform['type'] == gpu.GPU_DYNAMIC_SAMPLER_2DSHADOW: + pass # XXX, shadow buffers not supported. 
+ + elif uniform['type'] == gpu.GPU_DYNAMIC_SAMPLER_2DBUFFER: + if uniform['datatype'] == gpu.GPU_DATA_1I: + if 1: + tex = uniform['texpixels'] + value = [] + for i in range(0, len(tex) - 1, 4): + col = tex[i:i + 4] + value.append('0x%.2x%.2x%.2x%.2x' % (col[0], col[1], col[2], col[3])) + + fw('%s<field name="%s" type="SFNode" accessType="inputOutput">\n' % (ident, uniform['varname'])) + + ident += '\t' + + ident_step = ident + (' ' * (-len(ident) + \ + fw('%s<PixelTexture \n' % ident))) + fw(ident_step + 'repeatS="false"\n') + fw(ident_step + 'repeatT="false"\n') + + fw(ident_step + 'image="%s 1 4 %s"\n' % (len(value), " ".join(value))) + + fw(ident_step + '/>\n') + + ident = ident[:-1] + + fw('%s</field>\n' % ident) + + #for i in range(0, 10, 4) + #value = ' '.join(['%d' % f for f in uniform['texpixels']]) + # value = ' '.join(['%.6g' % (f / 256) for f in uniform['texpixels']]) + + #fw('%s<field name="%s" type="SFInt32" accessType="inputOutput" value="%s" />\n' % (ident, uniform['varname'], value)) + #print('test', len(uniform['texpixels'])) + else: + assert(0) + else: + print("SKIPPING", uniform['type']) + + file_frag = open(os.path.join(base_dst, shader_url_frag), 'w') + file_frag.write(gpu_shader['fragment']) + file_frag.close() + # patch it + h3d_shader_glsl_frag_patch(os.path.join(base_dst, shader_url_frag)) + + file_vert = open(os.path.join(base_dst, shader_url_vert), 'w') + file_vert.write(gpu_shader['vertex']) + file_vert.close() + + fw('%s<ShaderPart type="FRAGMENT" url="%s" />\n' % (ident, shader_url_frag)) + fw('%s<ShaderPart type="VERTEX" url="%s" />\n' % (ident, shader_url_vert)) + ident = ident[:-1] + + fw('%s</ComposedShader>\n' % ident) + + def writeImageTexture(ident, image): + image_id = unique_name(image, 'IM_' + image.name, uuid_cache_image, clean_func=quoteattr) + + if image.tag: + fw('%s<ImageTexture USE=%s />\n' % (ident, image_id)) + else: + image.tag = True + + ident_step = ident + (' ' * (-len(ident) + \ + fw('%s<ImageTexture ' % 
ident))) + fw('DEF=%s\n' % image_id) + + # collect image paths, can load multiple + # [relative, name-only, absolute] + filepath = image.filepath + filepath_full = bpy.path.abspath(filepath) + filepath_ref = bpy_extras.io_utils.path_reference(filepath_full, base_src, base_dst, path_mode, "textures", copy_set) + filepath_base = os.path.basename(filepath_ref) + + images = [ + filepath_base, + filepath_ref, + filepath_full, + ] + + images = [f.replace('\\', '/') for f in images] + images = [f for i, f in enumerate(images) if f not in images[:i]] + + fw(ident_step + "url='%s' " % ' '.join(['"%s"' % f for f in images])) + fw(ident_step + '/>\n') + + def writeBackground(ident, world): + + if world is None: + return + + # note, not re-used + world_id = unique_name(world, 'WO_' + world.name, uuid_cache_world, clean_func=quoteattr) + + blending = world.use_sky_blend, world.use_sky_paper, world.use_sky_real + + grd_triple = clamp_color(world.horizon_color) + sky_triple = clamp_color(world.zenith_color) + mix_triple = clamp_color((grd_triple[i] + sky_triple[i]) / 2.0 for i in range(3)) + + ident_step = ident + (' ' * (-len(ident) + \ + fw('%s<Background ' % ident))) + fw('DEF=%s\n' % world_id) + # No Skytype - just Hor color + if blending == (False, False, False): + fw(ident_step + 'groundColor="%.3g %.3g %.3g"\n' % grd_triple) + fw(ident_step + 'skyColor="%.3g %.3g %.3g"\n' % grd_triple) + # Blend Gradient + elif blending == (True, False, False): + fw(ident_step + 'groundColor="%.3g %.3g %.3g, %.3g %.3g %.3g"\n' % (grd_triple + mix_triple)) + fw(ident_step + 'groundAngle="1.57, 1.57"\n') + fw(ident_step + 'skyColor="%.3g %.3g %.3g, %.3g %.3g %.3g"\n' % (sky_triple + mix_triple)) + fw(ident_step + 'skyAngle="1.57, 1.57"\n') + # Blend+Real Gradient Inverse + elif blending == (True, False, True): + fw(ident_step + 'groundColor="%.3g %.3g %.3g, %.3g %.3g %.3g"\n' % (sky_triple + grd_triple)) + fw(ident_step + 'groundAngle="1.57"\n') + fw(ident_step + 'skyColor="%.3g %.3g %.3g, 
%.3g %.3g %.3g, %.3g %.3g %.3g"\n' % (sky_triple + grd_triple + sky_triple)) + fw(ident_step + 'skyAngle="1.57, 3.14159"\n') + # Paper - just Zen Color + elif blending == (False, False, True): + fw(ident_step + 'groundColor="%.3g %.3g %.3g"\n' % sky_triple) + fw(ident_step + 'skyColor="%.3g %.3g %.3g"\n' % sky_triple) + # Blend+Real+Paper - komplex gradient + elif blending == (True, True, True): + fw(ident_step + 'groundColor="%.3g %.3g %.3g, %.3g %.3g %.3g"\n' % (sky_triple + grd_triple)) + fw(ident_step + 'groundAngle="1.57, 1.57"\n') + fw(ident_step + 'skyColor="%.3g %.3g %.3g, %.3g %.3g %.3g"\n' % (sky_triple + grd_triple)) + fw(ident_step + 'skyAngle="1.57, 1.57"\n') + # Any Other two colors + else: + fw(ident_step + 'groundColor="%.3g %.3g %.3g"\n' % grd_triple) + fw(ident_step + 'skyColor="%.3g %.3g %.3g"\n' % sky_triple) + + for tex in bpy.data.textures: + if tex.type == 'IMAGE' and tex.image: + namemat = tex.name + pic = tex.image + basename = os.path.basename(bpy.path.abspath(pic.filepath)) + + if namemat == 'back': + fw(ident_step + 'backUrl="%s"\n' % basename) + elif namemat == 'bottom': + fw(ident_step + 'bottomUrl="%s"\n' % basename) + elif namemat == 'front': + fw(ident_step + 'frontUrl="%s"\n' % basename) + elif namemat == 'left': + fw(ident_step + 'leftUrl="%s"\n' % basename) + elif namemat == 'right': + fw(ident_step + 'rightUrl="%s"\n' % basename) + elif namemat == 'top': + fw(ident_step + 'topUrl="%s"\n' % basename) + + fw(ident_step + '/>\n') + + # ------------------------------------------------------------------------- + # Export Object Hierarchy (recursively called) + # ------------------------------------------------------------------------- + def export_object(ident, obj_main_parent, obj_main, obj_children): + world = scene.world + free, derived = create_derived_objects(scene, obj_main) + + if derived is None: + return + + if use_hierarchy: + obj_main_matrix_world = obj_main.matrix_world + if obj_main_parent: + obj_main_matrix = 
obj_main_parent.matrix_world.inverted() * obj_main_matrix_world + else: + obj_main_matrix = obj_main_matrix_world + obj_main_matrix_world_invert = obj_main_matrix_world.inverted() + + obj_main_id = unique_name(obj_main, obj_main.name, uuid_cache_object, clean_func=quoteattr) + + ident = writeTransform_begin(ident, obj_main_matrix if obj_main_parent else global_matrix * obj_main_matrix, obj_main_id) + + for obj, obj_matrix in derived: + obj_type = obj.type + + if use_hierarchy: + # make transform node relative + obj_matrix = obj_main_matrix_world_invert * obj_matrix + + if obj_type == 'CAMERA': + writeViewpoint(ident, obj, obj_matrix, scene) + elif obj_type in ('MESH', 'CURVE', 'SURF', 'FONT'): + if (obj_type != 'MESH') or (use_apply_modifiers and obj.is_modified(scene, 'PREVIEW')): + try: + me = obj.to_mesh(scene, use_apply_modifiers, 'PREVIEW') + except: + me = None + else: + me = obj.data + + if me is not None: + writeIndexedFaceSet(ident, obj, me, obj_matrix, world) + + # free mesh created with create_mesh() + if me != obj.data: + bpy.data.meshes.remove(me) + + elif obj_type == 'LAMP': + data = obj.data + datatype = data.type + if datatype == 'POINT': + writePointLight(ident, obj, obj_matrix, data, world) + elif datatype == 'SPOT': + writeSpotLight(ident, obj, obj_matrix, data, world) + elif datatype == 'SUN': + writeDirectionalLight(ident, obj, obj_matrix, data, world) + else: + writeDirectionalLight(ident, obj, obj_matrix, data, world) + else: + #print "Info: Ignoring [%s], object type [%s] not handle yet" % (object.name,object.getType) + pass + + if free: + free_derived_objects(obj_main) + + # --------------------------------------------------------------------- + # write out children recursively + # --------------------------------------------------------------------- + for obj_child, obj_child_children in obj_children: + export_object(ident, obj_main, obj_child, obj_child_children) + + if use_hierarchy: + ident = writeTransform_end(ident) + + # 
------------------------------------------------------------------------- + # Main Export Function + # ------------------------------------------------------------------------- + def export_main(): + world = scene.world + + # tag un-exported IDs + bpy.data.meshes.tag(False) + bpy.data.materials.tag(False) + bpy.data.images.tag(False) + + print('Info: starting X3D export to %r...' % file.name) + ident = '' + ident = writeHeader(ident) + + writeNavigationInfo(ident, scene) + writeBackground(ident, world) + writeFog(ident, world) + + ident = '\t\t' + + if use_selection: + objects = [obj for obj in scene.objects if obj.is_visible(scene) and o.select] + else: + objects = [obj for obj in scene.objects if obj.is_visible(scene)] + + if use_hierarchy: + objects_hierarchy = build_hierarchy(objects) + else: + objects_hierarchy = ((obj, []) for obj in objects) + + for obj_main, obj_main_children in objects_hierarchy: + export_object(ident, None, obj_main, obj_main_children) + + ident = writeFooter(ident) + + export_main() + + # ------------------------------------------------------------------------- + # global cleanup + # ------------------------------------------------------------------------- + file.close() + + if use_h3d: + bpy.data.materials.remove(gpu_shader_dummy_mat) + + # copy all collected files. 
+ print(copy_set) + bpy_extras.io_utils.path_reference_copy(copy_set) + + print('Info: finished X3D export to %r' % file.name) + + +########################################################## +# Callbacks, needed before Main +########################################################## + + +def save(operator, context, filepath="", + use_selection=True, + use_apply_modifiers=False, + use_triangulate=False, + use_normals=False, + use_compress=False, + use_hierarchy=True, + use_h3d=False, + global_matrix=None, + path_mode='AUTO', + ): + + bpy.path.ensure_ext(filepath, '.x3dz' if use_compress else '.x3d') + + if bpy.ops.object.mode_set.poll(): + bpy.ops.object.mode_set(mode='OBJECT') + + file = None + if use_compress: + try: + import gzip + file = gzip.open(filepath, 'w') + except: + print('failed to import compression modules, exporting uncompressed') + filepath = filepath[:-1] # remove trailing z + + if file is None: + file = open(filepath, 'w') + + if global_matrix is None: + global_matrix = mathutils.Matrix() + + export(file, + global_matrix, + context.scene, + use_apply_modifiers=use_apply_modifiers, + use_selection=use_selection, + use_triangulate=use_triangulate, + use_normals=use_normals, + use_hierarchy=use_hierarchy, + use_h3d=use_h3d, + path_mode=path_mode, + ) + + return {'FINISHED'} diff --git a/io_scene_x3d/import_x3d.py b/io_scene_x3d/import_x3d.py new file mode 100644 index 00000000..28c0abac --- /dev/null +++ b/io_scene_x3d/import_x3d.py @@ -0,0 +1,2656 @@ +# ##### BEGIN GPL LICENSE BLOCK ##### +# +# This program is free software; you can redistribute it and/or +# modify it under the terms of the GNU General Public License +# as published by the Free Software Foundation; either version 2 +# of the License, or (at your option) any later version. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. 
See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with this program; if not, write to the Free Software Foundation, +# Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA. +# +# ##### END GPL LICENSE BLOCK ##### + +# <pep8 compliant> + +DEBUG = False + +# This should work without a blender at all +import os + + +def imageConvertCompat(path): + + if os.sep == '\\': + return path # assime win32 has quicktime, dont convert + + if path.lower().endswith('.gif'): + path_to = path[:-3] + 'png' + + ''' + if exists(path_to): + return path_to + ''' + # print('\n'+path+'\n'+path_to+'\n') + os.system('convert "%s" "%s"' % (path, path_to)) # for now just hope we have image magick + + if os.path.exists(path_to): + return path_to + + return path + +# notes +# transform are relative +# order dosnt matter for loc/size/rot +# right handed rotation +# angles are in radians +# rotation first defines axis then ammount in radians + + +# =============================== VRML Spesific + +def vrmlFormat(data): + ''' + Keep this as a valid vrml file, but format in a way we can predict. + ''' + # Strip all commends - # not in strings - warning multiline strings are ignored. + def strip_comment(l): + #l = ' '.join(l.split()) + l = l.strip() + + if l.startswith('#'): + return '' + + i = l.find('#') + + if i == -1: + return l + + # Most cases accounted for! if we have a comment at the end of the line do this... 
+ #j = l.find('url "') + j = l.find('"') + + if j == -1: # simple no strings + return l[:i].strip() + + q = False + for i, c in enumerate(l): + if c == '"': + q = not q # invert + + elif c == '#': + if q == False: + return l[:i - 1] + + return l + + data = '\n'.join([strip_comment(l) for l in data.split('\n')]) # remove all whitespace + + EXTRACT_STRINGS = True # only needed when strings or filesnames containe ,[]{} chars :/ + + if EXTRACT_STRINGS: + + # We need this so we can detect URL's + data = '\n'.join([' '.join(l.split()) for l in data.split('\n')]) # remove all whitespace + + string_ls = [] + + #search = 'url "' + search = '"' + + ok = True + last_i = 0 + while ok: + ok = False + i = data.find(search, last_i) + if i != -1: + + start = i + len(search) # first char after end of search + end = data.find('"', start) + if end != -1: + item = data[start:end] + string_ls.append(item) + data = data[:start] + data[end:] + ok = True # keep looking + + last_i = (end - len(item)) + 1 + # print(last_i, item, '|' + data[last_i] + '|') + + # done with messy extracting strings part + + # Bad, dont take strings into account + ''' + data = data.replace('#', '\n#') + data = '\n'.join([ll for l in data.split('\n') for ll in (l.strip(),) if not ll.startswith('#')]) # remove all whitespace + ''' + data = data.replace('{', '\n{\n') + data = data.replace('}', '\n}\n') + data = data.replace('[', '\n[\n') + data = data.replace(']', '\n]\n') + data = data.replace(',', ' , ') # make sure comma's separate + + if EXTRACT_STRINGS: + # add strings back in + + search = '"' # fill in these empty strings + + ok = True + last_i = 0 + while ok: + ok = False + i = data.find(search + '"', last_i) + # print(i) + if i != -1: + start = i + len(search) # first char after end of search + item = string_ls.pop(0) + # print(item) + data = data[:start] + item + data[start:] + + last_i = start + len(item) + 1 + + ok = True + + # More annoying obscure cases where USE or DEF are placed on a newline + # data 
= data.replace('\nDEF ', ' DEF ') + # data = data.replace('\nUSE ', ' USE ') + + data = '\n'.join([' '.join(l.split()) for l in data.split('\n')]) # remove all whitespace + + # Better to parse the file accounting for multiline arrays + ''' + data = data.replace(',\n', ' , ') # remove line endings with commas + data = data.replace(']', '\n]\n') # very very annoying - but some comma's are at the end of the list, must run this again. + ''' + + return [l for l in data.split('\n') if l] + +NODE_NORMAL = 1 # {} +NODE_ARRAY = 2 # [] +NODE_REFERENCE = 3 # USE foobar +# NODE_PROTO = 4 # + +lines = [] + + +def getNodePreText(i, words): + # print(lines[i]) + use_node = False + while len(words) < 5: + + if i >= len(lines): + break + ''' + elif lines[i].startswith('PROTO'): + return NODE_PROTO, i+1 + ''' + elif lines[i] == '{': + # words.append(lines[i]) # no need + # print("OK") + return NODE_NORMAL, i + 1 + elif lines[i].count('"') % 2 != 0: # odd number of quotes? - part of a string. + # print('ISSTRING') + break + else: + new_words = lines[i].split() + if 'USE' in new_words: + use_node = True + + words.extend(new_words) + i += 1 + + # Check for USE node - no { + # USE #id - should always be on the same line. + if use_node: + # print('LINE', i, words[:words.index('USE')+2]) + words[:] = words[:words.index('USE') + 2] + if lines[i] == '{' and lines[i + 1] == '}': + # USE sometimes has {} after it anyway + i += 2 + return NODE_REFERENCE, i + + # print("error value!!!", words) + return 0, -1 + + +def is_nodeline(i, words): + + if not lines[i][0].isalpha(): + return 0, 0 + + #if lines[i].startswith('field'): + # return 0, 0 + + # Is this a prototype?? 
+ if lines[i].startswith('PROTO'): + words[:] = lines[i].split() + return NODE_NORMAL, i + 1 # TODO - assumes the next line is a '[\n', skip that + if lines[i].startswith('EXTERNPROTO'): + words[:] = lines[i].split() + return NODE_ARRAY, i + 1 # TODO - assumes the next line is a '[\n', skip that + + ''' + proto_type, new_i = is_protoline(i, words, proto_field_defs) + if new_i != -1: + return proto_type, new_i + ''' + + # Simple "var [" type + if lines[i + 1] == '[': + if lines[i].count('"') % 2 == 0: + words[:] = lines[i].split() + return NODE_ARRAY, i + 2 + + node_type, new_i = getNodePreText(i, words) + + if not node_type: + if DEBUG: + print("not node_type", lines[i]) + return 0, 0 + + # Ok, we have a { after some values + # Check the values are not fields + for i, val in enumerate(words): + if i != 0 and words[i - 1] in {'DEF', 'USE'}: + # ignore anything after DEF, it is a ID and can contain any chars. + pass + elif val[0].isalpha() and val not in {'TRUE', 'FALSE'}: + pass + else: + # There is a number in one of the values, therefor we are not a node. + return 0, 0 + + #if node_type==NODE_REFERENCE: + # print(words, "REF_!!!!!!!") + return node_type, new_i + + +def is_numline(i): + ''' + Does this line start with a number? + ''' + + # Works but too slow. 
+ ''' + l = lines[i] + for w in l.split(): + if w==',': + pass + else: + try: + float(w) + return True + + except: + return False + + return False + ''' + + l = lines[i] + + line_start = 0 + + if l.startswith(', '): + line_start += 2 + + line_end = len(l) - 1 + line_end_new = l.find(' ', line_start) # comma's always have a space before them + + if line_end_new != -1: + line_end = line_end_new + + try: + float(l[line_start:line_end]) # works for a float or int + return True + except: + return False + + +class vrmlNode(object): + __slots__ = ('id', + 'fields', + 'proto_node', + 'proto_field_defs', + 'proto_fields', + 'node_type', + 'parent', + 'children', + 'parent', + 'array_data', + 'reference', + 'lineno', + 'filename', + 'blendObject', + 'DEF_NAMESPACE', + 'ROUTE_IPO_NAMESPACE', + 'PROTO_NAMESPACE', + 'x3dNode') + + def __init__(self, parent, node_type, lineno): + self.id = None + self.node_type = node_type + self.parent = parent + self.blendObject = None + self.x3dNode = None # for x3d import only + if parent: + parent.children.append(self) + + self.lineno = lineno + + # This is only set from the root nodes. 
+ # Having a filename also denotes a root node + self.filename = None + self.proto_node = None # proto field definition eg: "field SFColor seatColor .6 .6 .1" + + # Store in the root node because each inline file needs its own root node and its own namespace + self.DEF_NAMESPACE = None + self.ROUTE_IPO_NAMESPACE = None + ''' + self.FIELD_NAMESPACE = None + ''' + + self.PROTO_NAMESPACE = None + + self.reference = None + + if node_type == NODE_REFERENCE: + # For references, only the parent and ID are needed + # the reference its self is assigned on parsing + return + + self.fields = [] # fields have no order, in some cases rool level values are not unique so dont use a dict + + self.proto_field_defs = [] # proto field definition eg: "field SFColor seatColor .6 .6 .1" + self.proto_fields = [] # proto field usage "diffuseColor IS seatColor" + self.children = [] + self.array_data = [] # use for arrays of data - should only be for NODE_ARRAY types + + # Only available from the root node + ''' + def getFieldDict(self): + if self.FIELD_NAMESPACE != None: + return self.FIELD_NAMESPACE + else: + return self.parent.getFieldDict() + ''' + def getProtoDict(self): + if self.PROTO_NAMESPACE != None: + return self.PROTO_NAMESPACE + else: + return self.parent.getProtoDict() + + def getDefDict(self): + if self.DEF_NAMESPACE != None: + return self.DEF_NAMESPACE + else: + return self.parent.getDefDict() + + def getRouteIpoDict(self): + if self.ROUTE_IPO_NAMESPACE != None: + return self.ROUTE_IPO_NAMESPACE + else: + return self.parent.getRouteIpoDict() + + def setRoot(self, filename): + self.filename = filename + # self.FIELD_NAMESPACE = {} + self.DEF_NAMESPACE = {} + self.ROUTE_IPO_NAMESPACE = {} + self.PROTO_NAMESPACE = {} + + def isRoot(self): + if self.filename is None: + return False + else: + return True + + def getFilename(self): + if self.filename: + return self.filename + elif self.parent: + return self.parent.getFilename() + else: + return None + + def getRealNode(self): + if 
self.reference: + return self.reference + else: + return self + + def getSpec(self): + self_real = self.getRealNode() + try: + return self_real.id[-1] # its possible this node has no spec + except: + return None + + def findSpecRecursive(self, spec): + self_real = self.getRealNode() + if spec == self_real.getSpec(): + return self + + for child in self_real.children: + if child.findSpecRecursive(spec): + return child + + return None + + def getPrefix(self): + if self.id: + return self.id[0] + return None + + def getSpecialTypeName(self, typename): + self_real = self.getRealNode() + try: + return self_real.id[list(self_real.id).index(typename) + 1] + except: + return None + + def getDefName(self): + return self.getSpecialTypeName('DEF') + + def getProtoName(self): + return self.getSpecialTypeName('PROTO') + + def getExternprotoName(self): + return self.getSpecialTypeName('EXTERNPROTO') + + def getChildrenBySpec(self, node_spec): # spec could be Transform, Shape, Appearance + self_real = self.getRealNode() + # using getSpec functions allows us to use the spec of USE children that dont have their spec in their ID + if type(node_spec) == str: + return [child for child in self_real.children if child.getSpec() == node_spec] + else: + # Check inside a list of optional types + return [child for child in self_real.children if child.getSpec() in node_spec] + + def getChildBySpec(self, node_spec): # spec could be Transform, Shape, Appearance + # Use in cases where there is only ever 1 child of this type + ls = self.getChildrenBySpec(node_spec) + if ls: + return ls[0] + else: + return None + + def getChildrenByName(self, node_name): # type could be geometry, children, appearance + self_real = self.getRealNode() + return [child for child in self_real.children if child.id if child.id[0] == node_name] + + def getChildByName(self, node_name): + self_real = self.getRealNode() + for child in self_real.children: + if child.id and child.id[0] == node_name: # and 
child.id[-1]==node_spec: + return child + + def getSerialized(self, results, ancestry): + ''' Return this node and all its children in a flat list ''' + ancestry = ancestry[:] # always use a copy + + # self_real = self.getRealNode() + + results.append((self, tuple(ancestry))) + ancestry.append(self) + for child in self.getRealNode().children: + if child not in ancestry: + # We dont want to load proto's, they are only references + # We could enforce this elsewhere + + # Only add this in a very special case + # where the parent of this object is not the real parent + # - In this case we have added the proto as a child to a node instancing it. + # This is a bit arbitary, but its how Proto's are done with this importer. + if child.getProtoName() is None and child.getExternprotoName() is None: + child.getSerialized(results, ancestry) + else: + + if DEBUG: + print('getSerialized() is proto:', child.getProtoName(), child.getExternprotoName(), self.getSpec()) + + self_spec = self.getSpec() + + if child.getProtoName() == self_spec or child.getExternprotoName() == self_spec: + #if DEBUG: + # "FoundProto!" 
+ child.getSerialized(results, ancestry) + + return results + + def searchNodeTypeID(self, node_spec, results): + self_real = self.getRealNode() + # print(self.lineno, self.id) + if self_real.id and self_real.id[-1] == node_spec: # use last element, could also be only element + results.append(self_real) + for child in self_real.children: + child.searchNodeTypeID(node_spec, results) + return results + + def getFieldName(self, field, ancestry, AS_CHILD=False): + self_real = self.getRealNode() # incase we're an instance + + for f in self_real.fields: + # print(f) + if f and f[0] == field: + # print('\tfound field', f) + + if len(f) >= 3 and f[1] == 'IS': # eg: 'diffuseColor IS legColor' + field_id = f[2] + + # print("\n\n\n\n\n\nFOND IS!!!") + f_proto_lookup = None + f_proto_child_lookup = None + i = len(ancestry) + while i: + i -= 1 + node = ancestry[i] + node = node.getRealNode() + + # proto settings are stored in "self.proto_node" + if node.proto_node: + # Get the default value from the proto, this can be overwridden by the proto instace + # 'field SFColor legColor .8 .4 .7' + if AS_CHILD: + for child in node.proto_node.children: + #if child.id and len(child.id) >= 3 and child.id[2]==field_id: + if child.id and ('point' in child.id or 'points' in child.id): + f_proto_child_lookup = child + + else: + for f_def in node.proto_node.proto_field_defs: + if len(f_def) >= 4: + if f_def[0] == 'field' and f_def[2] == field_id: + f_proto_lookup = f_def[3:] + + # Node instance, Will be 1 up from the proto-node in the ancestry list. but NOT its parent. 
+ # This is the setting as defined by the instance, including this setting is optional, + # and will override the default PROTO value + # eg: 'legColor 1 0 0' + if AS_CHILD: + for child in node.children: + if child.id and child.id[0] == field_id: + f_proto_child_lookup = child + else: + for f_def in node.fields: + if len(f_def) >= 2: + if f_def[0] == field_id: + if DEBUG: + print("getFieldName(), found proto", f_def) + f_proto_lookup = f_def[1:] + + if AS_CHILD: + if f_proto_child_lookup: + if DEBUG: + print("getFieldName() - AS_CHILD=True, child found") + print(f_proto_child_lookup) + return f_proto_child_lookup + else: + return f_proto_lookup + else: + if AS_CHILD: + return None + else: + # Not using a proto + return f[1:] + # print('\tfield not found', field) + + # See if this is a proto name + if AS_CHILD: + child_array = None + for child in self_real.children: + if child.id and len(child.id) == 1 and child.id[0] == field: + return child + + return None + + def getFieldAsInt(self, field, default, ancestry): + self_real = self.getRealNode() # incase we're an instance + + f = self_real.getFieldName(field, ancestry) + if f is None: + return default + if ',' in f: + f = f[:f.index(',')] # strip after the comma + + if len(f) != 1: + print('\t"%s" wrong length for int conversion for field "%s"' % (f, field)) + return default + + try: + return int(f[0]) + except: + print('\tvalue "%s" could not be used as an int for field "%s"' % (f[0], field)) + return default + + def getFieldAsFloat(self, field, default, ancestry): + self_real = self.getRealNode() # incase we're an instance + + f = self_real.getFieldName(field, ancestry) + if f is None: + return default + if ',' in f: + f = f[:f.index(',')] # strip after the comma + + if len(f) != 1: + print('\t"%s" wrong length for float conversion for field "%s"' % (f, field)) + return default + + try: + return float(f[0]) + except: + print('\tvalue "%s" could not be used as a float for field "%s"' % (f[0], field)) + return 
default + + def getFieldAsFloatTuple(self, field, default, ancestry): + self_real = self.getRealNode() # incase we're an instance + + f = self_real.getFieldName(field, ancestry) + if f is None: + return default + # if ',' in f: f = f[:f.index(',')] # strip after the comma + + if len(f) < 1: + print('"%s" wrong length for float tuple conversion for field "%s"' % (f, field)) + return default + + ret = [] + for v in f: + if v != ',': + try: + ret.append(float(v)) + except: + break # quit of first non float, perhaps its a new field name on the same line? - if so we are going to ignore it :/ TODO + # print(ret) + + if ret: + return ret + if not ret: + print('\tvalue "%s" could not be used as a float tuple for field "%s"' % (f, field)) + return default + + def getFieldAsBool(self, field, default, ancestry): + self_real = self.getRealNode() # incase we're an instance + + f = self_real.getFieldName(field, ancestry) + if f is None: + return default + if ',' in f: + f = f[:f.index(',')] # strip after the comma + + if len(f) != 1: + print('\t"%s" wrong length for bool conversion for field "%s"' % (f, field)) + return default + + if f[0].upper() == '"TRUE"' or f[0].upper() == 'TRUE': + return True + elif f[0].upper() == '"FALSE"' or f[0].upper() == 'FALSE': + return False + else: + print('\t"%s" could not be used as a bool for field "%s"' % (f[1], field)) + return default + + def getFieldAsString(self, field, default, ancestry): + self_real = self.getRealNode() # incase we're an instance + + f = self_real.getFieldName(field, ancestry) + if f is None: + return default + if len(f) < 1: + print('\t"%s" wrong length for string conversion for field "%s"' % (f, field)) + return default + + if len(f) > 1: + # String may contain spaces + st = ' '.join(f) + else: + st = f[0] + + # X3D HACK + if self.x3dNode: + return st + + if st[0] == '"' and st[-1] == '"': + return st[1:-1] + else: + print('\tvalue "%s" could not be used as a string for field "%s"' % (f[0], field)) + return default + 
+ def getFieldAsArray(self, field, group, ancestry): + ''' + For this parser arrays are children + ''' + + def array_as_number(array_string): + array_data = [] + try: + array_data = [int(val) for val in array_string] + except: + try: + array_data = [float(val) for val in array_string] + except: + print('\tWarning, could not parse array data from field') + + return array_data + + self_real = self.getRealNode() # incase we're an instance + + child_array = self_real.getFieldName(field, ancestry, True) + + #if type(child_array)==list: # happens occasionaly + # array_data = child_array + + if child_array is None: + # For x3d, should work ok with vrml too + # for x3d arrays are fields, vrml they are nodes, annoying but not tooo bad. + data_split = self.getFieldName(field, ancestry) + if not data_split: + return [] + array_data = ' '.join(data_split) + if array_data is None: + return [] + + array_data = array_data.replace(',', ' ') + data_split = array_data.split() + + array_data = array_as_number(data_split) + + elif type(child_array) == list: + # x3d creates these + data_split = [w.strip(",") for w in child_array] + + array_data = array_as_number(data_split) + else: + # print(child_array) + # Normal vrml + array_data = child_array.array_data + + # print('array_data', array_data) + if group == -1 or len(array_data) == 0: + return array_data + + # We want a flat list + flat = True + for item in array_data: + if type(item) == list: + flat = False + break + + # make a flat array + if flat: + flat_array = array_data # we are alredy flat. 
+ else: + flat_array = [] + + def extend_flat(ls): + for item in ls: + if type(item) == list: + extend_flat(item) + else: + flat_array.append(item) + + extend_flat(array_data) + + # We requested a flat array + if group == 0: + return flat_array + + new_array = [] + sub_array = [] + + for item in flat_array: + sub_array.append(item) + if len(sub_array) == group: + new_array.append(sub_array) + sub_array = [] + + if sub_array: + print('\twarning, array was not aligned to requested grouping', group, 'remaining value', sub_array) + + return new_array + + def getFieldAsStringArray(self, field, ancestry): + ''' + Get a list of strings + ''' + self_real = self.getRealNode() # incase we're an instance + + child_array = None + for child in self_real.children: + if child.id and len(child.id) == 1 and child.id[0] == field: + child_array = child + break + if not child_array: + return [] + + # each string gets its own list, remove ""'s + try: + new_array = [f[0][1:-1] for f in child_array.fields] + except: + print('\twarning, string array could not be made') + new_array = [] + + return new_array + + def getLevel(self): + # Ignore self_real + level = 0 + p = self.parent + while p: + level += 1 + p = p.parent + if not p: + break + + return level + + def __repr__(self): + level = self.getLevel() + ind = ' ' * level + if self.node_type == NODE_REFERENCE: + brackets = '' + elif self.node_type == NODE_NORMAL: + brackets = '{}' + else: + brackets = '[]' + + if brackets: + text = ind + brackets[0] + '\n' + else: + text = '' + + text += ind + 'ID: ' + str(self.id) + ' ' + str(level) + (' lineno %d\n' % self.lineno) + + if self.node_type == NODE_REFERENCE: + text += ind + "(reference node)\n" + return text + + if self.proto_node: + text += ind + 'PROTO NODE...\n' + text += str(self.proto_node) + text += ind + 'PROTO NODE_DONE\n' + + text += ind + 'FIELDS:' + str(len(self.fields)) + '\n' + + for i, item in enumerate(self.fields): + text += ind + 'FIELD:\n' + text += ind + str(item) + '\n' 
+ + text += ind + 'PROTO_FIELD_DEFS:' + str(len(self.proto_field_defs)) + '\n' + + for i, item in enumerate(self.proto_field_defs): + text += ind + 'PROTO_FIELD:\n' + text += ind + str(item) + '\n' + + text += ind + 'ARRAY: ' + str(len(self.array_data)) + ' ' + str(self.array_data) + '\n' + #text += ind + 'ARRAY: ' + str(len(self.array_data)) + '[...] \n' + + text += ind + 'CHILDREN: ' + str(len(self.children)) + '\n' + for i, child in enumerate(self.children): + text += ind + ('CHILD%d:\n' % i) + text += str(child) + + text += '\n' + ind + brackets[1] + + return text + + def parse(self, i, IS_PROTO_DATA=False): + new_i = self.__parse(i, IS_PROTO_DATA) + + # print(self.id, self.getFilename()) + + # Check if this node was an inline or externproto + + url_ls = [] + + if self.node_type == NODE_NORMAL and self.getSpec() == 'Inline': + ancestry = [] # Warning! - PROTO's using this wont work at all. + url = self.getFieldAsString('url', None, ancestry) + if url: + url_ls = [(url, None)] + del ancestry + + elif self.getExternprotoName(): + # externproto + url_ls = [] + for f in self.fields: + + if type(f) == str: + f = [f] + + for ff in f: + for f_split in ff.split('"'): + # print(f_split) + # "someextern.vrml#SomeID" + if '#' in f_split: + + f_split, f_split_id = f_split.split('#') # there should only be 1 # anyway + + url_ls.append((f_split, f_split_id)) + else: + url_ls.append((f_split, None)) + + # Was either an Inline or an EXTERNPROTO + if url_ls: + + # print(url_ls) + + for url, extern_key in url_ls: + print(url) + urls = [] + urls.append(url) + urls.append(bpy.path.resolve_ncase(urls[-1])) + + urls.append(os.path.join(os.path.dirname(self.getFilename()), url)) + urls.append(bpy.path.resolve_ncase(urls[-1])) + + urls.append(os.path.join(os.path.dirname(self.getFilename()), os.path.basename(url))) + urls.append(bpy.path.resolve_ncase(urls[-1])) + + try: + url = [url for url in urls if os.path.exists(url)][0] + url_found = True + except: + url_found = False + + if not 
url_found: + print('\tWarning: Inline URL could not be found:', url) + else: + if url == self.getFilename(): + print('\tWarning: cant Inline yourself recursively:', url) + else: + + try: + data = gzipOpen(url) + except: + print('\tWarning: cant open the file:', url) + data = None + + if data: + # Tricky - inline another VRML + print('\tLoading Inline:"%s"...' % url) + + # Watch it! - backup lines + lines_old = lines[:] + + lines[:] = vrmlFormat(data) + + lines.insert(0, '{') + lines.insert(0, 'root_node____') + lines.append('}') + ''' + ff = open('/tmp/test.txt', 'w') + ff.writelines([l+'\n' for l in lines]) + ''' + + child = vrmlNode(self, NODE_NORMAL, -1) + child.setRoot(url) # initialized dicts + child.parse(0) + + # if self.getExternprotoName(): + if self.getExternprotoName(): + if not extern_key: # if none is spesified - use the name + extern_key = self.getSpec() + + if extern_key: + + self.children.remove(child) + child.parent = None + + extern_child = child.findSpecRecursive(extern_key) + + if extern_child: + self.children.append(extern_child) + extern_child.parent = self + + if DEBUG: + print("\tEXTERNPROTO ID found!:", extern_key) + else: + print("\tEXTERNPROTO ID not found!:", extern_key) + + # Watch it! - restore lines + lines[:] = lines_old + + return new_i + + def __parse(self, i, IS_PROTO_DATA=False): + ''' + print('parsing at', i, end="") + print(i, self.id, self.lineno) + ''' + l = lines[i] + + if l == '[': + # An anonymous list + self.id = None + i += 1 + else: + words = [] + + node_type, new_i = is_nodeline(i, words) + if not node_type: # fail for parsing new node. 
+ print("Failed to parse new node") + raise ValueError + + if self.node_type == NODE_REFERENCE: + # Only assign the reference and quit + key = words[words.index('USE') + 1] + self.id = (words[0],) + + self.reference = self.getDefDict()[key] + return new_i + + self.id = tuple(words) + + # fill in DEF/USE + key = self.getDefName() + if key != None: + self.getDefDict()[key] = self + + key = self.getProtoName() + if not key: + key = self.getExternprotoName() + + proto_dict = self.getProtoDict() + if key != None: + proto_dict[key] = self + + # Parse the proto nodes fields + self.proto_node = vrmlNode(self, NODE_ARRAY, new_i) + new_i = self.proto_node.parse(new_i) + + self.children.remove(self.proto_node) + + # print(self.proto_node) + + new_i += 1 # skip past the { + + else: # If we're a proto instance, add the proto node as our child. + spec = self.getSpec() + try: + self.children.append(proto_dict[spec]) + #pass + except: + pass + + del spec + + del proto_dict, key + + i = new_i + + # print(self.id) + ok = True + while ok: + if i >= len(lines): + return len(lines) - 1 + + l = lines[i] + # print('\tDEBUG:', i, self.node_type, l) + if l == '': + i += 1 + continue + + if l == '}': + if self.node_type != NODE_NORMAL: # also ends proto nodes, we may want a type for these too. 
+ print('wrong node ending, expected an } ' + str(i) + ' ' + str(self.node_type)) + if DEBUG: + raise ValueError + ### print("returning", i) + return i + 1 + if l == ']': + if self.node_type != NODE_ARRAY: + print('wrong node ending, expected a ] ' + str(i) + ' ' + str(self.node_type)) + if DEBUG: + raise ValueError + ### print("returning", i) + return i + 1 + + node_type, new_i = is_nodeline(i, []) + if node_type: # check text\n{ + child = vrmlNode(self, node_type, i) + i = child.parse(i) + + elif l == '[': # some files have these anonymous lists + child = vrmlNode(self, NODE_ARRAY, i) + i = child.parse(i) + + elif is_numline(i): + l_split = l.split(',') + + values = None + # See if each item is a float? + + for num_type in (int, float): + try: + values = [num_type(v) for v in l_split] + break + except: + pass + + try: + values = [[num_type(v) for v in segment.split()] for segment in l_split] + break + except: + pass + + if values is None: # dont parse + values = l_split + + # This should not extend over multiple lines however it is possible + # print(self.array_data) + if values: + self.array_data.extend(values) + i += 1 + else: + words = l.split() + if len(words) > 2 and words[1] == 'USE': + vrmlNode(self, NODE_REFERENCE, i) + else: + + # print("FIELD", i, l) + # + #words = l.split() + ### print('\t\ttag', i) + # this is a tag/ + # print(words, i, l) + value = l + # print(i) + # javastrips can exist as values. + quote_count = l.count('"') + if quote_count % 2: # odd number? + # print('MULTILINE') + while 1: + i += 1 + l = lines[i] + quote_count = l.count('"') + if quote_count % 2: # odd number? 
+ value += '\n' + l[:l.rfind('"')] + break # assume + else: + value += '\n' + l + + value_all = value.split() + + def iskey(k): + if k[0] != '"' and k[0].isalpha() and k.upper() not in {'TRUE', 'FALSE'}: + return True + return False + + def split_fields(value): + ''' + key 0.0 otherkey 1,2,3 opt1 opt1 0.0 + -> [key 0.0], [otherkey 1,2,3], [opt1 opt1 0.0] + ''' + field_list = [] + field_context = [] + + for j in range(len(value)): + if iskey(value[j]): + if field_context: + # this IS a key but the previous value was not a key, ot it was a defined field. + if (not iskey(field_context[-1])) or ((len(field_context) == 3 and field_context[1] == 'IS')): + field_list.append(field_context) + + field_context = [value[j]] + else: + # The last item was not a value, multiple keys are needed in some cases. + field_context.append(value[j]) + else: + # Is empty, just add this on + field_context.append(value[j]) + else: + # Add a value to the list + field_context.append(value[j]) + + if field_context: + field_list.append(field_context) + + return field_list + + for value in split_fields(value_all): + # Split + + if value[0] == 'field': + # field SFFloat creaseAngle 4 + self.proto_field_defs.append(value) + else: + self.fields.append(value) + i += 1 + + +def gzipOpen(path): + try: + import gzip + except: + gzip = None + + data = None + if gzip: + try: + data = gzip.open(path, 'r').read() + except: + pass + else: + print('\tNote, gzip module could not be imported, compressed files will fail to load') + + if data is None: + try: + filehandle = open(path, 'rU') + data = filehandle.read() + filehandle.close() + except: + pass + + return data + + +def vrml_parse(path): + ''' + Sets up the root node and returns it so load_web3d() can deal with the blender side of things. 
+ Return root (vrmlNode, '') or (None, 'Error String') + ''' + data = gzipOpen(path) + + if data is None: + return None, 'Failed to open file: ' + path + + # Stripped above + lines[:] = vrmlFormat(data) + + lines.insert(0, '{') + lines.insert(0, 'dymmy_node') + lines.append('}') + # Use for testing our parsed output, so we can check on line numbers. + + ''' + ff = open('/tmp/test.txt', 'w') + ff.writelines([l+'\n' for l in lines]) + ff.close() + ''' + + # Now evaluate it + node_type, new_i = is_nodeline(0, []) + if not node_type: + return None, 'Error: VRML file has no starting Node' + + # Trick to make sure we get all root nodes. + lines.insert(0, '{') + lines.insert(0, 'root_node____') # important the name starts with an ascii char + lines.append('}') + + root = vrmlNode(None, NODE_NORMAL, -1) + root.setRoot(path) # we need to set the root so we have a namespace and know the path incase of inlineing + + # Parse recursively + root.parse(0) + + # This prints a load of text + if DEBUG: + print(root) + + return root, '' + + +# ====================== END VRML + +# ====================== X3d Support + +# Sane as vrml but replace the parser +class x3dNode(vrmlNode): + def __init__(self, parent, node_type, x3dNode): + vrmlNode.__init__(self, parent, node_type, -1) + self.x3dNode = x3dNode + + def parse(self, IS_PROTO_DATA=False): + # print(self.x3dNode.tagName) + + define = self.x3dNode.getAttributeNode('DEF') + if define: + self.getDefDict()[define.value] = self + else: + use = self.x3dNode.getAttributeNode('USE') + if use: + try: + self.reference = self.getDefDict()[use.value] + self.node_type = NODE_REFERENCE + except: + print('\tWarning: reference', use.value, 'not found') + self.parent.children.remove(self) + + return + + for x3dChildNode in self.x3dNode.childNodes: + if x3dChildNode.nodeType in {x3dChildNode.TEXT_NODE, x3dChildNode.COMMENT_NODE, x3dChildNode.CDATA_SECTION_NODE}: + continue + + node_type = NODE_NORMAL + # print(x3dChildNode, dir(x3dChildNode)) + if 
x3dChildNode.getAttributeNode('USE'): + node_type = NODE_REFERENCE + + child = x3dNode(self, node_type, x3dChildNode) + child.parse() + + # TODO - x3d Inline + + def getSpec(self): + return self.x3dNode.tagName # should match vrml spec + + def getDefName(self): + data = self.x3dNode.getAttributeNode('DEF') + if data: + data.value # XXX, return?? + return None + + # Other funcs operate from vrml, but this means we can wrap XML fields, still use nice utility funcs + # getFieldAsArray getFieldAsBool etc + def getFieldName(self, field, ancestry, AS_CHILD=False): + # ancestry and AS_CHILD are ignored, only used for VRML now + + self_real = self.getRealNode() # incase we're an instance + field_xml = self.x3dNode.getAttributeNode(field) + if field_xml: + value = field_xml.value + + # We may want to edit. for x3d spesific stuff + # Sucks a bit to return the field name in the list but vrml excepts this :/ + return value.split() + else: + return None + + +def x3d_parse(path): + ''' + Sets up the root node and returns it so load_web3d() can deal with the blender side of things. + Return root (x3dNode, '') or (None, 'Error String') + ''' + + try: + import xml.dom.minidom + except: + return None, 'Error, import XML parsing module (xml.dom.minidom) failed, install python' + + ''' + try: doc = xml.dom.minidom.parse(path) + except: return None, 'Could not parse this X3D file, XML error' + ''' + + # Could add a try/except here, but a console error is more useful. 
+ data = gzipOpen(path) + + if data is None: + return None, 'Failed to open file: ' + path + + doc = xml.dom.minidom.parseString(data) + + try: + x3dnode = doc.getElementsByTagName('X3D')[0] + except: + return None, 'Not a valid x3d document, cannot import' + + root = x3dNode(None, NODE_NORMAL, x3dnode) + root.setRoot(path) # so images and Inline's we load have a relative path + root.parse() + + return root, '' + +## f = open('/_Cylinder.wrl', 'r') +# f = open('/fe/wrl/Vrml/EGS/TOUCHSN.WRL', 'r') +# vrml_parse('/fe/wrl/Vrml/EGS/TOUCHSN.WRL') +#vrml_parse('/fe/wrl/Vrml/EGS/SCRIPT.WRL') +''' +import os +files = os.popen('find /fe/wrl -iname "*.wrl"').readlines() +files.sort() +tot = len(files) +for i, f in enumerate(files): + #if i < 801: + # continue + + f = f.strip() + print(f, i, tot) + vrml_parse(f) +''' + +# NO BLENDER CODE ABOVE THIS LINE. +# ----------------------------------------------------------------------------------- +import bpy +from bpy_extras import image_utils +# import BPyImage +# import BPySys +# reload(BPySys) +# reload(BPyImage) +# import Blender +# from Blender import Texture, Material, Mathutils, Mesh, Types, Window +from mathutils import Vector, Matrix + +RAD_TO_DEG = 57.29578 + +GLOBALS = {'CIRCLE_DETAIL': 16} + + +def translateRotation(rot): + ''' axis, angle ''' + return Matrix.Rotation(rot[3], 4, Vector(rot[:3])) + + +def translateScale(sca): + mat = Matrix() # 4x4 default + mat[0][0] = sca[0] + mat[1][1] = sca[1] + mat[2][2] = sca[2] + return mat + + +def translateTransform(node, ancestry): + cent = node.getFieldAsFloatTuple('center', None, ancestry) # (0.0, 0.0, 0.0) + rot = node.getFieldAsFloatTuple('rotation', None, ancestry) # (0.0, 0.0, 1.0, 0.0) + sca = node.getFieldAsFloatTuple('scale', None, ancestry) # (1.0, 1.0, 1.0) + scaori = node.getFieldAsFloatTuple('scaleOrientation', None, ancestry) # (0.0, 0.0, 1.0, 0.0) + tx = node.getFieldAsFloatTuple('translation', None, ancestry) # (0.0, 0.0, 0.0) + + if cent: + cent_mat = 
Matrix.Translation(cent) + cent_imat = cent_mat.inverted() + else: + cent_mat = cent_imat = None + + if rot: + rot_mat = translateRotation(rot) + else: + rot_mat = None + + if sca: + sca_mat = translateScale(sca) + else: + sca_mat = None + + if scaori: + scaori_mat = translateRotation(scaori) + scaori_imat = scaori_mat.inverted() + else: + scaori_mat = scaori_imat = None + + if tx: + tx_mat = Matrix.Translation(tx) + else: + tx_mat = None + + new_mat = Matrix() + + mats = [tx_mat, cent_mat, rot_mat, scaori_mat, sca_mat, scaori_imat, cent_imat] + for mtx in mats: + if mtx: + new_mat = new_mat * mtx + + return new_mat + + +def translateTexTransform(node, ancestry): + cent = node.getFieldAsFloatTuple('center', None, ancestry) # (0.0, 0.0) + rot = node.getFieldAsFloat('rotation', None, ancestry) # 0.0 + sca = node.getFieldAsFloatTuple('scale', None, ancestry) # (1.0, 1.0) + tx = node.getFieldAsFloatTuple('translation', None, ancestry) # (0.0, 0.0) + + if cent: + # cent is at a corner by default + cent_mat = Matrix.Translation(Vector(cent).to_3d()) + cent_imat = cent_mat.inverted() + else: + cent_mat = cent_imat = None + + if rot: + rot_mat = Matrix.Rotation(rot, 4, 'Z') # translateRotation(rot) + else: + rot_mat = None + + if sca: + sca_mat = translateScale((sca[0], sca[1], 0.0)) + else: + sca_mat = None + + if tx: + tx_mat = Matrix.Translation(Vector(tx).to_3d()) + else: + tx_mat = None + + new_mat = Matrix() + + # as specified in VRML97 docs + mats = [cent_imat, sca_mat, rot_mat, cent_mat, tx_mat] + + for mtx in mats: + if mtx: + new_mat = new_mat * mtx + + return new_mat + + +# 90d X rotation +import math +MATRIX_Z_TO_Y = Matrix.Rotation(math.pi / 2.0, 4, 'X') + + +def getFinalMatrix(node, mtx, ancestry, global_matrix): + + transform_nodes = [node_tx for node_tx in ancestry if node_tx.getSpec() == 'Transform'] + if node.getSpec() == 'Transform': + transform_nodes.append(node) + transform_nodes.reverse() + + if mtx is None: + mtx = Matrix() + + for node_tx in 
transform_nodes: + mat = translateTransform(node_tx, ancestry) + mtx = mat * mtx + + # worldspace matrix + mtx = global_matrix * mtx + + return mtx + + +def importMesh_IndexedFaceSet(geom, bpyima, ancestry): + # print(geom.lineno, geom.id, vrmlNode.DEF_NAMESPACE.keys()) + + ccw = geom.getFieldAsBool('ccw', True, ancestry) + ifs_colorPerVertex = geom.getFieldAsBool('colorPerVertex', True, ancestry) # per vertex or per face + ifs_normalPerVertex = geom.getFieldAsBool('normalPerVertex', True, ancestry) + + # This is odd how point is inside Coordinate + + # VRML not x3d + #coord = geom.getChildByName('coord') # 'Coordinate' + + coord = geom.getChildBySpec('Coordinate') # works for x3d and vrml + + if coord: + ifs_points = coord.getFieldAsArray('point', 3, ancestry) + else: + coord = [] + + if not coord: + print('\tWarnint: IndexedFaceSet has no points') + return None, ccw + + ifs_faces = geom.getFieldAsArray('coordIndex', 0, ancestry) + + coords_tex = None + if ifs_faces: # In rare cases this causes problems - no faces but UVs??? 
+ + # WORKS - VRML ONLY + # coords_tex = geom.getChildByName('texCoord') + coords_tex = geom.getChildBySpec('TextureCoordinate') + + if coords_tex: + ifs_texpoints = coords_tex.getFieldAsArray('point', 2, ancestry) + ifs_texfaces = geom.getFieldAsArray('texCoordIndex', 0, ancestry) + + if not ifs_texpoints: + # IF we have no coords, then dont bother + coords_tex = None + + # WORKS - VRML ONLY + # vcolor = geom.getChildByName('color') + vcolor = geom.getChildBySpec('Color') + vcolor_spot = None # spot color when we dont have an array of colors + if vcolor: + # float to char + ifs_vcol = [(0, 0, 0)] # EEKADOODLE - vertex start at 1 + ifs_vcol.extend([col for col in vcolor.getFieldAsArray('color', 3, ancestry)]) + ifs_color_index = geom.getFieldAsArray('colorIndex', 0, ancestry) + + if not ifs_vcol: + vcolor_spot = vcolor.getFieldAsFloatTuple('color', [], ancestry) + + # Convert faces into somthing blender can use + edges = [] + + # All lists are aligned! + faces = [] + faces_uv = [] # if ifs_texfaces is empty then the faces_uv will match faces exactly. + faces_orig_index = [] # for ngons, we need to know our original index + + if coords_tex and ifs_texfaces: + do_uvmap = True + else: + do_uvmap = False + + # current_face = [0] # pointer anyone + + def add_face(face, fuvs, orig_index): + l = len(face) + if l == 3 or l == 4: + faces.append(face) + # faces_orig_index.append(current_face[0]) + if do_uvmap: + faces_uv.append(fuvs) + + faces_orig_index.append(orig_index) + elif l == 2: + edges.append(face) + elif l > 4: + for i in range(2, len(face)): + faces.append([face[0], face[i - 1], face[i]]) + if do_uvmap: + faces_uv.append([fuvs[0], fuvs[i - 1], fuvs[i]]) + faces_orig_index.append(orig_index) + else: + # faces with 1 verts? pfft! 
+ # still will affect index ordering + pass + + face = [] + fuvs = [] + orig_index = 0 + for i, fi in enumerate(ifs_faces): + # ifs_texfaces and ifs_faces should be aligned + if fi != -1: + # face.append(int(fi)) # in rare cases this is a float + # EEKADOODLE!!! + # Annoyance where faces that have a zero index vert get rotated. This will then mess up UVs and VColors + face.append(int(fi) + 1) # in rare cases this is a float, +1 because of stupid EEKADOODLE :/ + + if do_uvmap: + if i >= len(ifs_texfaces): + print('\tWarning: UV Texface index out of range') + fuvs.append(ifs_texfaces[0]) + else: + fuvs.append(ifs_texfaces[i]) + else: + add_face(face, fuvs, orig_index) + face = [] + if do_uvmap: + fuvs = [] + orig_index += 1 + + add_face(face, fuvs, orig_index) + del add_face # dont need this func anymore + + bpymesh = bpy.data.meshes.new(name="XXX") + + # EEKADOODLE + bpymesh.vertices.add(1 + (len(ifs_points))) + bpymesh.vertices.foreach_set("co", [0, 0, 0] + [a for v in ifs_points for a in v]) # XXX25 speed + + # print(len(ifs_points), faces, edges, ngons) + + try: + bpymesh.faces.add(len(faces)) + bpymesh.faces.foreach_set("vertices_raw", [a for f in faces for a in (f + [0] if len(f) == 3 else f)]) # XXX25 speed + except KeyError: + print("one or more vert indices out of range. corrupt file?") + #for f in faces: + # bpymesh.faces.extend(faces, smooth=True) + + bpymesh.validate() + bpymesh.update() + + if len(bpymesh.faces) != len(faces): + print('\tWarning: adding faces did not work! file is invalid, not adding UVs or vcolors') + return bpymesh, ccw + + # Apply UVs if we have them + if not do_uvmap: + faces_uv = faces # fallback, we didnt need a uvmap in the first place, fallback to the face/vert mapping. 
+ if coords_tex: + #print(ifs_texpoints) + # print(geom) + uvlay = bpymesh.uv_textures.new() + + for i, f in enumerate(uvlay.data): + f.image = bpyima + fuv = faces_uv[i] # uv indices + for j, uv in enumerate(f.uv): + # print(fuv, j, len(ifs_texpoints)) + try: + f.uv[j] = ifs_texpoints[fuv[j]] # XXX25, speedup + except: + print('\tWarning: UV Index out of range') + f.uv[j] = ifs_texpoints[0] # XXX25, speedup + + elif bpyima and len(bpymesh.faces): + # Oh Bugger! - we cant really use blenders ORCO for for texture space since texspace dosnt rotate. + # we have to create VRML's coords as UVs instead. + + # VRML docs + ''' + If the texCoord field is NULL, a default texture coordinate mapping is calculated using the local + coordinate system bounding box of the shape. The longest dimension of the bounding box defines the S coordinates, + and the next longest defines the T coordinates. If two or all three dimensions of the bounding box are equal, + ties shall be broken by choosing the X, Y, or Z dimension in that order of preference. + The value of the S coordinate ranges from 0 to 1, from one end of the bounding box to the other. + The T coordinate ranges between 0 and the ratio of the second greatest dimension of the bounding box to the greatest dimension. + ''' + + # Note, S,T == U,V + # U gets longest, V gets second longest + xmin, ymin, zmin = ifs_points[0] + xmax, ymax, zmax = ifs_points[0] + for co in ifs_points: + x, y, z = co + if x < xmin: + xmin = x + if y < ymin: + ymin = y + if z < zmin: + zmin = z + + if x > xmax: + xmax = x + if y > ymax: + ymax = y + if z > zmax: + zmax = z + + xlen = xmax - xmin + ylen = ymax - ymin + zlen = zmax - zmin + + depth_min = xmin, ymin, zmin + depth_list = [xlen, ylen, zlen] + depth_sort = depth_list[:] + depth_sort.sort() + + depth_idx = [depth_list.index(val) for val in depth_sort] + + axis_u = depth_idx[-1] + axis_v = depth_idx[-2] # second longest + + # Hack, swap these !!! TODO - Why swap??? 
- it seems to work correctly but should not. + # axis_u,axis_v = axis_v,axis_u + + min_u = depth_min[axis_u] + min_v = depth_min[axis_v] + depth_u = depth_list[axis_u] + depth_v = depth_list[axis_v] + + depth_list[axis_u] + + if axis_u == axis_v: + # This should be safe because when 2 axies have the same length, the lower index will be used. + axis_v += 1 + + uvlay = bpymesh.uv_textures.new() + + # HACK !!! - seems to be compatible with Cosmo though. + depth_v = depth_u = max(depth_v, depth_u) + + bpymesh_vertices = bpymesh.vertices[:] + bpymesh_faces = bpymesh.faces[:] + + for j, f in enumerate(uvlay.data): + f.image = bpyima + fuv = f.uv + f_v = bpymesh_faces[j].vertices[:] # XXX25 speed + + for i, v in enumerate(f_v): + co = bpymesh_vertices[v].co + fuv[i] = (co[axis_u] - min_u) / depth_u, (co[axis_v] - min_v) / depth_v + + # Add vcote + if vcolor: + # print(ifs_vcol) + collay = bpymesh.vertex_colors.new() + + for f_idx, f in enumerate(collay.data): + fv = bpymesh.faces[f_idx].vertices[:] + if len(fv) == 3: # XXX speed + fcol = f.color1, f.color2, f.color3 + else: + fcol = f.color1, f.color2, f.color3, f.color4 + if ifs_colorPerVertex: + for i, c in enumerate(fcol): + color_index = fv[i] # color index is vert index + if ifs_color_index: + try: + color_index = ifs_color_index[color_index] + except: + print('\tWarning: per vertex color index out of range') + continue + + if color_index < len(ifs_vcol): + c.r, c.g, c.b = ifs_vcol[color_index] + else: + #print('\tWarning: per face color index out of range') + pass + else: + if vcolor_spot: # use 1 color, when ifs_vcol is [] + for c in fcol: + c.r, c.g, c.b = vcolor_spot + else: + color_index = faces_orig_index[f_idx] # color index is face index + #print(color_index, ifs_color_index) + if ifs_color_index: + if color_index >= len(ifs_color_index): + print('\tWarning: per face color index out of range') + color_index = 0 + else: + color_index = ifs_color_index[color_index] + try: + col = ifs_vcol[color_index] + except 
IndexError: + # TODO, look + col = (1.0, 1.0, 1.0) + for i, c in enumerate(fcol): + c.r, c.g, c.b = col + + # XXX25 + # bpymesh.vertices.delete([0, ]) # EEKADOODLE + + return bpymesh, ccw + + +def importMesh_IndexedLineSet(geom, ancestry): + # VRML not x3d + #coord = geom.getChildByName('coord') # 'Coordinate' + coord = geom.getChildBySpec('Coordinate') # works for x3d and vrml + if coord: + points = coord.getFieldAsArray('point', 3, ancestry) + else: + points = [] + + if not points: + print('\tWarning: IndexedLineSet had no points') + return None + + ils_lines = geom.getFieldAsArray('coordIndex', 0, ancestry) + + lines = [] + line = [] + + for il in ils_lines: + if il == -1: + lines.append(line) + line = [] + else: + line.append(int(il)) + lines.append(line) + + # vcolor = geom.getChildByName('color') # blender dosnt have per vertex color + + bpycurve = bpy.data.curves.new('IndexedCurve', 'CURVE') + bpycurve.dimensions = '3D' + + for line in lines: + if not line: + continue + co = points[line[0]] + nu = bpycurve.splines.new('POLY') + nu.points.add(len(line) - 1) # the new nu has 1 point to begin with + for il, pt in zip(line, nu.points): + pt.co[0:3] = points[il] + + return bpycurve + + +def importMesh_PointSet(geom, ancestry): + # VRML not x3d + #coord = geom.getChildByName('coord') # 'Coordinate' + coord = geom.getChildBySpec('Coordinate') # works for x3d and vrml + if coord: + points = coord.getFieldAsArray('point', 3, ancestry) + else: + points = [] + + # vcolor = geom.getChildByName('color') # blender dosnt have per vertex color + + bpymesh = bpy.data.meshes.new("XXX") + bpymesh.vertices.add(len(points)) + bpymesh.vertices.foreach_set("co", [a for v in points for a in v]) + + # No need to validate + bpymesh.update() + return bpymesh + +GLOBALS['CIRCLE_DETAIL'] = 12 + + +def bpy_ops_add_object_hack(): # XXX25, evil + scene = bpy.context.scene + obj = scene.objects[0] + scene.objects.unlink(obj) + bpymesh = obj.data + bpy.data.objects.remove(obj) + return 
bpymesh + + +def importMesh_Sphere(geom, ancestry): + diameter = geom.getFieldAsFloat('radius', 0.5, ancestry) + # bpymesh = Mesh.Primitives.UVsphere(GLOBALS['CIRCLE_DETAIL'], GLOBALS['CIRCLE_DETAIL'], diameter) + + bpy.ops.mesh.primitive_uv_sphere_add(segments=GLOBALS['CIRCLE_DETAIL'], + ring_count=GLOBALS['CIRCLE_DETAIL'], + size=diameter, + view_align=False, + enter_editmode=False, + ) + + bpymesh = bpy_ops_add_object_hack() + + bpymesh.transform(MATRIX_Z_TO_Y) + return bpymesh + + +def importMesh_Cylinder(geom, ancestry): + # bpymesh = bpy.data.meshes.new() + diameter = geom.getFieldAsFloat('radius', 1.0, ancestry) + height = geom.getFieldAsFloat('height', 2, ancestry) + + # bpymesh = Mesh.Primitives.Cylinder(GLOBALS['CIRCLE_DETAIL'], diameter, height) + + bpy.ops.mesh.primitive_cylinder_add(vertices=GLOBALS['CIRCLE_DETAIL'], + radius=diameter, + depth=height, + cap_ends=True, + view_align=False, + enter_editmode=False, + ) + + bpymesh = bpy_ops_add_object_hack() + + bpymesh.transform(MATRIX_Z_TO_Y) + + # Warning - Rely in the order Blender adds verts + # not nice design but wont change soon. + + bottom = geom.getFieldAsBool('bottom', True, ancestry) + side = geom.getFieldAsBool('side', True, ancestry) + top = geom.getFieldAsBool('top', True, ancestry) + + if not top: # last vert is top center of tri fan. 
+ # bpymesh.vertices.delete([(GLOBALS['CIRCLE_DETAIL'] + GLOBALS['CIRCLE_DETAIL']) + 1]) # XXX25 + pass + + if not bottom: # second last vert is bottom of triangle fan + # XXX25 + # bpymesh.vertices.delete([GLOBALS['CIRCLE_DETAIL'] + GLOBALS['CIRCLE_DETAIL']]) + pass + + if not side: + # remove all quads + # XXX25 + # bpymesh.faces.delete(1, [f for f in bpymesh.faces if len(f) == 4]) + pass + + return bpymesh + + +def importMesh_Cone(geom, ancestry): + # bpymesh = bpy.data.meshes.new() + diameter = geom.getFieldAsFloat('bottomRadius', 1.0, ancestry) + height = geom.getFieldAsFloat('height', 2, ancestry) + + # bpymesh = Mesh.Primitives.Cone(GLOBALS['CIRCLE_DETAIL'], diameter, height) + + bpy.ops.mesh.primitive_cone_add(vertices=GLOBALS['CIRCLE_DETAIL'], + radius=diameter, + depth=height, + cap_end=True, + view_align=False, + enter_editmode=False, + ) + + bpymesh = bpy_ops_add_object_hack() + + bpymesh.transform(MATRIX_Z_TO_Y) + + # Warning - Rely in the order Blender adds verts + # not nice design but wont change soon. 
+ + bottom = geom.getFieldAsBool('bottom', True, ancestry) + side = geom.getFieldAsBool('side', True, ancestry) + + if not bottom: # last vert is on the bottom + # bpymesh.vertices.delete([GLOBALS['CIRCLE_DETAIL'] + 1]) # XXX25 + pass + if not side: # second last vert is on the pointy bit of the cone + # bpymesh.vertices.delete([GLOBALS['CIRCLE_DETAIL']]) # XXX25 + pass + + return bpymesh + + +def importMesh_Box(geom, ancestry): + # bpymesh = bpy.data.meshes.new() + + size = geom.getFieldAsFloatTuple('size', (2.0, 2.0, 2.0), ancestry) + + # bpymesh = Mesh.Primitives.Cube(1.0) + bpy.ops.mesh.primitive_cube_add(view_align=False, + enter_editmode=False, + ) + + bpymesh = bpy_ops_add_object_hack() + + # Scale the box to the size set + scale_mat = Matrix(((size[0], 0, 0), (0, size[1], 0), (0, 0, size[2]))) * 0.5 + bpymesh.transform(scale_mat.to_4x4()) + + return bpymesh + + +def importShape(node, ancestry, global_matrix): + vrmlname = node.getDefName() + if not vrmlname: + vrmlname = 'Shape' + + # works 100% in vrml, but not x3d + #appr = node.getChildByName('appearance') # , 'Appearance' + #geom = node.getChildByName('geometry') # , 'IndexedFaceSet' + + # Works in vrml and x3d + appr = node.getChildBySpec('Appearance') + geom = node.getChildBySpec(['IndexedFaceSet', 'IndexedLineSet', 'PointSet', 'Sphere', 'Box', 'Cylinder', 'Cone']) + + # For now only import IndexedFaceSet's + if geom: + bpymat = None + bpyima = None + texmtx = None + + depth = 0 # so we can set alpha face flag later + + if appr: + + #mat = appr.getChildByName('material') # 'Material' + #ima = appr.getChildByName('texture') # , 'ImageTexture' + #if ima and ima.getSpec() != 'ImageTexture': + # print('\tWarning: texture type "%s" is not supported' % ima.getSpec()) + # ima = None + # textx = appr.getChildByName('textureTransform') + + mat = appr.getChildBySpec('Material') + ima = appr.getChildBySpec('ImageTexture') + + textx = appr.getChildBySpec('TextureTransform') + + if textx: + texmtx = 
translateTexTransform(textx, ancestry) + + # print(mat, ima) + if mat or ima: + + if not mat: + mat = ima # This is a bit dumb, but just means we use default values for all + + # all values between 0.0 and 1.0, defaults from VRML docs + bpymat = bpy.data.materials.new("XXX") + bpymat.ambient = mat.getFieldAsFloat('ambientIntensity', 0.2, ancestry) + bpymat.diffuse_color = mat.getFieldAsFloatTuple('diffuseColor', [0.8, 0.8, 0.8], ancestry) + + # NOTE - blender dosnt support emmisive color + # Store in mirror color and approximate with emit. + emit = mat.getFieldAsFloatTuple('emissiveColor', [0.0, 0.0, 0.0], ancestry) + bpymat.mirror_color = emit + bpymat.emit = (emit[0] + emit[1] + emit[2]) / 3.0 + + bpymat.specular_hardness = int(1 + (510 * mat.getFieldAsFloat('shininess', 0.2, ancestry))) # 0-1 -> 1-511 + bpymat.specular_color = mat.getFieldAsFloatTuple('specularColor', [0.0, 0.0, 0.0], ancestry) + bpymat.alpha = 1.0 - mat.getFieldAsFloat('transparency', 0.0, ancestry) + if bpymat.alpha < 0.999: + bpymat.use_transparency = True + + if ima: + ima_url = ima.getFieldAsString('url', None, ancestry) + + if ima_url is None: + try: + ima_url = ima.getFieldAsStringArray('url', ancestry)[0] # in some cases we get a list of images. 
+ except: + ima_url = None + + if ima_url is None: + print("\twarning, image with no URL, this is odd") + else: + bpyima = image_utils.image_load(ima_url, os.path.dirname(node.getFilename()), place_holder=False, recursive=False, convert_callback=imageConvertCompat) + if bpyima: + texture = bpy.data.textures.new("XXX", 'IMAGE') + texture.image = bpyima + + # Adds textures for materials (rendering) + try: + depth = bpyima.depth + except: + depth = -1 + + if depth == 32: + # Image has alpha + bpymat.setTexture(0, texture, Texture.TexCo.UV, Texture.MapTo.COL | Texture.MapTo.ALPHA) + texture.setImageFlags('MipMap', 'InterPol', 'UseAlpha') + bpymat.mode |= Material.Modes.ZTRANSP + bpymat.alpha = 0.0 + else: + mtex = bpymat.texture_slots.add() + mtex.texture = texture + mtex.texture_coords = 'UV' + mtex.use_map_diffuse = True + + ima_repS = ima.getFieldAsBool('repeatS', True, ancestry) + ima_repT = ima.getFieldAsBool('repeatT', True, ancestry) + + # To make this work properly we'd need to scale the UV's too, better to ignore th + # texture.repeat = max(1, ima_repS * 512), max(1, ima_repT * 512) + + if not ima_repS: + bpyima.use_clamp_x = True + if not ima_repT: + bpyima.use_clamp_y = True + + bpydata = None + geom_spec = geom.getSpec() + ccw = True + if geom_spec == 'IndexedFaceSet': + bpydata, ccw = importMesh_IndexedFaceSet(geom, bpyima, ancestry) + elif geom_spec == 'IndexedLineSet': + bpydata = importMesh_IndexedLineSet(geom, ancestry) + elif geom_spec == 'PointSet': + bpydata = importMesh_PointSet(geom, ancestry) + elif geom_spec == 'Sphere': + bpydata = importMesh_Sphere(geom, ancestry) + elif geom_spec == 'Box': + bpydata = importMesh_Box(geom, ancestry) + elif geom_spec == 'Cylinder': + bpydata = importMesh_Cylinder(geom, ancestry) + elif geom_spec == 'Cone': + bpydata = importMesh_Cone(geom, ancestry) + else: + print('\tWarning: unsupported type "%s"' % geom_spec) + return + + if bpydata: + vrmlname = vrmlname + geom_spec + + bpydata.name = vrmlname + + bpyob = 
node.blendObject = bpy.data.objects.new(vrmlname, bpydata) + bpy.context.scene.objects.link(bpyob) + + if type(bpydata) == bpy.types.Mesh: + is_solid = geom.getFieldAsBool('solid', True, ancestry) + creaseAngle = geom.getFieldAsFloat('creaseAngle', None, ancestry) + + if creaseAngle is not None: + bpydata.auto_smooth_angle = creaseAngle + bpydata.use_auto_smooth = True + + # Only ever 1 material per shape + if bpymat: + bpydata.materials.append(bpymat) + + if bpydata.uv_textures: + + if depth == 32: # set the faces alpha flag? + transp = Mesh.FaceTranspModes.ALPHA + for f in bpydata.uv_textures.active.data: + f.blend_type = 'ALPHA' + + if texmtx: + # Apply texture transform? + uv_copy = Vector() + for f in bpydata.uv_textures.active.data: + fuv = f.uv + for i, uv in enumerate(fuv): + uv_copy.x = uv[0] + uv_copy.y = uv[1] + + fuv[i] = (uv_copy * texmtx)[0:2] + # Done transforming the texture + + # Must be here and not in IndexedFaceSet because it needs an object for the flip func. Messy :/ + if not ccw: + # bpydata.flipNormals() + # XXX25 + pass + + # else could be a curve for example + + # Can transform data or object, better the object so we can instance the data + #bpymesh.transform(getFinalMatrix(node)) + bpyob.matrix_world = getFinalMatrix(node, None, ancestry, global_matrix) + + +def importLamp_PointLight(node, ancestry): + vrmlname = node.getDefName() + if not vrmlname: + vrmlname = 'PointLight' + + # ambientIntensity = node.getFieldAsFloat('ambientIntensity', 0.0, ancestry) # TODO + # attenuation = node.getFieldAsFloatTuple('attenuation', (1.0, 0.0, 0.0), ancestry) # TODO + color = node.getFieldAsFloatTuple('color', (1.0, 1.0, 1.0), ancestry) + intensity = node.getFieldAsFloat('intensity', 1.0, ancestry) # max is documented to be 1.0 but some files have higher. 
+ location = node.getFieldAsFloatTuple('location', (0.0, 0.0, 0.0), ancestry) + # is_on = node.getFieldAsBool('on', True, ancestry) # TODO + radius = node.getFieldAsFloat('radius', 100.0, ancestry) + + bpylamp = bpy.data.lamps.new("ToDo", 'POINT') + bpylamp.energy = intensity + bpylamp.distance = radius + bpylamp.color = color + + mtx = Matrix.Translation(Vector(location)) + + return bpylamp, mtx + + +def importLamp_DirectionalLight(node, ancestry): + vrmlname = node.getDefName() + if not vrmlname: + vrmlname = 'DirectLight' + + # ambientIntensity = node.getFieldAsFloat('ambientIntensity', 0.0) # TODO + color = node.getFieldAsFloatTuple('color', (1.0, 1.0, 1.0), ancestry) + direction = node.getFieldAsFloatTuple('direction', (0.0, 0.0, -1.0), ancestry) + intensity = node.getFieldAsFloat('intensity', 1.0, ancestry) # max is documented to be 1.0 but some files have higher. + # is_on = node.getFieldAsBool('on', True, ancestry) # TODO + + bpylamp = bpy.data.lamps.new(vrmlname, 'SUN') + bpylamp.energy = intensity + bpylamp.color = color + + # lamps have their direction as -z, yup + mtx = Vector(direction).to_track_quat('-Z', 'Y').to_matrix().to_4x4() + + return bpylamp, mtx + +# looks like default values for beamWidth and cutOffAngle were swapped in VRML docs. + + +def importLamp_SpotLight(node, ancestry): + vrmlname = node.getDefName() + if not vrmlname: + vrmlname = 'SpotLight' + + # ambientIntensity = geom.getFieldAsFloat('ambientIntensity', 0.0, ancestry) # TODO + # attenuation = geom.getFieldAsFloatTuple('attenuation', (1.0, 0.0, 0.0), ancestry) # TODO + beamWidth = node.getFieldAsFloat('beamWidth', 1.570796, ancestry) # max is documented to be 1.0 but some files have higher. + color = node.getFieldAsFloatTuple('color', (1.0, 1.0, 1.0), ancestry) + cutOffAngle = node.getFieldAsFloat('cutOffAngle', 0.785398, ancestry) * 2.0 # max is documented to be 1.0 but some files have higher. 
+ direction = node.getFieldAsFloatTuple('direction', (0.0, 0.0, -1.0), ancestry) + intensity = node.getFieldAsFloat('intensity', 1.0, ancestry) # max is documented to be 1.0 but some files have higher. + location = node.getFieldAsFloatTuple('location', (0.0, 0.0, 0.0), ancestry) + # is_on = node.getFieldAsBool('on', True, ancestry) # TODO + radius = node.getFieldAsFloat('radius', 100.0, ancestry) + + bpylamp = bpy.data.lamps.new(vrmlname, 'SPOT') + bpylamp.energy = intensity + bpylamp.distance = radius + bpylamp.color = color + bpylamp.spot_size = cutOffAngle + if beamWidth > cutOffAngle: + bpylamp.spot_blend = 0.0 + else: + if cutOffAngle == 0.0: # this should never happen! + bpylamp.spot_blend = 0.5 + else: + bpylamp.spot_blend = beamWidth / cutOffAngle + + # Convert + + # lamps have their direction as -z, y==up + mtx = Matrix.Translation(location) * Vector(direction).to_track_quat('-Z', 'Y').to_matrix().to_4x4() + + return bpylamp, mtx + + +def importLamp(node, spec, ancestry, global_matrix): + if spec == 'PointLight': + bpylamp, mtx = importLamp_PointLight(node, ancestry) + elif spec == 'DirectionalLight': + bpylamp, mtx = importLamp_DirectionalLight(node, ancestry) + elif spec == 'SpotLight': + bpylamp, mtx = importLamp_SpotLight(node, ancestry) + else: + print("Error, not a lamp") + raise ValueError + + bpyob = node.blendObject = bpy.data.objects.new("TODO", bpylamp) + bpy.context.scene.objects.link(bpyob) + + bpyob.matrix_world = getFinalMatrix(node, mtx, ancestry, global_matrix) + + +def importViewpoint(node, ancestry, global_matrix): + name = node.getDefName() + if not name: + name = 'Viewpoint' + + fieldOfView = node.getFieldAsFloat('fieldOfView', 0.785398, ancestry) # max is documented to be 1.0 but some files have higher. 
+ # jump = node.getFieldAsBool('jump', True, ancestry) + orientation = node.getFieldAsFloatTuple('orientation', (0.0, 0.0, 1.0, 0.0), ancestry) + position = node.getFieldAsFloatTuple('position', (0.0, 0.0, 0.0), ancestry) + description = node.getFieldAsString('description', '', ancestry) + + bpycam = bpy.data.cameras.new(name) + + bpycam.angle = fieldOfView + + mtx = Matrix.Translation(Vector(position)) * translateRotation(orientation) + + bpyob = node.blendObject = bpy.data.objects.new(name, bpycam) + bpy.context.scene.objects.link(bpyob) + bpyob.matrix_world = getFinalMatrix(node, mtx, ancestry, global_matrix) + + +def importTransform(node, ancestry, global_matrix): + name = node.getDefName() + if not name: + name = 'Transform' + + bpyob = node.blendObject = bpy.data.objects.new(name, None) + bpy.context.scene.objects.link(bpyob) + + bpyob.matrix_world = getFinalMatrix(node, None, ancestry, global_matrix) + + # so they are not too annoying + bpyob.empty_draw_type = 'PLAIN_AXES' + bpyob.empty_draw_size = 0.2 + + +#def importTimeSensor(node): +def action_fcurve_ensure(action, data_path, array_index): + for fcu in action.fcurves: + if fcu.data_path == data_path and fcu.array_index == array_index: + return fcu + + return action.fcurves.new(data_path=data_path, index=array_index) + + +def translatePositionInterpolator(node, action, ancestry): + key = node.getFieldAsArray('key', 0, ancestry) + keyValue = node.getFieldAsArray('keyValue', 3, ancestry) + + loc_x = action_fcurve_ensure(action, "location", 0) + loc_y = action_fcurve_ensure(action, "location", 1) + loc_z = action_fcurve_ensure(action, "location", 2) + + for i, time in enumerate(key): + try: + x, y, z = keyValue[i] + except: + continue + + loc_x.keyframe_points.insert(time, x) + loc_y.keyframe_points.insert(time, y) + loc_z.keyframe_points.insert(time, z) + + for fcu in (loc_x, loc_y, loc_z): + for kf in fcu.keyframe_points: + kf.interpolation = 'LINEAR' + + +def translateOrientationInterpolator(node, action, 
ancestry): + key = node.getFieldAsArray('key', 0, ancestry) + keyValue = node.getFieldAsArray('keyValue', 4, ancestry) + + rot_x = action_fcurve_ensure(action, "rotation_euler", 0) + rot_y = action_fcurve_ensure(action, "rotation_euler", 1) + rot_z = action_fcurve_ensure(action, "rotation_euler", 2) + + for i, time in enumerate(key): + try: + x, y, z, w = keyValue[i] + except: + continue + + mtx = translateRotation((x, y, z, w)) + eul = mtx.to_euler() + rot_x.keyframe_points.insert(time, eul.x) + rot_y.keyframe_points.insert(time, eul.y) + rot_z.keyframe_points.insert(time, eul.z) + + for fcu in (rot_x, rot_y, rot_z): + for kf in fcu.keyframe_points: + kf.interpolation = 'LINEAR' + + +# Untested! +def translateScalarInterpolator(node, action, ancestry): + key = node.getFieldAsArray('key', 0, ancestry) + keyValue = node.getFieldAsArray('keyValue', 4, ancestry) + + sca_x = action_fcurve_ensure(action, "scale", 0) + sca_y = action_fcurve_ensure(action, "scale", 1) + sca_z = action_fcurve_ensure(action, "scale", 2) + + for i, time in enumerate(key): + try: + x, y, z = keyValue[i] + except: + continue + + sca_x.keyframe_points.new(time, x) + sca_y.keyframe_points.new(time, y) + sca_z.keyframe_points.new(time, z) + + +def translateTimeSensor(node, action, ancestry): + ''' + Apply a time sensor to an action, VRML has many combinations of loop/start/stop/cycle times + to give different results, for now just do the basics + ''' + + # XXX25 TODO + if 1: + return + + time_cu = action.addCurve('Time') + time_cu.interpolation = Blender.IpoCurve.InterpTypes.LINEAR + + cycleInterval = node.getFieldAsFloat('cycleInterval', None, ancestry) + + startTime = node.getFieldAsFloat('startTime', 0.0, ancestry) + stopTime = node.getFieldAsFloat('stopTime', 250.0, ancestry) + + if cycleInterval != None: + stopTime = startTime + cycleInterval + + loop = node.getFieldAsBool('loop', False, ancestry) + + time_cu.append((1 + startTime, 0.0)) + time_cu.append((1 + stopTime, 1.0 / 10.0)) # 
anoying, the UI uses /10 + + if loop: + time_cu.extend = Blender.IpoCurve.ExtendTypes.CYCLIC # or - EXTRAP, CYCLIC_EXTRAP, CONST, + + +def importRoute(node, ancestry): + ''' + Animation route only at the moment + ''' + + if not hasattr(node, 'fields'): + return + + routeIpoDict = node.getRouteIpoDict() + + def getIpo(id): + try: + action = routeIpoDict[id] + except: + action = routeIpoDict[id] = bpy.data.actions.new('web3d_ipo') + return action + + # for getting definitions + defDict = node.getDefDict() + ''' + Handles routing nodes to eachother + +ROUTE vpPI.value_changed TO champFly001.set_position +ROUTE vpOI.value_changed TO champFly001.set_orientation +ROUTE vpTs.fraction_changed TO vpPI.set_fraction +ROUTE vpTs.fraction_changed TO vpOI.set_fraction +ROUTE champFly001.bindTime TO vpTs.set_startTime + ''' + + #from_id, from_type = node.id[1].split('.') + #to_id, to_type = node.id[3].split('.') + + #value_changed + set_position_node = None + set_orientation_node = None + time_node = None + + for field in node.fields: + if field and field[0] == 'ROUTE': + try: + from_id, from_type = field[1].split('.') + to_id, to_type = field[3].split('.') + except: + print("Warning, invalid ROUTE", field) + continue + + if from_type == 'value_changed': + if to_type == 'set_position': + action = getIpo(to_id) + set_data_from_node = defDict[from_id] + translatePositionInterpolator(set_data_from_node, action, ancestry) + + if to_type in {'set_orientation', 'rotation'}: + action = getIpo(to_id) + set_data_from_node = defDict[from_id] + translateOrientationInterpolator(set_data_from_node, action, ancestry) + + if to_type == 'set_scale': + action = getIpo(to_id) + set_data_from_node = defDict[from_id] + translateScalarInterpolator(set_data_from_node, action, ancestry) + + elif from_type == 'bindTime': + action = getIpo(from_id) + time_node = defDict[to_id] + translateTimeSensor(time_node, action, ancestry) + + +def load_web3d(path, + PREF_FLAT=False, + PREF_CIRCLE_DIV=16, + 
global_matrix=None, + HELPER_FUNC=None, + ): + + # Used when adding blender primitives + GLOBALS['CIRCLE_DETAIL'] = PREF_CIRCLE_DIV + + #root_node = vrml_parse('/_Cylinder.wrl') + if path.lower().endswith('.x3d'): + root_node, msg = x3d_parse(path) + else: + root_node, msg = vrml_parse(path) + + if not root_node: + print(msg) + return + + if global_matrix is None: + global_matrix = Matrix() + + # fill with tuples - (node, [parents-parent, parent]) + all_nodes = root_node.getSerialized([], []) + + for node, ancestry in all_nodes: + #if 'castle.wrl' not in node.getFilename(): + # continue + + spec = node.getSpec() + ''' + prefix = node.getPrefix() + if prefix=='PROTO': + pass + else + ''' + if HELPER_FUNC and HELPER_FUNC(node, ancestry): + # Note, include this function so the VRML/X3D importer can be extended + # by an external script. - gets first pick + pass + if spec == 'Shape': + importShape(node, ancestry, global_matrix) + elif spec in {'PointLight', 'DirectionalLight', 'SpotLight'}: + importLamp(node, spec, ancestry, global_matrix) + elif spec == 'Viewpoint': + importViewpoint(node, ancestry, global_matrix) + elif spec == 'Transform': + # Only use transform nodes when we are not importing a flat object hierarchy + if PREF_FLAT == False: + importTransform(node, ancestry, global_matrix) + ''' + # These are delt with later within importRoute + elif spec=='PositionInterpolator': + action = bpy.data.ipos.new('web3d_ipo', 'Object') + translatePositionInterpolator(node, action) + ''' + + # After we import all nodes, route events - anim paths + for node, ancestry in all_nodes: + importRoute(node, ancestry) + + for node, ancestry in all_nodes: + if node.isRoot(): + # we know that all nodes referenced from will be in + # routeIpoDict so no need to run node.getDefDict() for every node. 
+ routeIpoDict = node.getRouteIpoDict() + defDict = node.getDefDict() + + for key, action in routeIpoDict.items(): + + # Assign anim curves + node = defDict[key] + if node.blendObject is None: # Add an object if we need one for animation + node.blendObject = bpy.data.objects.new('AnimOb', None) # , name) + bpy.context.scene.objects.link(node.blendObject) + + if node.blendObject.animation_data is None: + node.blendObject.animation_data_create() + + node.blendObject.animation_data.action = action + + # Add in hierarchy + if PREF_FLAT == False: + child_dict = {} + for node, ancestry in all_nodes: + if node.blendObject: + blendObject = None + + # Get the last parent + i = len(ancestry) + while i: + i -= 1 + blendObject = ancestry[i].blendObject + if blendObject: + break + + if blendObject: + # Parent Slow, - 1 liner but works + # blendObject.makeParent([node.blendObject], 0, 1) + + # Parent FAST + try: + child_dict[blendObject].append(node.blendObject) + except: + child_dict[blendObject] = [node.blendObject] + + # Parent + for parent, children in child_dict.items(): + for c in children: + c.parent = parent + + # update deps + bpy.context.scene.update() + del child_dict + + +def load(operator, context, filepath="", global_matrix=None): + + load_web3d(filepath, + PREF_FLAT=True, + PREF_CIRCLE_DIV=16, + global_matrix=global_matrix, + ) + + return {'FINISHED'} |