git.blender.org/blender.git
Diffstat (limited to 'release/scripts')
-rw-r--r--  release/scripts/io/export_obj.py             |  88
-rw-r--r--  release/scripts/io/import_scene_obj.py       |  94
-rw-r--r--  release/scripts/io/netrender/client.py       |  10
-rw-r--r--  release/scripts/io/netrender/master.py       |  21
-rw-r--r--  release/scripts/io/netrender/operators.py    |   7
-rw-r--r--  release/scripts/io/netrender/slave.py        |  30
-rw-r--r--  release/scripts/io/netrender/utils.py        |  13
-rw-r--r--  release/scripts/ui/properties_world.py       |  13
-rw-r--r--  release/scripts/ui/space_image.py            |   2
-rw-r--r--  release/scripts/ui/space_userpref.py         |  11
-rw-r--r--  release/scripts/ui/space_userpref_keymap.py  | 100
11 files changed, 172 insertions(+), 217 deletions(-)
diff --git a/release/scripts/io/export_obj.py b/release/scripts/io/export_obj.py
index a25daedf1bb..3a551c9aa7e 100644
--- a/release/scripts/io/export_obj.py
+++ b/release/scripts/io/export_obj.py
@@ -42,16 +42,6 @@ import shutil
import bpy
import mathutils
-
-# Returns a tuple - path,extension.
-# 'hello.obj' > ('hello', '.obj')
-def splitExt(path):
- dotidx = path.rfind('.')
- if dotidx == -1:
- return path, ''
- else:
- return path[:dotidx], path[dotidx:]
-
def fixName(name):
if name == None:
return 'None'
@@ -428,9 +418,12 @@ def write_file(filepath, objects, scene,
if EXPORT_UV:
faceuv = len(me.uv_textures) > 0
+ uv_layer = me.active_uv_texture.data[:]
else:
faceuv = False
+ me_verts = me.verts[:]
+
# XXX - todo, find a better way to do triangulation
# ...removed convert_to_triface because it relies on editmesh
'''
@@ -503,10 +496,7 @@ def write_file(filepath, objects, scene,
if EXPORT_KEEP_VERT_ORDER:
pass
elif faceuv:
- # XXX update
- tface = me.active_uv_texture.data
-
- face_index_pairs.sort(key=lambda a: (a[0].material_index, hash(tface[a[1]].image), a[0].smooth))
+ face_index_pairs.sort(key=lambda a: (a[0].material_index, hash(uv_layer[a[1]].image), a[0].smooth))
elif len(materials) > 1:
face_index_pairs.sort(key = lambda a: (a[0].material_index, a[0].smooth))
else:
@@ -525,8 +515,6 @@ def write_file(filepath, objects, scene,
# try: faces.sort(key = lambda a: a.smooth)
# except: faces.sort(lambda a,b: cmp(a.smooth, b.smooth))
- faces = [pair[0] for pair in face_index_pairs]
-
# Set the default mat to no material and no image.
contextMat = (0, 0) # Can never be this, so we will label a new material the first chance we get.
contextSmooth = None # Will either be true or false, set bad to force initialization switch.
@@ -546,28 +534,17 @@ def write_file(filepath, objects, scene,
# Vert
- for v in me.verts:
+ for v in me_verts:
file.write('v %.6f %.6f %.6f\n' % tuple(v.co))
# UV
if faceuv:
- uv_face_mapping = [[0,0,0,0] for f in faces] # a bit of a waste for tri's :/
+ uv_face_mapping = [[0,0,0,0] for i in range(len(face_index_pairs))] # a bit of a waste for tri's :/
uv_dict = {} # could use a set() here
- uv_layer = me.active_uv_texture
+ uv_layer = me.active_uv_texture.data
for f, f_index in face_index_pairs:
-
- tface = uv_layer.data[f_index]
-
- # workaround, since tface.uv iteration is wrong atm
- uvs = tface.uv
- # uvs = [tface.uv1, tface.uv2, tface.uv3]
-
- # # add another UV if it's a quad
- # if len(f.verts) == 4:
- # uvs.append(tface.uv4)
-
- for uv_index, uv in enumerate(uvs):
+ for uv_index, uv in enumerate(uv_layer[f_index].uv):
uvkey = veckey2d(uv)
try:
uv_face_mapping[f_index][uv_index] = uv_dict[uvkey]
@@ -575,27 +552,16 @@ def write_file(filepath, objects, scene,
uv_face_mapping[f_index][uv_index] = uv_dict[uvkey] = len(uv_dict)
file.write('vt %.6f %.6f\n' % tuple(uv))
-# uv_dict = {} # could use a set() here
-# for f_index, f in enumerate(faces):
-
-# for uv_index, uv in enumerate(f.uv):
-# uvkey = veckey2d(uv)
-# try:
-# uv_face_mapping[f_index][uv_index] = uv_dict[uvkey]
-# except:
-# uv_face_mapping[f_index][uv_index] = uv_dict[uvkey] = len(uv_dict)
-# file.write('vt %.6f %.6f\n' % tuple(uv))
-
uv_unique_count = len(uv_dict)
# del uv, uvkey, uv_dict, f_index, uv_index
# Only need uv_unique_count and uv_face_mapping
# NORMAL, Smooth/Non smoothed.
if EXPORT_NORMALS:
- for f in faces:
+ for f, f_index in face_index_pairs:
if f.smooth:
- for vIdx in f.verts:
- v = me.verts[vIdx]
+ for v_idx in f.verts:
+ v = me_verts[v_idx]
noKey = veckey3d(v.normal)
if noKey not in globalNormals:
globalNormals[noKey] = totno
@@ -619,16 +585,16 @@ def write_file(filepath, objects, scene,
currentVGroup = ''
# Create a dictionary keyed by face id and listing, for each vertex, the vertex groups it belongs to
- vgroupsMap = [[] for _i in range(len(me.verts))]
-# vgroupsMap = [[] for _i in xrange(len(me.verts))]
+ vgroupsMap = [[] for _i in range(len(me_verts))]
+# vgroupsMap = [[] for _i in xrange(len(me_verts))]
for g in ob.vertex_groups:
# for vertexGroupName in vertGroupNames:
- for vIdx, vWeight in getVertsFromGroup(me, g.index):
-# for vIdx, vWeight in me.getVertsFromGroup(vertexGroupName, 1):
- vgroupsMap[vIdx].append((g.name, vWeight))
+ for v_idx, vWeight in getVertsFromGroup(me, g.index):
+# for v_idx, vWeight in me.getVertsFromGroup(vertexGroupName, 1):
+ vgroupsMap[v_idx].append((g.name, vWeight))
for f, f_index in face_index_pairs:
- f_v = [{"index": index, "vertex": me.verts[index]} for index in f.verts]
+ f_v = [me_verts[v_idx] for v_idx in f.verts]
# if f.verts[3] == 0:
# f_v.pop()
@@ -639,7 +605,7 @@ def write_file(filepath, objects, scene,
# f_mat = min(f.mat, len(materialNames)-1)
if faceuv:
- tface = me.active_uv_texture.data[f_index]
+ tface = uv_layer[f_index]
f_image = tface.image
f_uv = tface.uv
@@ -718,21 +684,21 @@ def write_file(filepath, objects, scene,
if f_smooth: # Smoothed, use vertex normals
for vi, v in enumerate(f_v):
file.write( ' %d/%d/%d' % \
- (v["index"] + totverts,
+ (v.index + totverts,
totuvco + uv_face_mapping[f_index][vi],
- globalNormals[ veckey3d(v["vertex"].normal) ]) ) # vert, uv, normal
+ globalNormals[ veckey3d(v.normal) ]) ) # vert, uv, normal
else: # No smoothing, face normals
no = globalNormals[ veckey3d(f.normal) ]
for vi, v in enumerate(f_v):
file.write( ' %d/%d/%d' % \
- (v["index"] + totverts,
+ (v.index + totverts,
totuvco + uv_face_mapping[f_index][vi],
no) ) # vert, uv, normal
else: # No Normals
for vi, v in enumerate(f_v):
file.write( ' %d/%d' % (\
- v["index"] + totverts,\
+ v.index + totverts,\
totuvco + uv_face_mapping[f_index][vi])) # vert, uv
face_vert_index += len(f_v)
@@ -742,14 +708,14 @@ def write_file(filepath, objects, scene,
if f_smooth: # Smoothed, use vertex normals
for v in f_v:
file.write( ' %d//%d' %
- (v["index"] + totverts, globalNormals[ veckey3d(v["vertex"].normal) ]) )
+ (v.index + totverts, globalNormals[ veckey3d(v.normal) ]) )
else: # No smoothing, face normals
no = globalNormals[ veckey3d(f.normal) ]
for v in f_v:
- file.write( ' %d//%d' % (v["index"] + totverts, no) )
+ file.write( ' %d//%d' % (v.index + totverts, no) )
else: # No Normals
for v in f_v:
- file.write( ' %d' % (v["index"] + totverts) )
+ file.write( ' %d' % (v.index + totverts) )
file.write('\n')
@@ -760,7 +726,7 @@ def write_file(filepath, objects, scene,
file.write('f %d %d\n' % (ed.verts[0] + totverts, ed.verts[1] + totverts))
# Make the indices global rather than per mesh
- totverts += len(me.verts)
+ totverts += len(me_verts)
if faceuv:
totuvco += uv_unique_count
@@ -809,7 +775,7 @@ def write(filepath, context,
EXPORT_ALL_SCENES, # XXX not working atm
EXPORT_ANIMATION): # Not used
- base_name, ext = splitExt(filepath)
+ base_name, ext = os.path.splitext(filepath)
context_name = [base_name, '', '', ext] # Base name, scene name, frame number, extension
orig_scene = context.scene
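
A side note on the splitExt removal at the top of this file: os.path.splitext only ever splits inside the final path component, while the removed rfind() version could split at a dot in a directory name. A standalone sketch of the difference (not part of the patch):

import os.path

def splitExt(path):
    # the removed hand-rolled helper: splits at the last dot anywhere in the path
    dotidx = path.rfind('.')
    if dotidx == -1:
        return path, ''
    return path[:dotidx], path[dotidx:]

# identical for the common case
assert splitExt('hello.obj') == os.path.splitext('hello.obj') == ('hello', '.obj')

# but not when a directory name contains a dot and the file has no extension
print(splitExt('/renders/v1.2/scene'))          # ('/renders/v1', '.2/scene')
print(os.path.splitext('/renders/v1.2/scene'))  # ('/renders/v1.2/scene', '')
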
diff --git a/release/scripts/io/import_scene_obj.py b/release/scripts/io/import_scene_obj.py
index 3c976c4cf92..3827b3225b6 100644
--- a/release/scripts/io/import_scene_obj.py
+++ b/release/scripts/io/import_scene_obj.py
@@ -375,38 +375,32 @@ def create_materials(filepath, material_libs, unique_materials, unique_material_
# Image has alpha
# XXX bitmask won't work?
- blender_material.add_texture(texture, "UV", ("COLOR", "ALPHA"))
+ blender_material.add_texture(texture, 'UV', {'COLOR', 'ALPHA'})
texture.mipmap = True
texture.interpolation = True
texture.use_alpha = True
- blender_material.z_transparency = True
+ blender_material.transparency = True
blender_material.alpha = 0.0
-
-# blender_material.setTexture(0, texture, Texture.TexCo.UV, Texture.MapTo.COL | Texture.MapTo.ALPHA)
-# texture.setImageFlags('MipMap', 'InterPol', 'UseAlpha')
-# blender_material.mode |= Material.Modes.ZTRANSP
-# blender_material.alpha = 0.0
else:
- blender_material.add_texture(texture, "UV", "COLOR")
-# blender_material.setTexture(0, texture, Texture.TexCo.UV, Texture.MapTo.COL)
+ blender_material.add_texture(texture, 'UV', 'COLOR')
# adds textures to faces (Textured/Alt-Z mode)
# Only apply the diffuse texture to the face if the image has not been set with the inline usemat func.
unique_material_images[context_material_name]= image, has_data # set the texface image
elif type == 'Ka':
- blender_material.add_texture(texture, "UV", "AMBIENT")
+ blender_material.add_texture(texture, 'UV', 'AMBIENT')
# blender_material.setTexture(1, texture, Texture.TexCo.UV, Texture.MapTo.CMIR) # TODO- Add AMB to BPY API
elif type == 'Ks':
- blender_material.add_texture(texture, "UV", "SPECULARITY")
+ blender_material.add_texture(texture, 'UV', 'SPECULARITY')
# blender_material.setTexture(2, texture, Texture.TexCo.UV, Texture.MapTo.SPEC)
elif type == 'Bump':
- blender_material.add_texture(texture, "UV", "NORMAL")
+ blender_material.add_texture(texture, 'UV', 'NORMAL')
# blender_material.setTexture(3, texture, Texture.TexCo.UV, Texture.MapTo.NOR)
elif type == 'D':
- blender_material.add_texture(texture, "UV", "ALPHA")
+ blender_material.add_texture(texture, 'UV', 'ALPHA')
blender_material.z_transparency = True
blender_material.alpha = 0.0
# blender_material.setTexture(4, texture, Texture.TexCo.UV, Texture.MapTo.ALPHA)
@@ -415,7 +409,7 @@ def create_materials(filepath, material_libs, unique_materials, unique_material_
# Todo, unset diffuse material alpha if it has an alpha channel
elif type == 'refl':
- blender_material.add_texture(texture, "UV", "REFLECTION")
+ blender_material.add_texture(texture, 'UV', 'REFLECTION')
# blender_material.setTexture(5, texture, Texture.TexCo.UV, Texture.MapTo.REF)
@@ -503,7 +497,7 @@ def create_materials(filepath, material_libs, unique_materials, unique_material_
-def split_mesh(verts_loc, faces, unique_materials, filepath, SPLIT_OB_OR_GROUP, SPLIT_MATERIALS):
+def split_mesh(verts_loc, faces, unique_materials, filepath, SPLIT_OB_OR_GROUP):
'''
Takes vert_loc and faces, and separates into multiple sets of
(verts_loc, faces, unique_materials, dataname)
@@ -511,41 +505,24 @@ def split_mesh(verts_loc, faces, unique_materials, filepath, SPLIT_OB_OR_GROUP,
filename = os.path.splitext((os.path.basename(filepath)))[0]
- if not SPLIT_OB_OR_GROUP and not SPLIT_MATERIALS:
+ if not SPLIT_OB_OR_GROUP:
# use the filename for the object name since we aren't chopping up the mesh.
return [(verts_loc, faces, unique_materials, filename)]
-
def key_to_name(key):
# if the key is a tuple, join it to make a string
- if type(key) == tuple:
- return '%s_%s' % key
- elif not key:
+ if not key:
return filename # assume it's a string. make sure this is true if the splitting code is changed
else:
return key
# Return a key that makes the faces unique.
- if SPLIT_OB_OR_GROUP and not SPLIT_MATERIALS:
- def face_key(face):
- return face[4] # object
-
- elif not SPLIT_OB_OR_GROUP and SPLIT_MATERIALS:
- def face_key(face):
- return face[2] # material
-
- else: # Both
- def face_key(face):
- return face[4], face[2] # object,material
-
-
face_split_dict= {}
oldkey= -1 # initialize to a value that will never match the key
for face in faces:
-
- key= face_key(face)
+ key= face[4]
if oldkey != key:
# Check the key has changed.
@@ -570,7 +547,6 @@ def split_mesh(verts_loc, faces, unique_materials, filepath, SPLIT_OB_OR_GROUP,
vert_remap[i]= new_index # set the new remapped index so we only add once and can reference next time.
face_vert_loc_indicies[enum] = new_index # remap to the local index
verts_split.append( verts_loc[i] ) # add the vert to the local verts
-
else:
face_vert_loc_indicies[enum] = vert_remap[i] # remap to the local index
@@ -580,12 +556,11 @@ def split_mesh(verts_loc, faces, unique_materials, filepath, SPLIT_OB_OR_GROUP,
faces_split.append(face)
-
# remove one of the items and reorder
return [(value[0], value[1], value[2], key_to_name(key)) for key, value in list(face_split_dict.items())]
-def create_mesh(scn, new_objects, has_ngons, CREATE_FGONS, CREATE_EDGES, verts_loc, verts_tex, faces, unique_materials, unique_material_images, unique_smooth_groups, vertex_groups, dataname):
+def create_mesh(new_objects, has_ngons, CREATE_FGONS, CREATE_EDGES, verts_loc, verts_tex, faces, unique_materials, unique_material_images, unique_smooth_groups, vertex_groups, dataname):
'''
Takes all the data gathered and generates a mesh, adding the new object to new_objects
deals with fgons, sharp edges and assigning materials
@@ -844,7 +819,6 @@ def create_mesh(scn, new_objects, has_ngons, CREATE_FGONS, CREATE_EDGES, verts_l
# me.calcNormals()
ob= bpy.data.objects.new("Mesh", me)
- scn.objects.link(ob)
new_objects.append(ob)
# Create the vertex groups. No need to have the flag passed here since we test for the
@@ -858,7 +832,7 @@ def create_mesh(scn, new_objects, has_ngons, CREATE_FGONS, CREATE_EDGES, verts_l
# me.assignVertsToGroup(group_name, group_indicies, 1.00, Mesh.AssignModes.REPLACE)
-def create_nurbs(scn, context_nurbs, vert_loc, new_objects):
+def create_nurbs(context_nurbs, vert_loc, new_objects):
'''
Add nurbs object to blender, only support one type at the moment
'''
@@ -933,8 +907,9 @@ def create_nurbs(scn, context_nurbs, vert_loc, new_objects):
if do_closed:
nu.flagU |= 1
'''
- ob = scn.objects.new(cu)
+
+ ob = bpy.data.objects.new("Nurb", cu)
new_objects.append(ob)
@@ -1259,7 +1234,6 @@ def load_obj(filepath,
# bpy.ops.OBJECT_OT_select_all()
scene = context.scene
-# scn = bpy.data.scenes.active
# scn.objects.selected = []
new_objects= [] # put new objects here
@@ -1268,14 +1242,20 @@ def load_obj(filepath,
if SPLIT_OBJECTS or SPLIT_GROUPS: SPLIT_OB_OR_GROUP = True
else: SPLIT_OB_OR_GROUP = False
- for verts_loc_split, faces_split, unique_materials_split, dataname in split_mesh(verts_loc, faces, unique_materials, filepath, SPLIT_OB_OR_GROUP, SPLIT_MATERIALS):
+ for verts_loc_split, faces_split, unique_materials_split, dataname in split_mesh(verts_loc, faces, unique_materials, filepath, SPLIT_OB_OR_GROUP):
# Create meshes from the data, warning 'vertex_groups' won't support splitting
- create_mesh(scene, new_objects, has_ngons, CREATE_FGONS, CREATE_EDGES, verts_loc_split, verts_tex, faces_split, unique_materials_split, unique_material_images, unique_smooth_groups, vertex_groups, dataname)
+ create_mesh(new_objects, has_ngons, CREATE_FGONS, CREATE_EDGES, verts_loc_split, verts_tex, faces_split, unique_materials_split, unique_material_images, unique_smooth_groups, vertex_groups, dataname)
# nurbs support
# for context_nurbs in nurbs:
# create_nurbs(scn, context_nurbs, verts_loc, new_objects)
+ # Create new obj
+ for obj in new_objects:
+ scene.objects.link(obj)
+
+ scene.update()
+
axis_min= [ 1000000000]*3
axis_max= [-1000000000]*3
@@ -1317,14 +1297,13 @@ def load_obj_ui(filepath, BATCH_LOAD= False):
if BPyMessages.Error_NoFile(filepath):
return
- global CREATE_SMOOTH_GROUPS, CREATE_FGONS, CREATE_EDGES, SPLIT_OBJECTS, SPLIT_GROUPS, SPLIT_MATERIALS, CLAMP_SIZE, IMAGE_SEARCH, POLYGROUPS, KEEP_VERT_ORDER, ROTATE_X90
+ global CREATE_SMOOTH_GROUPS, CREATE_FGONS, CREATE_EDGES, SPLIT_OBJECTS, SPLIT_GROUPS, CLAMP_SIZE, IMAGE_SEARCH, POLYGROUPS, KEEP_VERT_ORDER, ROTATE_X90
CREATE_SMOOTH_GROUPS= Draw.Create(0)
CREATE_FGONS= Draw.Create(1)
CREATE_EDGES= Draw.Create(1)
SPLIT_OBJECTS= Draw.Create(0)
SPLIT_GROUPS= Draw.Create(0)
- SPLIT_MATERIALS= Draw.Create(0)
CLAMP_SIZE= Draw.Create(10.0)
IMAGE_SEARCH= Draw.Create(1)
POLYGROUPS= Draw.Create(0)
@@ -1343,7 +1322,6 @@ def load_obj_ui(filepath, BATCH_LOAD= False):
'Separate objects from obj...',\
('Object', SPLIT_OBJECTS, 'Import OBJ Objects into Blender Objects'),\
('Group', SPLIT_GROUPS, 'Import OBJ Groups into Blender Objects'),\
- ('Split Materials', SPLIT_MATERIALS, 'Import each material into a separate mesh'),\
'Options...',\
('Keep Vert Order', KEEP_VERT_ORDER, 'Keep vert and face order, disables some other options.'),\
('Clamp Scale:', CLAMP_SIZE, 0.0, 1000.0, 'Clamp the size to this maximum (Zero to Disable)'),\
@@ -1356,7 +1334,6 @@ def load_obj_ui(filepath, BATCH_LOAD= False):
if KEEP_VERT_ORDER.val:
SPLIT_OBJECTS.val = False
SPLIT_GROUPS.val = False
- SPLIT_MATERIALS.val = False
'''
@@ -1378,25 +1355,25 @@ def load_obj_ui(filepath, BATCH_LOAD= False):
GLOBALS['EVENT'] = e
def do_split(e,v):
- global SPLIT_OBJECTS, SPLIT_GROUPS, SPLIT_MATERIALS, KEEP_VERT_ORDER, POLYGROUPS
- if SPLIT_OBJECTS.val or SPLIT_GROUPS.val or SPLIT_MATERIALS.val:
+ global SPLIT_OBJECTS, SPLIT_GROUPS, KEEP_VERT_ORDER, POLYGROUPS
+ if SPLIT_OBJECTS.val or SPLIT_GROUPS.val:
KEEP_VERT_ORDER.val = 0
POLYGROUPS.val = 0
else:
KEEP_VERT_ORDER.val = 1
def do_vertorder(e,v):
- global SPLIT_OBJECTS, SPLIT_GROUPS, SPLIT_MATERIALS, KEEP_VERT_ORDER
+ global SPLIT_OBJECTS, SPLIT_GROUPS, KEEP_VERT_ORDER
if KEEP_VERT_ORDER.val:
- SPLIT_OBJECTS.val = SPLIT_GROUPS.val = SPLIT_MATERIALS.val = 0
+ SPLIT_OBJECTS.val = SPLIT_GROUPS.val = 0
else:
- if not (SPLIT_OBJECTS.val or SPLIT_GROUPS.val or SPLIT_MATERIALS.val):
+ if not (SPLIT_OBJECTS.val or SPLIT_GROUPS.val):
KEEP_VERT_ORDER.val = 1
def do_polygroups(e,v):
- global SPLIT_OBJECTS, SPLIT_GROUPS, SPLIT_MATERIALS, KEEP_VERT_ORDER, POLYGROUPS
+ global SPLIT_OBJECTS, SPLIT_GROUPS, KEEP_VERT_ORDER, POLYGROUPS
if POLYGROUPS.val:
- SPLIT_OBJECTS.val = SPLIT_GROUPS.val = SPLIT_MATERIALS.val = 0
+ SPLIT_OBJECTS.val = SPLIT_GROUPS.val = 0
def do_help(e,v):
url = __url__[0]
@@ -1416,7 +1393,7 @@ def load_obj_ui(filepath, BATCH_LOAD= False):
ui_x -= 165
ui_y -= 90
- global CREATE_SMOOTH_GROUPS, CREATE_FGONS, CREATE_EDGES, SPLIT_OBJECTS, SPLIT_GROUPS, SPLIT_MATERIALS, CLAMP_SIZE, IMAGE_SEARCH, POLYGROUPS, KEEP_VERT_ORDER, ROTATE_X90
+ global CREATE_SMOOTH_GROUPS, CREATE_FGONS, CREATE_EDGES, SPLIT_OBJECTS, SPLIT_GROUPS, CLAMP_SIZE, IMAGE_SEARCH, POLYGROUPS, KEEP_VERT_ORDER, ROTATE_X90
Draw.Label('Import...', ui_x+9, ui_y+159, 220, 21)
Draw.BeginAlign()
@@ -1429,7 +1406,6 @@ def load_obj_ui(filepath, BATCH_LOAD= False):
Draw.BeginAlign()
SPLIT_OBJECTS = Draw.Toggle('Object', EVENT_REDRAW, ui_x+9, ui_y+89, 55, 21, SPLIT_OBJECTS.val, 'Import OBJ Objects into Blender Objects', do_split)
SPLIT_GROUPS = Draw.Toggle('Group', EVENT_REDRAW, ui_x+64, ui_y+89, 55, 21, SPLIT_GROUPS.val, 'Import OBJ Groups into Blender Objects', do_split)
- SPLIT_MATERIALS = Draw.Toggle('Split Materials', EVENT_REDRAW, ui_x+119, ui_y+89, 60, 21, SPLIT_MATERIALS.val, 'Import each material into a separate mesh', do_split)
Draw.EndAlign()
# Only used for user feedback
@@ -1489,7 +1465,6 @@ def load_obj_ui(filepath, BATCH_LOAD= False):
CREATE_EDGES.val,\
SPLIT_OBJECTS.val,\
SPLIT_GROUPS.val,\
- SPLIT_MATERIALS.val,\
ROTATE_X90.val,\
IMAGE_SEARCH.val,\
POLYGROUPS.val
@@ -1503,7 +1478,6 @@ def load_obj_ui(filepath, BATCH_LOAD= False):
CREATE_EDGES.val,\
SPLIT_OBJECTS.val,\
SPLIT_GROUPS.val,\
- SPLIT_MATERIALS.val,\
ROTATE_X90.val,\
IMAGE_SEARCH.val,\
POLYGROUPS.val
@@ -1567,7 +1541,6 @@ class IMPORT_OT_obj(bpy.types.Operator):
CREATE_EDGES = BoolProperty(name="Lines as Edges", description="Import lines and faces with 2 verts as edge", default= True)
SPLIT_OBJECTS = BoolProperty(name="Object", description="Import OBJ Objects into Blender Objects", default= True)
SPLIT_GROUPS = BoolProperty(name="Group", description="Import OBJ Groups into Blender Objects", default= True)
- SPLIT_MATERIALS = BoolProperty(name="Split Materials", description="Import each material into a separate mesh", default= False)
# old comment: only used for user feedback
# disabled this option because in old code a handler for it disabled SPLIT* params, it's not passed to load_obj
# KEEP_VERT_ORDER = BoolProperty(name="Keep Vert Order", description="Keep vert and face order, disables split options, enable for morph targets", default= True)
@@ -1588,7 +1561,6 @@ class IMPORT_OT_obj(bpy.types.Operator):
self.properties.CREATE_EDGES,
self.properties.SPLIT_OBJECTS,
self.properties.SPLIT_GROUPS,
- self.properties.SPLIT_MATERIALS,
self.properties.ROTATE_X90,
self.properties.IMAGE_SEARCH,
self.properties.POLYGROUPS)
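
The create_mesh/create_nurbs changes above all follow one pattern: object datablocks are created detached with bpy.data.objects.new() and collected in new_objects, then linked to the scene in a single pass with one scene.update() at the end. A minimal sketch of that pattern (Blender 2.5x API of this commit's era; runs only inside Blender, with bpy.data.meshes standing in for the importer's meshes):

import bpy

scene = bpy.context.scene
new_objects = []

for me in bpy.data.meshes:
    # creates the object datablock only; it is not yet in any scene
    ob = bpy.data.objects.new(me.name, me)
    new_objects.append(ob)

# one linking pass and one update, instead of linking inside every creator function
for ob in new_objects:
    scene.objects.link(ob)
scene.update()
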
diff --git a/release/scripts/io/netrender/client.py b/release/scripts/io/netrender/client.py
index 4a116bb982a..a1ad4d3e91c 100644
--- a/release/scripts/io/netrender/client.py
+++ b/release/scripts/io/netrender/client.py
@@ -171,6 +171,7 @@ def clientSendJob(conn, scene, anim = False):
# try to send path first
conn.request("POST", "/job", repr(job.serialize()))
response = conn.getresponse()
+ response.read()
job_id = response.getheader("job-id")
@@ -181,6 +182,7 @@ def clientSendJob(conn, scene, anim = False):
conn.request("PUT", fileURL(job_id, rfile.index), f)
f.close()
response = conn.getresponse()
+ response.read()
# server will reply with ACCEPTED until all files are found
@@ -236,6 +238,7 @@ class NetworkRenderEngine(bpy.types.RenderEngine):
requestResult(conn, job_id, scene.frame_current)
response = conn.getresponse()
+ response.read()
if response.status == http.client.NO_CONTENT:
new_job = True
@@ -244,16 +247,19 @@ class NetworkRenderEngine(bpy.types.RenderEngine):
requestResult(conn, job_id, scene.frame_current)
response = conn.getresponse()
+ response.read()
while response.status == http.client.ACCEPTED and not self.test_break():
time.sleep(1)
requestResult(conn, job_id, scene.frame_current)
response = conn.getresponse()
+ response.read()
# cancel new jobs (animate on network) on break
if self.test_break() and new_job:
conn.request("POST", cancelURL(job_id))
response = conn.getresponse()
+ response.read()
print( response.status, response.reason )
netsettings.job_id = 0
@@ -265,7 +271,7 @@ class NetworkRenderEngine(bpy.types.RenderEngine):
x= int(r.resolution_x*r.resolution_percentage*0.01)
y= int(r.resolution_y*r.resolution_percentage*0.01)
- f = open(netsettings.path + "output.exr", "wb")
+ f = open(os.path.join(netsettings.path, "output.exr"), "wb")
buf = response.read(1024)
while buf:
@@ -275,7 +281,7 @@ class NetworkRenderEngine(bpy.types.RenderEngine):
f.close()
result = self.begin_result(0, 0, x, y)
- result.load_from_file(netsettings.path + "output.exr")
+ result.load_from_file(os.path.join(netsettings.path, "output.exr"))
self.end_result(result)
conn.close()
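
The response.read() calls added throughout this file (and in netrender/operators.py and slave.py below) are not cosmetic: Python's http.client requires the previous response body to be fully read before the next response can be fetched on the same connection, otherwise getresponse() raises ResponseNotReady. A standalone sketch (host and URLs are placeholders):

import http.client

conn = http.client.HTTPConnection("localhost", 8000)  # placeholder master address

conn.request("POST", "/job", "payload")
response = conn.getresponse()
response.read()                     # drain the body so the connection is reusable
job_id = response.getheader("job-id")

conn.request("GET", "/status")
response = conn.getresponse()
body = response.read()              # capture the body once: a second read() returns b""
conn.close()

One caveat with this pattern: read() consumes the body, so any hunk that both drains the response and later calls response.read() again will see empty bytes; when the body is actually needed, it has to be captured once into a variable.
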
diff --git a/release/scripts/io/netrender/master.py b/release/scripts/io/netrender/master.py
index f227f61a536..6deb925420b 100644
--- a/release/scripts/io/netrender/master.py
+++ b/release/scripts/io/netrender/master.py
@@ -89,7 +89,7 @@ class MRenderJob(netrender.model.RenderJob):
def save(self):
if self.save_path:
- f = open(self.save_path + "job.txt", "w")
+ f = open(os.path.join(self.save_path, "job.txt"), "w")
f.write(repr(self.serialize()))
f.close()
@@ -134,8 +134,8 @@ class MRenderJob(netrender.model.RenderJob):
self.status = JOB_QUEUED
def addLog(self, frames):
- log_name = "_".join(("%04d" % f for f in frames)) + ".log"
- log_path = self.save_path + log_name
+ log_name = "_".join(("%06d" % f for f in frames)) + ".log"
+ log_path = os.path.join(self.save_path, log_name)
for number in frames:
frame = self[number]
@@ -260,7 +260,7 @@ class RenderHandler(http.server.BaseHTTPRequestHandler):
elif frame.status == DONE:
self.server.stats("", "Sending result to client")
- filename = job.save_path + "%04d" % frame_number + ".exr"
+ filename = os.path.join(job.save_path, "%06d.exr" % frame_number)
f = open(filename, 'rb')
self.send_head(content = "image/x-exr")
@@ -294,7 +294,7 @@ class RenderHandler(http.server.BaseHTTPRequestHandler):
if frame.status in (QUEUED, DISPATCHED):
self.send_head(http.client.ACCEPTED)
elif frame.status == DONE:
- filename = job.save_path + "%04d" % frame_number + ".exr"
+ filename = os.path.join(job.save_path, "%06d.exr" % frame_number)
thumbname = thumbnail(filename)
@@ -716,7 +716,7 @@ class RenderHandler(http.server.BaseHTTPRequestHandler):
if file_index > 0:
file_path = prefixPath(job.save_path, render_file.filepath, main_path)
else:
- file_path = job.save_path + main_name
+ file_path = os.path.join(job.save_path, main_name)
buf = self.rfile.read(length)
@@ -772,7 +772,7 @@ class RenderHandler(http.server.BaseHTTPRequestHandler):
if job_result == DONE:
length = int(self.headers['content-length'])
buf = self.rfile.read(length)
- f = open(job.save_path + "%04d" % job_frame + ".exr", 'wb')
+ f = open(os.path.join(job.save_path, "%06d.exr" % job_frame), 'wb')
f.write(buf)
f.close()
@@ -822,13 +822,12 @@ class RenderHandler(http.server.BaseHTTPRequestHandler):
if job.type == netrender.model.JOB_BLENDER:
length = int(self.headers['content-length'])
buf = self.rfile.read(length)
- f = open(job.save_path + "%04d" % job_frame + ".jpg", 'wb')
+ f = open(os.path.join(job.save_path, "%06d.jpg" % job_frame), 'wb')
f.write(buf)
f.close()
del buf
- self.send_head()
else: # frame not found
self.send_head(http.client.NO_CONTENT)
else: # job not found
@@ -880,7 +879,7 @@ class RenderMasterServer(socketserver.ThreadingMixIn, http.server.HTTPServer):
self.job_id = 0
if subdir:
- self.path = path + "master_" + str(os.getpid()) + os.sep
+ self.path = os.path.join(path, "master_" + str(os.getpid()))
else:
self.path = path
@@ -1007,7 +1006,7 @@ class RenderMasterServer(socketserver.ThreadingMixIn, http.server.HTTPServer):
self.jobs_map[job.id] = job
# create job directory
- job.save_path = self.path + "job_" + job.id + os.sep
+ job.save_path = os.path.join(self.path, "job_" + job.id)
if not os.path.exists(job.save_path):
os.mkdir(job.save_path)
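
The recurring change in this file replaces string concatenation (path + "job_" + id + os.sep) with os.path.join, which inserts exactly one separator and uses the platform's native one. A quick illustration:

import os

base = "/var/render/jobs"                       # hypothetical master storage path
print(base + "job_42")                          # /var/render/jobsjob_42  (separator lost)
print(os.path.join(base, "job_42"))             # /var/render/jobs/job_42
print(os.path.join(base + os.sep, "job_42"))    # /var/render/jobs/job_42 (no doubling)
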
diff --git a/release/scripts/io/netrender/operators.py b/release/scripts/io/netrender/operators.py
index 55c2d11e28e..c9645d0d1ee 100644
--- a/release/scripts/io/netrender/operators.py
+++ b/release/scripts/io/netrender/operators.py
@@ -205,6 +205,7 @@ class RENDER_OT_netclientstatus(bpy.types.Operator):
conn.request("GET", "/status")
response = conn.getresponse()
+ response.read()
print( response.status, response.reason )
jobs = (netrender.model.RenderJob.materialize(j) for j in eval(str(response.read(), encoding='utf8')))
@@ -306,6 +307,7 @@ class RENDER_OT_netclientslaves(bpy.types.Operator):
conn.request("GET", "/slaves")
response = conn.getresponse()
+ response.read()
print( response.status, response.reason )
slaves = (netrender.model.RenderSlave.materialize(s) for s in eval(str(response.read(), encoding='utf8')))
@@ -354,6 +356,7 @@ class RENDER_OT_netclientcancel(bpy.types.Operator):
conn.request("POST", cancelURL(job.id))
response = conn.getresponse()
+ response.read()
print( response.status, response.reason )
netsettings.jobs.remove(netsettings.active_job_index)
@@ -380,6 +383,7 @@ class RENDER_OT_netclientcancelall(bpy.types.Operator):
conn.request("POST", "/clear")
response = conn.getresponse()
+ response.read()
print( response.status, response.reason )
while(len(netsettings.jobs) > 0):
@@ -412,6 +416,7 @@ class netclientdownload(bpy.types.Operator):
for frame in job.frames:
client.requestResult(conn, job.id, frame.number)
response = conn.getresponse()
+ response.read()
if response.status != http.client.OK:
print("missing", frame.number)
@@ -419,7 +424,7 @@ class netclientdownload(bpy.types.Operator):
print("got back", frame.number)
- f = open(netsettings.path + "%06d" % frame.number + ".exr", "wb")
+ f = open(os.path.join(netsettings.path, "%06d.exr" % frame.number), "wb")
buf = response.read(1024)
while buf:
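
The frame filenames also move from %04d to %06d here and in master.py. Six digits matches the ###### placeholder the slave passes to Blender's -o option below (each # expands to one digit of the zero-padded frame number), and zero padding keeps lexicographic order identical to numeric order:

frame = 42
print("%06d.exr" % frame)                    # 000042.exr

names = ["%06d.exr" % f for f in (100, 2, 10)]
print(sorted(names))                         # ['000002.exr', '000010.exr', '000100.exr']
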
diff --git a/release/scripts/io/netrender/slave.py b/release/scripts/io/netrender/slave.py
index fbaf2254504..7528ea1053a 100644
--- a/release/scripts/io/netrender/slave.py
+++ b/release/scripts/io/netrender/slave.py
@@ -59,8 +59,8 @@ def slave_Info():
def testCancel(conn, job_id, frame_number):
conn.request("HEAD", "/status", headers={"job-id":job_id, "job-frame": str(frame_number)})
- # cancelled if job isn't found anymore
- if conn.getresponse().status == http.client.NO_CONTENT:
+ # canceled if job isn't found anymore
+ if responseStatus(conn) == http.client.NO_CONTENT:
return True
else:
return False
@@ -81,7 +81,7 @@ def testFile(conn, job_id, slave_id, rfile, JOB_PREFIX, main_path = None):
if not found:
# Force prefix path if not found
job_full_path = prefixPath(JOB_PREFIX, rfile.filepath, main_path, force = True)
- temp_path = JOB_PREFIX + "slave.temp"
+ temp_path = os.path.join(JOB_PREFIX, "slave.temp")
conn.request("GET", fileURL(job_id, rfile.index), headers={"slave-id":slave_id})
response = conn.getresponse()
@@ -113,10 +113,11 @@ def render_slave(engine, netsettings, threads):
if conn:
conn.request("POST", "/slave", repr(slave_Info().serialize()))
response = conn.getresponse()
+ response.read()
slave_id = response.getheader("slave-id")
- NODE_PREFIX = netsettings.path + "slave_" + slave_id + os.sep
+ NODE_PREFIX = os.path.join(netsettings.path, "slave_" + slave_id)
if not os.path.exists(NODE_PREFIX):
os.mkdir(NODE_PREFIX)
@@ -132,7 +133,7 @@ def render_slave(engine, netsettings, threads):
job = netrender.model.RenderJob.materialize(eval(str(response.read(), encoding='utf8')))
engine.update_stats("", "Network render processing job from master")
- JOB_PREFIX = NODE_PREFIX + "job_" + job.id + os.sep
+ JOB_PREFIX = os.path.join(NODE_PREFIX, "job_" + job.id)
if not os.path.exists(JOB_PREFIX):
os.mkdir(JOB_PREFIX)
@@ -157,6 +158,7 @@ def render_slave(engine, netsettings, threads):
logfile = netrender.model.LogFile(job.id, slave_id, [frame.number for frame in job.frames])
conn.request("POST", "/log", bytes(repr(logfile.serialize()), encoding='utf8'))
response = conn.getresponse()
+ response.read()
first_frame = job.frames[0].number
@@ -172,7 +174,7 @@ def render_slave(engine, netsettings, threads):
frame_args += ["-f", str(frame.number)]
val = SetErrorMode()
- process = subprocess.Popen([BLENDER_PATH, "-b", "-noaudio", job_full_path, "-t", str(threads), "-o", JOB_PREFIX + "######", "-E", "BLENDER_RENDER", "-F", "MULTILAYER"] + frame_args, stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
+ process = subprocess.Popen([BLENDER_PATH, "-b", "-noaudio", job_full_path, "-t", str(threads), "-o", os.path.join(JOB_PREFIX, "######"), "-E", "BLENDER_RENDER", "-F", "MULTILAYER"] + frame_args, stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
RestoreErrorMode(val)
elif job.type == netrender.model.JOB_PROCESS:
command = job.frames[0].command
@@ -196,9 +198,10 @@ def render_slave(engine, netsettings, threads):
# (only need to update on one frame, they are linked
conn.request("PUT", logURL(job.id, first_frame), stdout, headers=headers)
response = conn.getresponse()
+ response.read()
# Also output on console
- if netsettings.slave_thumb:
+ if netsettings.slave_outputlog:
print(str(stdout, encoding='utf8'), end="")
stdout = bytes()
@@ -227,7 +230,7 @@ def render_slave(engine, netsettings, threads):
# (only need to update on one frame, they are linked
conn.request("PUT", logURL(job.id, first_frame), stdout, headers=headers)
- if conn.getresponse().status == http.client.NO_CONTENT:
+ if responseStatus(conn) == http.client.NO_CONTENT:
continue
total_t = time.time() - start_t
@@ -248,7 +251,7 @@ def render_slave(engine, netsettings, threads):
if job.type == netrender.model.JOB_BLENDER:
# send image back to server
- filename = JOB_PREFIX + "%06d" % frame.number + ".exr"
+ filename = os.path.join(JOB_PREFIX, "%06d.exr" % frame.number)
# thumbnail first
if netsettings.slave_thumb:
@@ -257,17 +260,18 @@ def render_slave(engine, netsettings, threads):
f = open(thumbname, 'rb')
conn.request("PUT", "/thumb", f, headers=headers)
f.close()
- conn.getresponse()
+ responseStatus(conn)
+
f = open(filename, 'rb')
conn.request("PUT", "/render", f, headers=headers)
f.close()
- if conn.getresponse().status == http.client.NO_CONTENT:
+ if responseStatus(conn) == http.client.NO_CONTENT:
continue
elif job.type == netrender.model.JOB_PROCESS:
conn.request("PUT", "/render", headers=headers)
- if conn.getresponse().status == http.client.NO_CONTENT:
+ if responseStatus(conn) == http.client.NO_CONTENT:
continue
else:
headers["job-result"] = str(ERROR)
@@ -275,7 +279,7 @@ def render_slave(engine, netsettings, threads):
headers["job-frame"] = str(frame.number)
# send error result back to server
conn.request("PUT", "/render", headers=headers)
- if conn.getresponse().status == http.client.NO_CONTENT:
+ if responseStatus(conn) == http.client.NO_CONTENT:
continue
engine.update_stats("", "Network render connected to master, waiting for jobs")
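
For context, the slave's render invocation above merges stderr into stdout (stderr=subprocess.STDOUT) so a single pipe carries the whole render log back to the master. A reduced, standalone sketch with placeholder paths:

import subprocess

process = subprocess.Popen(
    ["blender", "-b", "-noaudio", "/tmp/job/scene.blend",   # placeholder job file
     "-o", "/tmp/job/######",      # '#' placeholders expand to the padded frame number
     "-E", "BLENDER_RENDER", "-F", "MULTILAYER",
     "-f", "1"],
    stdout=subprocess.PIPE, stderr=subprocess.STDOUT)

# stream the combined log while the render runs
for line in process.stdout:
    print(line.decode("utf8", errors="replace"), end="")
process.wait()
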
diff --git a/release/scripts/io/netrender/utils.py b/release/scripts/io/netrender/utils.py
index 31123ce26da..81617ac0d30 100644
--- a/release/scripts/io/netrender/utils.py
+++ b/release/scripts/io/netrender/utils.py
@@ -57,6 +57,11 @@ FRAME_STATUS_TEXT = {
ERROR: "Error"
}
+def responseStatus(conn):
+ response = conn.getresponse()
+ response.read()
+ return response.status
+
def reporting(report, message, errorType = None):
if errorType:
t = 'ERROR'
@@ -171,16 +176,16 @@ def prefixPath(prefix_directory, file_path, prefix_path, force = False):
if prefix_path and p.startswith(prefix_path):
if len(prefix_path) < len(p):
- directory = prefix_directory + p[len(prefix_path)+1:] + os.sep # +1 to remove separator
+ directory = os.path.join(prefix_directory, p[len(prefix_path)+1:]) # +1 to remove separator
if not os.path.exists(directory):
os.mkdir(directory)
else:
directory = prefix_directory
- full_path = directory + n
+ full_path = os.path.join(directory, n)
else:
- full_path = prefix_directory + n
+ full_path = os.path.join(prefix_directory, n)
else:
- full_path = prefix_directory + file_path
+ full_path = os.path.join(prefix_directory, file_path)
return full_path
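
The new responseStatus() helper packages the drain-then-check pattern used all over slave.py, so callers can't forget the read(). A usage sketch (placeholder host; NO_CONTENT signals "job gone" in this protocol):

import http.client

def responseStatus(conn):
    response = conn.getresponse()
    response.read()       # drain so the connection can carry the next request
    return response.status

conn = http.client.HTTPConnection("localhost", 8000)     # placeholder master
conn.request("HEAD", "/status", headers={"job-id": "42", "job-frame": "1"})
if responseStatus(conn) == http.client.NO_CONTENT:
    print("job canceled on the master")
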
diff --git a/release/scripts/ui/properties_world.py b/release/scripts/ui/properties_world.py
index 1fa0d15a90b..bb5758ff902 100644
--- a/release/scripts/ui/properties_world.py
+++ b/release/scripts/ui/properties_world.py
@@ -29,8 +29,7 @@ class WorldButtonsPanel():
@classmethod
def poll(cls, context):
- rd = context.scene.render
- return (rd.engine in cls.COMPAT_ENGINES)
+ return (context.world and context.scene.render.engine in cls.COMPAT_ENGINES)
class WORLD_PT_context_world(WorldButtonsPanel, bpy.types.Panel):
@@ -70,12 +69,6 @@ class WORLD_PT_preview(WorldButtonsPanel, bpy.types.Panel):
self.layout.template_preview(context.world)
-
-
-
-
-
-
class WORLD_PT_world(WorldButtonsPanel, bpy.types.Panel):
bl_label = "World"
COMPAT_ENGINES = {'BLENDER_RENDER'}
@@ -141,8 +134,8 @@ class WORLD_PT_indirect_lighting(WorldButtonsPanel, bpy.types.Panel):
@classmethod
def poll(cls, context):
- light = context.world.lighting
- return light.gather_method == 'APPROXIMATE'
+ light = getattr(context.world, "lighting", None)
+ return light and light.gather_method == 'APPROXIMATE'
def draw_header(self, context):
light = context.world.lighting
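
Both poll() fixes in this file guard against missing context members: context.world is None when no world is assigned, so dereferencing context.world.lighting directly could raise AttributeError while Blender evaluates panel visibility. getattr with a None default makes the check safe. A hypothetical panel showing the pattern (Blender 2.5x API, for illustration only):

import bpy

class WORLD_PT_example(bpy.types.Panel):          # illustrative panel, not in the patch
    bl_label = "Example"
    bl_space_type = 'PROPERTIES'
    bl_region_type = 'WINDOW'
    bl_context = "world"

    @classmethod
    def poll(cls, context):
        # context.world may be None; never dereference it unchecked in poll()
        light = getattr(context.world, "lighting", None)
        return light and light.gather_method == 'APPROXIMATE'

    def draw(self, context):
        self.layout.label(text="approximate gather settings would go here")
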
diff --git a/release/scripts/ui/space_image.py b/release/scripts/ui/space_image.py
index 99d41244431..b3598b4d04c 100644
--- a/release/scripts/ui/space_image.py
+++ b/release/scripts/ui/space_image.py
@@ -575,7 +575,7 @@ class IMAGE_PT_paint(bpy.types.Panel):
col = layout.split().column()
row = col.row()
- col.template_ID_preview(toolsettings, "brush", new="brush.add", filter="is_imapaint_brush", rows=3, cols=8)
+ col.template_ID_preview(toolsettings, "brush", new="brush.add", rows=3, cols=8)
if brush:
col = layout.column()
diff --git a/release/scripts/ui/space_userpref.py b/release/scripts/ui/space_userpref.py
index 049d22a44ed..5f9514c5885 100644
--- a/release/scripts/ui/space_userpref.py
+++ b/release/scripts/ui/space_userpref.py
@@ -91,15 +91,12 @@ class USERPREF_HT_header(bpy.types.Header):
layout.operator_context = 'INVOKE_DEFAULT'
if userpref.active_section == 'INPUT':
- op = layout.operator("wm.keyconfig_export")
- op.filepath = "keymap.py"
- op = layout.operator("wm.keyconfig_import")
- op.filepath = "keymap.py"
+ layout.operator("wm.keyconfig_export")
+ layout.operator("wm.keyconfig_import")
elif userpref.active_section == 'ADDONS':
- op = layout.operator("wm.addon_install")
- op.filepath = "*.py"
+ layout.operator("wm.addon_install")
elif userpref.active_section == 'THEMES':
- op = layout.operator("ui.reset_default_theme")
+ layout.operator("ui.reset_default_theme")
class USERPREF_PT_tabs(bpy.types.Panel):
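
The header buttons above no longer assign op.filepath by hand; instead the operators declare default="keymap.py" on their StringProperty (see the keymap diff below), so every caller gets the default for free. A condensed sketch of the before/after, using a hypothetical operator:

import bpy
from bpy.props import StringProperty

class WM_OT_example_export(bpy.types.Operator):   # hypothetical operator for illustration
    bl_idname = "wm.example_export"
    bl_label = "Export Example..."

    # the default travels with the operator instead of with each button
    filepath = StringProperty(name="File Path", default="keymap.py")

    def execute(self, context):
        print("writing", self.properties.filepath)
        return {'FINISHED'}

# in a header/panel draw() method:
#   before:  op = layout.operator("wm.example_export"); op.filepath = "keymap.py"
#   after:   layout.operator("wm.example_export")
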
diff --git a/release/scripts/ui/space_userpref_keymap.py b/release/scripts/ui/space_userpref_keymap.py
index 9a482b89652..24d77dbed32 100644
--- a/release/scripts/ui/space_userpref_keymap.py
+++ b/release/scripts/ui/space_userpref_keymap.py
@@ -19,8 +19,8 @@
# <pep8 compliant>
import bpy
import os
-import re
-import shutil
+
+KM_MOD_PREFIX = "keyconfig_"
KM_HIERARCHY = [
('Window', 'EMPTY', 'WINDOW', []), # file save, window change, exit
@@ -384,6 +384,22 @@ class InputKeyMapPanel(bpy.types.Panel):
from bpy.props import *
+def export_properties(prefix, properties, lines=None):
+ if lines is None:
+ lines = []
+
+ for pname in properties.keys():
+ if not properties.is_property_hidden(pname):
+ value = getattr(properties, pname)
+ if isinstance(value, bpy.types.OperatorProperties):
+ export_properties(prefix + "." + pname, value, lines)
+ elif properties.is_property_set(pname):
+ value = _string_value(value)
+ if value != "":
+ lines.append("%s.%s = %s\n" % (prefix, pname, value))
+ return lines
+
+
class WM_OT_keyconfig_test(bpy.types.Operator):
"Test keyconfig for conflicts"
bl_idname = "wm.keyconfig_test"
@@ -414,21 +430,10 @@ class WM_OT_keyconfig_test(bpy.types.Operator):
s.append(")\n")
- def export_properties(prefix, properties):
- for pname in dir(properties):
- if not properties.is_property_hidden(pname):
- value = eval("properties.%s" % pname)
- if isinstance(value, bpy.types.OperatorProperties):
- export_properties(prefix + "." + pname, value)
- elif properties.is_property_set(pname):
- value = _string_value(value)
- if value != "":
- s.append(prefix + ".%s = %s\n" % (pname, value))
-
props = kmi.properties
if props is not None:
- export_properties("kmi.properties", props)
+ export_properties("kmi.properties", props, s)
return "".join(s).strip()
@@ -508,7 +513,7 @@ class WM_OT_keyconfig_import(bpy.types.Operator):
bl_idname = "wm.keyconfig_import"
bl_label = "Import Key Configuration..."
- filepath = StringProperty(name="File Path", description="Filepath to write file to")
+ filepath = StringProperty(name="File Path", description="Filepath to write file to", default="keymap.py")
filter_folder = BoolProperty(name="Filter folders", description="", default=True, options={'HIDDEN'})
filter_text = BoolProperty(name="Filter text", description="", default=True, options={'HIDDEN'})
filter_python = BoolProperty(name="Filter python", description="", default=True, options={'HIDDEN'})
@@ -516,25 +521,26 @@ class WM_OT_keyconfig_import(bpy.types.Operator):
keep_original = BoolProperty(name="Keep original", description="Keep original file after copying to configuration folder", default=True)
def execute(self, context):
- if not self.properties.filepath:
+ import shutil
+ if not self.properties.is_property_set("filepath"):
raise Exception("Filepath not set")
f = open(self.properties.filepath, "r")
if not f:
raise Exception("Could not open file")
- name_pattern = re.compile("^kc = wm.add_keyconfig\('(.*)'\)$")
+ config_name = None
+ for line in f:
+ if line.startswith("kc = wm.add_keyconfig("):
+ config_name = line[23:-3]
+ break
- for line in f.readlines():
- match = name_pattern.match(line)
+ if config_name is None:
+ raise Exception("config name not found")
- if match:
- config_name = match.groups()[0]
-
- f.close()
-
- path = os.path.split(os.path.split(__file__)[0])[0] # remove ui/space_userpref.py
- path = os.path.join(path, "cfg")
+ path = os.path.join(__file__, "..", "..", "cfg") # remove ui/space_userpref.py
+ path = os.path.normpath(path)
+ print(path)
# create config folder if needed
if not os.path.exists(path):
@@ -547,7 +553,16 @@ class WM_OT_keyconfig_import(bpy.types.Operator):
else:
shutil.move(self.properties.filepath, path)
- exec("import " + config_name)
+ # sneaky way to check we're actually running the code.
+ wm = context.manager
+ while config_name in wm.keyconfigs:
+ wm.remove_keyconfig(wm.keyconfigs[config_name])
+
+ wm = context.manager
+ totmap = len(wm.keyconfigs)
+ mod = __import__(config_name)
+ if totmap == len(wm.keyconfigs):
+ reload(mod)
wm = bpy.context.manager
wm.active_keyconfig = wm.keyconfigs[config_name]
@@ -567,14 +582,14 @@ class WM_OT_keyconfig_export(bpy.types.Operator):
bl_idname = "wm.keyconfig_export"
bl_label = "Export Key Configuration..."
- filepath = StringProperty(name="File Path", description="Filepath to write file to")
+ filepath = StringProperty(name="File Path", description="Filepath to write file to", default="keymap.py")
filter_folder = BoolProperty(name="Filter folders", description="", default=True, options={'HIDDEN'})
filter_text = BoolProperty(name="Filter text", description="", default=True, options={'HIDDEN'})
filter_python = BoolProperty(name="Filter python", description="", default=True, options={'HIDDEN'})
kc_name = StringProperty(name="KeyConfig Name", description="Name to save the key config as")
def execute(self, context):
- if not self.properties.filepath:
+ if not self.properties.is_property_set("filepath"):
raise Exception("Filepath not set")
f = open(self.properties.filepath, "w")
@@ -642,21 +657,10 @@ class WM_OT_keyconfig_export(bpy.types.Operator):
f.write(", key_modifier='%s'" % kmi.key_modifier)
f.write(")\n")
- def export_properties(prefix, properties):
- for pname in dir(properties):
- if not properties.is_property_hidden(pname):
- value = eval("properties.%s" % pname)
- if isinstance(value, bpy.types.OperatorProperties):
- export_properties(prefix + "." + pname, value)
- elif properties.is_property_set(pname):
- value = _string_value(value)
- if value != "":
- f.write(prefix + ".%s = %s\n" % (pname, value))
-
props = kmi.properties
if props is not None:
- export_properties("kmi.properties", props)
+ f.write("".join(export_properties("kmi.properties", props)))
f.write("\n")
@@ -769,18 +773,22 @@ class WM_OT_keyconfig_remove(bpy.types.Operator):
return wm.active_keyconfig.user_defined
def execute(self, context):
+ import sys
wm = context.manager
keyconfig = wm.active_keyconfig
- module = __import__(keyconfig.name)
+ module = sys.modules.get(keyconfig.name)
- os.remove(module.__file__)
+ if module:
+ path = module.__file__
+ if os.path.exists(path):
+ os.remove(path)
- compiled_path = module.__file__ + "c" # for .pyc
+ path = module.__file__ + "c" # for .pyc
- if os.path.exists(compiled_path):
- os.remove(compiled_path)
+ if os.path.exists(path):
+ os.remove(path)
wm.remove_keyconfig(keyconfig)
return {'FINISHED'}
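
Finally, the export_properties rewrite at the top of this file is worth restating: the old version was duplicated as a nested closure in two operators and fetched values with eval("properties.%s" % pname); the module-level version uses getattr, recurses into nested operator properties, and accumulates into a list whose default is None so the accumulator is never shared between calls. A standalone sketch with plain dicts standing in for bpy's OperatorProperties:

def export_properties(prefix, properties, lines=None):
    if lines is None:
        lines = []          # fresh accumulator per top-level call (mutable-default pitfall)
    for pname, value in properties.items():
        if isinstance(value, dict):     # stand-in for a nested OperatorProperties struct
            export_properties(prefix + "." + pname, value, lines)
        else:
            lines.append("%s.%s = %r\n" % (prefix, pname, value))
    return lines

props = {"filepath": "keymap.py", "nested": {"flag": True}}
print("".join(export_properties("kmi.properties", props)))
# kmi.properties.filepath = 'keymap.py'
# kmi.properties.nested.flag = True
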