git.blender.org/blender.git
Diffstat (limited to 'release/scripts/modules')
-rw-r--r--  release/scripts/modules/addon_utils.py | 42
-rw-r--r--  release/scripts/modules/bl_i18n_utils/bl_extract_messages.py | 23
-rw-r--r--  release/scripts/modules/bl_i18n_utils/settings.py | 3
-rw-r--r--  release/scripts/modules/bl_i18n_utils/utils_spell_check.py | 7
-rw-r--r--  release/scripts/modules/bl_previews_utils/bl_previews_render.py | 52
-rw-r--r--  release/scripts/modules/bpy/__init__.py | 26
-rw-r--r--  release/scripts/modules/bpy/path.py | 55
-rw-r--r--  release/scripts/modules/bpy/utils/__init__.py | 2
-rw-r--r--  release/scripts/modules/bpy/utils/previews.py | 4
-rw-r--r--  release/scripts/modules/bpy_extras/image_utils.py | 29
-rw-r--r--  release/scripts/modules/bpy_extras/io_utils.py | 6
-rw-r--r--  release/scripts/modules/bpy_extras/keyconfig_utils.py | 31
-rw-r--r--  release/scripts/modules/bpy_extras/object_utils.py | 31
-rw-r--r--  release/scripts/modules/bpy_types.py | 46
-rw-r--r--  release/scripts/modules/console_python.py | 52
-rw-r--r--  release/scripts/modules/progress_report.py | 2
-rw-r--r--  release/scripts/modules/rna_keymap_ui.py | 20
-rw-r--r--  release/scripts/modules/rna_prop_ui.py | 3
-rw-r--r--  release/scripts/modules/sys_info.py | 119
19 files changed, 341 insertions, 212 deletions
diff --git a/release/scripts/modules/addon_utils.py b/release/scripts/modules/addon_utils.py
index 123b3cb953c..95c0e5f187d 100644
--- a/release/scripts/modules/addon_utils.py
+++ b/release/scripts/modules/addon_utils.py
@@ -193,7 +193,7 @@ def modules_refresh(module_cache=addons_fake_modules):
del modules_stale
-def modules(module_cache=addons_fake_modules, refresh=True):
+def modules(module_cache=addons_fake_modules, *, refresh=True):
if refresh or ((module_cache is addons_fake_modules) and modules._is_first):
modules_refresh(module_cache)
modules._is_first = False
@@ -255,12 +255,18 @@ def _addon_remove(module_name):
addons.remove(addon)
-def enable(module_name, default_set=False, persistent=False, handle_error=None):
+def enable(module_name, *, default_set=False, persistent=False, handle_error=None):
"""
Enables an addon by name.
- :arg module_name: The name of the addon and module.
+ :arg module_name: the name of the addon and module.
:type module_name: string
+ :arg default_set: Set the user-preference.
+ :type default_set: bool
+ :arg persistent: Ensure the addon is enabled for the entire session (after loading new files).
+ :type persistent: bool
+ :arg handle_error: Called in the case of an error, taking an exception argument.
+ :type handle_error: function
:return: the loaded module or None on failure.
:rtype: module
"""
@@ -270,7 +276,7 @@ def enable(module_name, default_set=False, persistent=False, handle_error=None):
from bpy_restrict_state import RestrictBlend
if handle_error is None:
- def handle_error():
+ def handle_error(ex):
import traceback
traceback.print_exc()
@@ -286,10 +292,10 @@ def enable(module_name, default_set=False, persistent=False, handle_error=None):
# in most cases the caller should 'check()' first.
try:
mod.unregister()
- except:
+ except Exception as ex:
print("Exception in module unregister(): %r" %
getattr(mod, "__file__", module_name))
- handle_error()
+ handle_error(ex)
return None
mod.__addon_enabled__ = False
@@ -301,8 +307,8 @@ def enable(module_name, default_set=False, persistent=False, handle_error=None):
try:
importlib.reload(mod)
- except:
- handle_error()
+ except Exception as ex:
+ handle_error(ex)
del sys.modules[module_name]
return None
mod.__addon_enabled__ = False
@@ -329,7 +335,7 @@ def enable(module_name, default_set=False, persistent=False, handle_error=None):
if type(ex) is ImportError and ex.name == module_name:
print("addon not found: %r" % module_name)
else:
- handle_error()
+ handle_error(ex)
if default_set:
_addon_remove(module_name)
@@ -341,10 +347,10 @@ def enable(module_name, default_set=False, persistent=False, handle_error=None):
# 3) try run the modules register function
try:
mod.register()
- except:
+ except Exception as ex:
print("Exception in module register(): %r" %
getattr(mod, "__file__", module_name))
- handle_error()
+ handle_error(ex)
del sys.modules[module_name]
if default_set:
_addon_remove(module_name)
@@ -360,17 +366,21 @@ def enable(module_name, default_set=False, persistent=False, handle_error=None):
return mod
-def disable(module_name, default_set=False, handle_error=None):
+def disable(module_name, *, default_set=False, handle_error=None):
"""
Disables an addon by name.
:arg module_name: The name of the addon and module.
:type module_name: string
+ :arg default_set: Set the user-preference.
+ :type default_set: bool
+ :arg handle_error: Called in the case of an error, taking an exception argument.
+ :type handle_error: function
"""
import sys
if handle_error is None:
- def handle_error():
+ def handle_error(ex):
import traceback
traceback.print_exc()
@@ -385,10 +395,10 @@ def disable(module_name, default_set=False, handle_error=None):
try:
mod.unregister()
- except:
+ except Exception as ex:
print("Exception in module unregister(): %r" %
getattr(mod, "__file__", module_name))
- handle_error()
+ handle_error(ex)
else:
print("addon_utils.disable: %s not %s." %
(module_name, "disabled" if mod is None else "loaded"))
@@ -401,7 +411,7 @@ def disable(module_name, default_set=False, handle_error=None):
print("\taddon_utils.disable", module_name)
-def reset_all(reload_scripts=False):
+def reset_all(*, reload_scripts=False):
"""
Sets the addon state based on the user preferences.
"""
diff --git a/release/scripts/modules/bl_i18n_utils/bl_extract_messages.py b/release/scripts/modules/bl_i18n_utils/bl_extract_messages.py
index 43a09a1acbd..5a3eda567be 100644
--- a/release/scripts/modules/bl_i18n_utils/bl_extract_messages.py
+++ b/release/scripts/modules/bl_i18n_utils/bl_extract_messages.py
@@ -304,7 +304,8 @@ def dump_rna_messages(msgs, reports, settings, verbose=False):
else:
bl_rna_base_props = set()
- for prop in bl_rna.properties:
+ props = sorted(bl_rna.properties, key=lambda p: p.identifier)
+ for prop in props:
# Only write this property if our parent hasn't got it.
if prop in bl_rna_base_props:
continue
@@ -321,8 +322,20 @@ def dump_rna_messages(msgs, reports, settings, verbose=False):
process_msg(msgs, default_context, prop.description, msgsrc, reports, check_ctxt_rna_tip, settings)
if isinstance(prop, bpy.types.EnumProperty):
+ done_items = set()
for item in prop.enum_items:
msgsrc = "bpy.types.{}.{}:'{}'".format(bl_rna.identifier, prop.identifier, item.identifier)
+ done_items.add(item.identifier)
+ if item.name and item.name != item.identifier:
+ process_msg(msgs, msgctxt, item.name, msgsrc, reports, check_ctxt_rna, settings)
+ if item.description:
+ process_msg(msgs, default_context, item.description, msgsrc, reports, check_ctxt_rna_tip,
+ settings)
+ for item in prop.enum_items_static:
+ if item.identifier in done_items:
+ continue
+ msgsrc = "bpy.types.{}.{}:'{}'".format(bl_rna.identifier, prop.identifier, item.identifier)
+ done_items.add(item.identifier)
if item.name and item.name != item.identifier:
process_msg(msgs, msgctxt, item.name, msgsrc, reports, check_ctxt_rna, settings)
if item.description:
@@ -456,7 +469,7 @@ def dump_py_messages_from_files(msgs, reports, files, settings):
def extract_strings_split(node):
"""
- Returns a list args as returned by 'extract_strings()', But split into groups based on separate_nodes, this way
+ Returns a list args as returned by 'extract_strings()', but split into groups based on separate_nodes, this way
expressions like ("A" if test else "B") wont be merged but "A" + "B" will.
"""
estr_ls = []
@@ -492,7 +505,11 @@ def dump_py_messages_from_files(msgs, reports, files, settings):
return i18n_contexts.default
def _op_to_ctxt(node):
- opname, _ = extract_strings(node)
+ # Some smart coders like things like:
+ # >>> row.operator("wm.addon_disable" if is_enabled else "wm.addon_enable", ...)
+ # We only take first arg into account here!
+ bag = extract_strings_split(node)
+ opname, _ = bag[0]
if not opname:
return i18n_contexts.default
op = bpy.ops
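The enum handling above folds the statically defined items into the dynamic ones while skipping duplicates by identifier; a standalone sketch of that pattern, using plain (identifier, name, description) tuples in place of RNA enum items:

    def merge_enum_items(enum_items, enum_items_static):
        done_items = set()
        merged = []
        for item in enum_items:
            done_items.add(item[0])
            merged.append(item)
        for item in enum_items_static:
            if item[0] in done_items:
                continue  # already covered by the dynamic callback
            done_items.add(item[0])
            merged.append(item)
        return merged

    print(merge_enum_items([("A", "Alpha", "")],
                           [("A", "Alpha (static)", ""), ("B", "Beta", "")]))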
diff --git a/release/scripts/modules/bl_i18n_utils/settings.py b/release/scripts/modules/bl_i18n_utils/settings.py
index 49dbfbe62af..dd6b79f6686 100644
--- a/release/scripts/modules/bl_i18n_utils/settings.py
+++ b/release/scripts/modules/bl_i18n_utils/settings.py
@@ -89,6 +89,7 @@ LANGUAGES = (
(39, "Uzbek Cyrillic (Ўзбек)", "uz_UZ@cyrillic"),
(40, "Hindi (मानक हिन्दी)", "hi_IN"),
(41, "Vietnamese (tiếng Việt)", "vi_VN"),
+ (42, "Basque (Euskara)", "eu_EU"),
)
# Default context, in py!
@@ -337,6 +338,8 @@ WARN_MSGID_NOT_CAPITALIZED_ALLOWED = {
"image path can't be written to",
"in memory to enable editing!",
"jumps over",
+ "left",
+ "right",
"the lazy dog",
"unable to load movie clip",
"unable to load text",
diff --git a/release/scripts/modules/bl_i18n_utils/utils_spell_check.py b/release/scripts/modules/bl_i18n_utils/utils_spell_check.py
index b1aa4e02cee..df014e8262b 100644
--- a/release/scripts/modules/bl_i18n_utils/utils_spell_check.py
+++ b/release/scripts/modules/bl_i18n_utils/utils_spell_check.py
@@ -123,6 +123,7 @@ class SpellChecker:
"mixdown",
"multi",
"multifractal",
+ "multipaint",
"multires", "multiresolution",
"multisampling",
"multitexture",
@@ -158,6 +159,7 @@ class SpellChecker:
"ringnoise",
"rolloff",
"runtime",
+ "scanline",
"screencast", "screenshot", "screenshots",
"selfcollision",
"shadowbuffer", "shadowbuffers",
@@ -240,6 +242,7 @@ class SpellChecker:
"aero",
"amb",
"anim",
+ "app",
"bool",
"calc",
"config", "configs",
@@ -359,6 +362,7 @@ class SpellChecker:
"collada",
"compositing",
"crossfade",
+ "cuda",
"deinterlace",
"dropoff",
"dv",
@@ -408,6 +412,7 @@ class SpellChecker:
# Blender terms
"audaspace",
"bbone",
+ "bmesh",
"breakdowner",
"bspline",
"bweight",
@@ -500,6 +505,7 @@ class SpellChecker:
"asc", "cdl",
"ascii",
"atrac",
+ "avx",
"bsdf",
"bssrdf",
"bw",
@@ -589,6 +595,7 @@ class SpellChecker:
"eps",
"exr",
"fbx",
+ "fbxnode",
"ffmpeg",
"flac",
"gzip",
diff --git a/release/scripts/modules/bl_previews_utils/bl_previews_render.py b/release/scripts/modules/bl_previews_utils/bl_previews_render.py
index 627a6ab2d3d..674c1c00ab1 100644
--- a/release/scripts/modules/bl_previews_utils/bl_previews_render.py
+++ b/release/scripts/modules/bl_previews_utils/bl_previews_render.py
@@ -26,7 +26,7 @@ import os
import sys
import bpy
-from mathutils import Vector, Euler
+from mathutils import Vector, Euler, Matrix
INTERN_PREVIEW_TYPES = {'MATERIAL', 'LAMP', 'WORLD', 'TEXTURE', 'IMAGE'}
@@ -246,13 +246,14 @@ def do_previews(do_objects, do_groups, do_scenes, do_data_intern):
return 'CYCLES'
return 'BLENDER_RENDER'
- def object_bbox_merge(bbox, ob, ob_space):
+ def object_bbox_merge(bbox, ob, ob_space, offset_matrix):
if ob.bound_box:
ob_bbox = ob.bound_box
else:
ob_bbox = ((-ob.scale.x, -ob.scale.y, -ob.scale.z), (ob.scale.x, ob.scale.y, ob.scale.z))
- for v in ob.bound_box:
- v = ob_space.matrix_world.inverted() * ob.matrix_world * Vector(v)
+ for v in ob_bbox:
+ v = offset_matrix * Vector(v) if offset_matrix is not None else Vector(v)
+ v = ob_space.matrix_world.inverted() * ob.matrix_world * v
if bbox[0].x > v.x:
bbox[0].x = v.x
if bbox[0].y > v.y:
@@ -266,11 +267,11 @@ def do_previews(do_objects, do_groups, do_scenes, do_data_intern):
if bbox[1].z < v.z:
bbox[1].z = v.z
- def objects_bbox_calc(camera, objects):
+ def objects_bbox_calc(camera, objects, offset_matrix):
bbox = (Vector((1e9, 1e9, 1e9)), Vector((-1e9, -1e9, -1e9)))
for obname in objects:
ob = bpy.data.objects[obname, None]
- object_bbox_merge(bbox, ob, camera)
+ object_bbox_merge(bbox, ob, camera, offset_matrix)
# Our bbox has been generated in camera local space, bring it back in world one
bbox[0][:] = camera.matrix_world * bbox[0]
bbox[1][:] = camera.matrix_world * bbox[1]
@@ -286,12 +287,12 @@ def do_previews(do_objects, do_groups, do_scenes, do_data_intern):
)
return cos
- def preview_render_do(render_context, item_container, item_name, objects):
+ def preview_render_do(render_context, item_container, item_name, objects, offset_matrix=None):
scene = bpy.data.scenes[render_context.scene, None]
if objects is not None:
camera = bpy.data.objects[render_context.camera, None]
lamp = bpy.data.objects[render_context.lamp, None] if render_context.lamp is not None else None
- cos = objects_bbox_calc(camera, objects)
+ cos = objects_bbox_calc(camera, objects, offset_matrix)
loc, ortho_scale = camera.camera_fit_coords(scene, cos)
camera.location = loc
if lamp:
@@ -322,7 +323,7 @@ def do_previews(do_objects, do_groups, do_scenes, do_data_intern):
prev_scenename = bpy.context.screen.scene.name
if do_objects:
- prev_shown = tuple(ob.hide_render for ob in ids_nolib(bpy.data.objects))
+ prev_shown = {ob.name: ob.hide_render for ob in ids_nolib(bpy.data.objects)}
for ob in ids_nolib(bpy.data.objects):
if ob in objects_ignored:
continue
@@ -368,8 +369,10 @@ def do_previews(do_objects, do_groups, do_scenes, do_data_intern):
scene.objects.unlink(ob)
ob.hide_render = True
- for ob, is_rendered in zip(tuple(ids_nolib(bpy.data.objects)), prev_shown):
- ob.hide_render = is_rendered
+ for ob in ids_nolib(bpy.data.objects):
+ is_rendered = prev_shown.get(ob.name, ...)
+ if is_rendered is not ...:
+ ob.hide_render = is_rendered
if do_groups:
for grp in ids_nolib(bpy.data.groups):
@@ -391,7 +394,9 @@ def do_previews(do_objects, do_groups, do_scenes, do_data_intern):
grp_obname = grp_ob.name
scene.update()
- preview_render_do(render_context, 'groups', grp.name, objects)
+ offset_matrix = Matrix.Translation(grp.dupli_offset).inverted()
+
+ preview_render_do(render_context, 'groups', grp.name, objects, offset_matrix)
scene = bpy.data.scenes[render_context.scene, None]
scene.objects.unlink(bpy.data.objects[grp_obname, None])
@@ -466,14 +471,26 @@ def main():
argv = sys.argv[sys.argv.index("--") + 1:] if "--" in sys.argv else []
parser = argparse.ArgumentParser(description="Use Blender to generate previews for currently open Blender file's items.")
- parser.add_argument('--clear', default=False, action="store_true", help="Clear previews instead of generating them.")
- parser.add_argument('--no_scenes', default=True, action="store_false", help="Do not generate/clear previews for scene IDs.")
- parser.add_argument('--no_groups', default=True, action="store_false", help="Do not generate/clear previews for group IDs.")
- parser.add_argument('--no_objects', default=True, action="store_false", help="Do not generate/clear previews for object IDs.")
+ parser.add_argument('--clear', default=False, action="store_true",
+ help="Clear previews instead of generating them.")
+ parser.add_argument('--no_backups', default=False, action="store_true",
+ help="Do not generate a backup .blend1 file when saving processed ones.")
+ parser.add_argument('--no_scenes', default=True, action="store_false",
+ help="Do not generate/clear previews for scene IDs.")
+ parser.add_argument('--no_groups', default=True, action="store_false",
+ help="Do not generate/clear previews for group IDs.")
+ parser.add_argument('--no_objects', default=True, action="store_false",
+ help="Do not generate/clear previews for object IDs.")
parser.add_argument('--no_data_intern', default=True, action="store_false",
help="Do not generate/clear previews for mat/tex/image/etc. IDs (those handled by core Blender code).")
args = parser.parse_args(argv)
+ orig_save_version = bpy.context.user_preferences.filepaths.save_version
+ if args.no_backups:
+ bpy.context.user_preferences.filepaths.save_version = 0
+ elif orig_save_version < 1:
+ bpy.context.user_preferences.filepaths.save_version = 1
+
if args.clear:
print("clear!")
do_clear_previews(do_objects=args.no_objects, do_groups=args.no_groups, do_scenes=args.no_scenes,
@@ -483,6 +500,9 @@ def main():
do_previews(do_objects=args.no_objects, do_groups=args.no_groups, do_scenes=args.no_scenes,
do_data_intern=args.no_data_intern)
+ # Not really necessary, but better be consistent.
+ bpy.context.user_preferences.filepaths.save_version = orig_save_version
+
if __name__ == "__main__":
print("\n\n *** Running {} *** \n".format(__file__))
diff --git a/release/scripts/modules/bpy/__init__.py b/release/scripts/modules/bpy/__init__.py
index b0d2233b380..f012c1317d4 100644
--- a/release/scripts/modules/bpy/__init__.py
+++ b/release/scripts/modules/bpy/__init__.py
@@ -56,22 +56,16 @@ def main():
# fake module to allow:
# from bpy.types import Panel
- sys.modules["bpy.app"] = app
- sys.modules["bpy.app.handlers"] = app.handlers
- sys.modules["bpy.app.translations"] = app.translations
- sys.modules["bpy.types"] = types
-
- #~ if "-d" in sys.argv: # Enable this to measure start up speed
- if 0:
- import cProfile
- cProfile.run("import bpy; bpy.utils.load_scripts()", "blender.prof")
-
- import pstats
- p = pstats.Stats("blender.prof")
- p.sort_stats("cumulative").print_stats(100)
-
- else:
- utils.load_scripts()
+ sys.modules.update({
+ "bpy.app": app,
+ "bpy.app.handlers": app.handlers,
+ "bpy.app.translations": app.translations,
+ "bpy.types": types,
+ })
+
+ # Initializes Python classes.
+ # (good place to run a profiler or trace).
+ utils.load_scripts()
main()
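Registering the fake submodules in sys.modules is what lets "from bpy.types import Panel" resolve even though there is no bpy/types.py on disk; the same trick in plain Python, with a hypothetical package name:

    import sys
    import types

    fakepkg = types.ModuleType("fakepkg")
    sub = types.ModuleType("fakepkg.sub")
    sub.value = 42
    fakepkg.sub = sub

    # Once both dotted names are in sys.modules, normal import syntax works.
    sys.modules.update({
        "fakepkg": fakepkg,
        "fakepkg.sub": sub,
    })

    from fakepkg.sub import value
    assert value == 42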
diff --git a/release/scripts/modules/bpy/path.py b/release/scripts/modules/bpy/path.py
index d7c6101115d..30f6c8eebed 100644
--- a/release/scripts/modules/bpy/path.py
+++ b/release/scripts/modules/bpy/path.py
@@ -35,6 +35,7 @@ __all__ = (
"extensions_audio",
"is_subdir",
"module_names",
+ "native_pathsep",
"reduce_dirs",
"relpath",
"resolve_ncase",
@@ -69,19 +70,23 @@ def abspath(path, start=None, library=None):
if isinstance(path, bytes):
if path.startswith(b"//"):
if library:
- start = _os.path.dirname(abspath(_getattr_bytes(library, "filepath")))
- return _os.path.join(_os.path.dirname(_getattr_bytes(_bpy.data, "filepath"))
- if start is None else start,
- path[2:],
- )
+ start = _os.path.dirname(
+ abspath(_getattr_bytes(library, "filepath")))
+ return _os.path.join(
+ _os.path.dirname(_getattr_bytes(_bpy.data, "filepath"))
+ if start is None else start,
+ path[2:],
+ )
else:
if path.startswith("//"):
if library:
- start = _os.path.dirname(abspath(library.filepath))
- return _os.path.join(_os.path.dirname(_bpy.data.filepath)
- if start is None else start,
- path[2:],
- )
+ start = _os.path.dirname(
+ abspath(library.filepath))
+ return _os.path.join(
+ _os.path.dirname(_bpy.data.filepath)
+ if start is None else start,
+ path[2:],
+ )
return path
@@ -118,13 +123,13 @@ def is_subdir(path, directory):
:arg path: An absolute path.
:type path: string or bytes
"""
- from os.path import normpath, normcase
+ from os.path import normpath, normcase, sep
path = normpath(normcase(path))
directory = normpath(normcase(directory))
if len(path) > len(directory):
- if path.startswith(directory):
- sep = ord(_os.sep) if isinstance(directory, bytes) else _os.sep
- return (path[len(directory)] == sep)
+ sep = sep.encode('ascii') if isinstance(directory, bytes) else sep
+ if path.startswith(directory.rstrip(sep) + sep):
+ return True
return False
@@ -345,6 +350,28 @@ def basename(path):
return _os.path.basename(path[2:] if path[:2] in {"//", b"//"} else path)
+def native_pathsep(path):
+ """
+ Replace the path separator with the systems native ``os.sep``.
+ """
+ if type(path) is str:
+ if _os.sep == "/":
+ return path.replace("\\", "/")
+ else:
+ if path.startswith("//"):
+ return "//" + path[2:].replace("/", "\\")
+ else:
+ return path.replace("/", "\\")
+ else: # bytes
+ if _os.sep == "/":
+ return path.replace(b"\\", b"/")
+ else:
+ if path.startswith(b"//"):
+ return b"//" + path[2:].replace(b"/", b"\\")
+ else:
+ return path.replace(b"/", b"\\")
+
+
def reduce_dirs(dirs):
"""
Given a sequence of directories, remove duplicates and
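The is_subdir() change above compares against the directory plus a trailing separator, which also copes with a bare root path; the core of the check can be exercised without Blender (bytes handling omitted):

    import os

    def is_subdir(path, directory):
        # Same normalization and prefix test as the updated bpy.path.is_subdir().
        path = os.path.normpath(os.path.normcase(path))
        directory = os.path.normpath(os.path.normcase(directory))
        if len(path) > len(directory):
            return path.startswith(directory.rstrip(os.sep) + os.sep)
        return False

    assert is_subdir("/tmp/project/textures", "/tmp/project") is True
    assert is_subdir("/tmp/project_backup", "/tmp/project") is False
    assert is_subdir("/tmp/project", "/") is True

The new native_pathsep() helper, by contrast, only swaps separators to the platform's os.sep while preserving Blender's leading "//" blend-file-relative prefix.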
diff --git a/release/scripts/modules/bpy/utils/__init__.py b/release/scripts/modules/bpy/utils/__init__.py
index 481db4659af..986d8b9f45c 100644
--- a/release/scripts/modules/bpy/utils/__init__.py
+++ b/release/scripts/modules/bpy/utils/__init__.py
@@ -251,7 +251,7 @@ def load_scripts(reload_scripts=False, refresh_scripts=False):
_initialize()
del _addon_utils._initialize
else:
- _addon_utils.reset_all(reload_scripts)
+ _addon_utils.reset_all(reload_scripts=reload_scripts)
del _initialize
# run the active integration preset
diff --git a/release/scripts/modules/bpy/utils/previews.py b/release/scripts/modules/bpy/utils/previews.py
index 965971139e4..c67c523ea72 100644
--- a/release/scripts/modules/bpy/utils/previews.py
+++ b/release/scripts/modules/bpy/utils/previews.py
@@ -86,7 +86,7 @@ class ImagePreviewCollection(dict):
def new(self, name):
if name in self:
- raise KeyException("key %r already exists")
+ raise KeyError("key %r already exists" % name)
p = self[name] = _utils_previews.new(
self._gen_key(name))
return p
@@ -94,7 +94,7 @@ class ImagePreviewCollection(dict):
def load(self, name, path, path_type, force_reload=False):
if name in self:
- raise KeyException("key %r already exists")
+ raise KeyError("key %r already exists" % name)
p = self[name] = _utils_previews.load(
self._gen_key(name), path, path_type, force_reload)
return p
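Typical use of a preview collection with the corrected exception type; the icon path is a placeholder and this only runs inside Blender:

    import bpy.utils.previews

    pcoll = bpy.utils.previews.new()
    try:
        pcoll.load("my_icon", "/path/to/icon.png", 'IMAGE')
        pcoll.load("my_icon", "/path/to/icon.png", 'IMAGE')  # duplicate name
    except KeyError as ex:
        # The old code referenced an undefined KeyException and would have
        # raised NameError instead; a real KeyError is raised now.
        print(ex)
    finally:
        bpy.utils.previews.remove(pcoll)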
diff --git a/release/scripts/modules/bpy_extras/image_utils.py b/release/scripts/modules/bpy_extras/image_utils.py
index ff6d23badb6..f772aab2b14 100644
--- a/release/scripts/modules/bpy_extras/image_utils.py
+++ b/release/scripts/modules/bpy_extras/image_utils.py
@@ -32,6 +32,8 @@ def load_image(imagepath,
convert_callback=None,
verbose=False,
relpath=None,
+ check_existing=False,
+ force_reload=False,
):
"""
Return an image from the file path with options to search multiple paths
@@ -60,6 +62,14 @@ def load_image(imagepath,
:type convert_callback: function
:arg relpath: If not None, make the file relative to this path.
:type relpath: None or string
+ :arg check_existing: If true,
+ returns already loaded image datablock if possible
+ (based on file path).
+ :type check_existing: bool
+ :arg force_reload: If true,
+ force reloading of image (only useful when `check_existing`
+ is also enabled).
+ :type force_reload: bool
:return: an image or None
:rtype: :class:`bpy.types.Image`
"""
@@ -70,9 +80,12 @@ def load_image(imagepath,
# Utility Functions
def _image_load_placeholder(path):
- name = bpy.path.basename(path)
- if type(name) == bytes:
- name = name.decode("utf-8", "replace")
+ name = path
+ if type(path) is str:
+ name = name.encode("utf-8", "replace")
+ name = name.decode("utf-8", "replace")
+ name = os.path.basename(name)
+
image = bpy.data.images.new(name, 128, 128)
# allow the path to be resolved later
image.filepath = path
@@ -85,8 +98,12 @@ def load_image(imagepath,
if convert_callback:
path = convert_callback(path)
+ # Ensure we're not relying on the 'CWD' to resolve the path.
+ if not os.path.isabs(path):
+ path = os.path.abspath(path)
+
try:
- image = bpy.data.images.load(path)
+ image = bpy.data.images.load(path, check_existing)
except RuntimeError:
image = None
@@ -102,6 +119,8 @@ def load_image(imagepath,
image = _image_load_placeholder(path)
if image:
+ if force_reload:
+ image.reload()
if relpath is not None:
# make relative
from bpy.path import relpath as relpath_fn
@@ -131,6 +150,8 @@ def load_image(imagepath,
# -------------------------------------------------------------------------
+ imagepath = bpy.path.native_pathsep(imagepath)
+
if verbose:
print("load_image('%s', '%s', ...)" % (imagepath, dirname))
diff --git a/release/scripts/modules/bpy_extras/io_utils.py b/release/scripts/modules/bpy_extras/io_utils.py
index 65ccc3f8dc3..a7ecd0b80c0 100644
--- a/release/scripts/modules/bpy_extras/io_utils.py
+++ b/release/scripts/modules/bpy_extras/io_utils.py
@@ -137,7 +137,8 @@ def orientation_helper_factory(name, axis_forward='Y', axis_up='Z'):
def _update_axis_forward(self, context):
if self.axis_forward[-1] == self.axis_up[-1]:
- self.axis_up = self.axis_up[0:-1] + 'XYZ'[('XYZ'.index(self.axis_up[-1]) + 1) % 3]
+ self.axis_up = (self.axis_up[0:-1] +
+ 'XYZ'[('XYZ'.index(self.axis_up[-1]) + 1) % 3])
members['axis_forward'] = EnumProperty(
name="Forward",
@@ -154,7 +155,8 @@ def orientation_helper_factory(name, axis_forward='Y', axis_up='Z'):
def _update_axis_up(self, context):
if self.axis_up[-1] == self.axis_forward[-1]:
- self.axis_forward = self.axis_forward[0:-1] + 'XYZ'[('XYZ'.index(self.axis_forward[-1]) + 1) % 3]
+ self.axis_forward = (self.axis_forward[0:-1] +
+ 'XYZ'[('XYZ'.index(self.axis_forward[-1]) + 1) % 3])
members['axis_up'] = EnumProperty(
name="Up",
diff --git a/release/scripts/modules/bpy_extras/keyconfig_utils.py b/release/scripts/modules/bpy_extras/keyconfig_utils.py
index 7e4c9e885e7..6246e4489e1 100644
--- a/release/scripts/modules/bpy_extras/keyconfig_utils.py
+++ b/release/scripts/modules/bpy_extras/keyconfig_utils.py
@@ -27,14 +27,14 @@ KM_HIERARCHY = [
('Window', 'EMPTY', 'WINDOW', []), # file save, window change, exit
('Screen', 'EMPTY', 'WINDOW', [ # full screen, undo, screenshot
('Screen Editing', 'EMPTY', 'WINDOW', []), # re-sizing, action corners
+ ('Header', 'EMPTY', 'WINDOW', []), # header stuff (per region)
]),
('View2D', 'EMPTY', 'WINDOW', []), # view 2d navigation (per region)
('View2D Buttons List', 'EMPTY', 'WINDOW', []), # view 2d with buttons navigation
- ('Header', 'EMPTY', 'WINDOW', []), # header stuff (per region)
- ('Grease Pencil', 'EMPTY', 'WINDOW', [ # grease pencil stuff (per region)
- ('Grease Pencil Stroke Edit Mode', 'EMPTY', 'WINDOW', []),
+ ('User Interface', 'EMPTY', 'WINDOW', [
+ ('Eyedropper Modal Map', 'EMPTY', 'WINDOW', []),
]),
('3D View', 'VIEW_3D', 'WINDOW', [ # view 3d navigation and generic stuff (select, transform)
@@ -73,10 +73,6 @@ KM_HIERARCHY = [
('3D View Generic', 'VIEW_3D', 'WINDOW', []), # toolbar and properties
]),
- ('Frames', 'EMPTY', 'WINDOW', []), # frame navigation (per region)
- ('Markers', 'EMPTY', 'WINDOW', []), # markers (per region)
- ('Animation', 'EMPTY', 'WINDOW', []), # frame change on click, preview range (per region)
- ('Animation Channels', 'EMPTY', 'WINDOW', []),
('Graph Editor', 'GRAPH_EDITOR', 'WINDOW', [
('Graph Editor Generic', 'GRAPH_EDITOR', 'WINDOW', []),
]),
@@ -85,15 +81,15 @@ KM_HIERARCHY = [
('NLA Channels', 'NLA_EDITOR', 'WINDOW', []),
('NLA Generic', 'NLA_EDITOR', 'WINDOW', []),
]),
+ ('Timeline', 'TIMELINE', 'WINDOW', []),
('Image', 'IMAGE_EDITOR', 'WINDOW', [
- ('UV Editor', 'EMPTY', 'WINDOW', []), # image (reverse order, UVEdit before Image
+ ('UV Editor', 'EMPTY', 'WINDOW', []), # image (reverse order, UVEdit before Image)
('Image Paint', 'EMPTY', 'WINDOW', []), # image and view3d
('UV Sculpt', 'EMPTY', 'WINDOW', []),
('Image Generic', 'IMAGE_EDITOR', 'WINDOW', []),
]),
- ('Timeline', 'TIMELINE', 'WINDOW', []),
('Outliner', 'OUTLINER', 'WINDOW', []),
('Node Editor', 'NODE_EDITOR', 'WINDOW', [
@@ -122,9 +118,17 @@ KM_HIERARCHY = [
('Clip Editor', 'CLIP_EDITOR', 'WINDOW', []),
('Clip Graph Editor', 'CLIP_EDITOR', 'WINDOW', []),
('Clip Dopesheet Editor', 'CLIP_EDITOR', 'WINDOW', []),
- ('Mask Editing', 'EMPTY', 'WINDOW', []), # image (reverse order, UVEdit before Image
]),
+ ('Grease Pencil', 'EMPTY', 'WINDOW', [ # grease pencil stuff (per region)
+ ('Grease Pencil Stroke Edit Mode', 'EMPTY', 'WINDOW', []),
+ ]),
+ ('Mask Editing', 'EMPTY', 'WINDOW', []),
+ ('Frames', 'EMPTY', 'WINDOW', []), # frame navigation (per region)
+ ('Markers', 'EMPTY', 'WINDOW', []), # markers (per region)
+ ('Animation', 'EMPTY', 'WINDOW', []), # frame change on click, preview range (per region)
+ ('Animation Channels', 'EMPTY', 'WINDOW', []),
+
('View3D Gesture Circle', 'EMPTY', 'WINDOW', []),
('Gesture Straight Line', 'EMPTY', 'WINDOW', []),
('Gesture Zoom Border', 'EMPTY', 'WINDOW', []),
@@ -163,13 +167,12 @@ def _export_properties(prefix, properties, kmi_id, lines=None):
def string_value(value):
if isinstance(value, str) or isinstance(value, bool) or isinstance(value, float) or isinstance(value, int):
- result = repr(value)
+ return repr(value)
elif getattr(value, '__len__', False):
return repr(list(value))
- else:
- print("Export key configuration: can't write ", value)
- return result
+ print("Export key configuration: can't write ", value)
+ return ""
for pname in properties.bl_rna.properties.keys():
if pname != "rna_type":
diff --git a/release/scripts/modules/bpy_extras/object_utils.py b/release/scripts/modules/bpy_extras/object_utils.py
index 78fb6aa8fa2..c2c306e5145 100644
--- a/release/scripts/modules/bpy_extras/object_utils.py
+++ b/release/scripts/modules/bpy_extras/object_utils.py
@@ -33,6 +33,7 @@ import bpy
from bpy.props import (
BoolProperty,
+ BoolVectorProperty,
FloatVectorProperty,
)
@@ -136,16 +137,22 @@ def object_data_add(context, obdata, operator=None, use_active_layer=True, name=
if context.space_data and context.space_data.type == 'VIEW_3D':
v3d = context.space_data
- if use_active_layer:
- if v3d and v3d.local_view:
- base.layers_from_view(context.space_data)
- base.layers[scene.active_layer] = True
- else:
- base.layers = [True if i == scene.active_layer
- else False for i in range(len(scene.layers))]
+ if operator is not None and any(operator.layers):
+ base.layers = operator.layers
else:
- if v3d:
- base.layers_from_view(context.space_data)
+ if use_active_layer:
+ if v3d and v3d.local_view:
+ base.layers_from_view(context.space_data)
+ base.layers[scene.active_layer] = True
+ else:
+ base.layers = [True if i == scene.active_layer
+ else False for i in range(len(scene.layers))]
+ else:
+ if v3d:
+ base.layers_from_view(context.space_data)
+
+ if operator is not None:
+ operator.layers = base.layers
obj_new.matrix_world = add_object_align_init(context, operator)
@@ -209,6 +216,12 @@ class AddObjectHelper:
name="Rotation",
subtype='EULER',
)
+ layers = BoolVectorProperty(
+ name="Layers",
+ size=20,
+ subtype='LAYER',
+ options={'HIDDEN', 'SKIP_SAVE'},
+ )
@classmethod
def poll(self, context):
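A minimal add-mesh operator sketch, in the spirit of Blender's operator templates, showing how the new layers property on AddObjectHelper flows through object_data_add(); the class name and idname are hypothetical:

    import bpy
    from bpy_extras.object_utils import AddObjectHelper, object_data_add

    class OBJECT_OT_add_demo_quad(bpy.types.Operator, AddObjectHelper):
        """Add a single quad, honoring the shared location/rotation/layers options"""
        bl_idname = "mesh.add_demo_quad"
        bl_label = "Add Demo Quad"
        bl_options = {'REGISTER', 'UNDO'}

        def execute(self, context):
            verts = [(-1, -1, 0), (1, -1, 0), (1, 1, 0), (-1, 1, 0)]
            faces = [(0, 1, 2, 3)]
            mesh = bpy.data.meshes.new("DemoQuad")
            mesh.from_pydata(verts, [], faces)
            # Passing the operator lets object_data_add() read (and write back)
            # the 'layers' vector property defined on AddObjectHelper.
            object_data_add(context, mesh, operator=self)
            return {'FINISHED'}

    bpy.utils.register_class(OBJECT_OT_add_demo_quad)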
diff --git a/release/scripts/modules/bpy_types.py b/release/scripts/modules/bpy_types.py
index 92dbd2dbd0e..c1a37d10961 100644
--- a/release/scripts/modules/bpy_types.py
+++ b/release/scripts/modules/bpy_types.py
@@ -21,11 +21,13 @@
from _bpy import types as bpy_types
import _bpy
-StructRNA = bpy_types.Struct.__bases__[0]
-StructMetaPropGroup = _bpy.StructMetaPropGroup
+StructRNA = bpy_types.bpy_struct
+StructMetaPropGroup = bpy_types.bpy_struct_meta_idprop
# StructRNA = bpy_types.Struct
bpy_types.BlendDataLibraries.load = _bpy._library_load
+bpy_types.BlendDataLibraries.write = _bpy._library_write
+bpy_types.BlendData.user_map = _bpy._rna_id_collection_user_map
class Context(StructRNA):
@@ -34,8 +36,10 @@ class Context(StructRNA):
def copy(self):
from types import BuiltinMethodType
new_context = {}
- generic_attrs = (list(StructRNA.__dict__.keys()) +
- ["bl_rna", "rna_type", "copy"])
+ generic_attrs = (
+ *StructRNA.__dict__.keys(),
+ "bl_rna", "rna_type", "copy",
+ )
for attr in dir(self):
if not (attr.startswith("_") or attr in generic_attrs):
value = getattr(self, attr)
@@ -205,7 +209,7 @@ class _GenericBone:
@property
def basename(self):
"""The name of this bone before any '.' character"""
- #return self.name.rsplit(".", 1)[0]
+ # return self.name.rsplit(".", 1)[0]
return self.name.split(".")[0]
@property
@@ -405,28 +409,24 @@ class Mesh(bpy_types.ID):
:type faces: iterable object
"""
+ from itertools import chain, islice, accumulate
+
+ face_lengths = tuple(map(len, faces))
+
self.vertices.add(len(vertices))
self.edges.add(len(edges))
- self.loops.add(sum((len(f) for f in faces)))
+ self.loops.add(sum(face_lengths))
self.polygons.add(len(faces))
- vertices_flat = [f for v in vertices for f in v]
- self.vertices.foreach_set("co", vertices_flat)
- del vertices_flat
-
- edges_flat = [i for e in edges for i in e]
- self.edges.foreach_set("vertices", edges_flat)
- del edges_flat
-
- # this is different in bmesh
- loop_index = 0
- for i, p in enumerate(self.polygons):
- f = faces[i]
- loop_len = len(f)
- p.loop_start = loop_index
- p.loop_total = loop_len
- p.vertices = f
- loop_index += loop_len
+ self.vertices.foreach_set("co", tuple(chain.from_iterable(vertices)))
+ self.edges.foreach_set("vertices", tuple(chain.from_iterable(edges)))
+
+ vertex_indices = tuple(chain.from_iterable(faces))
+ loop_starts = tuple(islice(chain([0], accumulate(face_lengths)), len(faces)))
+
+ self.polygons.foreach_set("loop_total", face_lengths)
+ self.polygons.foreach_set("loop_start", loop_starts)
+ self.polygons.foreach_set("vertices", vertex_indices)
# if no edges - calculate them
if faces and (not edges):
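The rewritten from_pydata() derives each polygon's loop_start as a running sum of the face lengths and feeds everything through foreach_set(); the index math can be checked without Blender:

    from itertools import accumulate, chain, islice

    faces = [(0, 1, 2), (2, 1, 3, 4), (4, 3, 5)]
    face_lengths = tuple(map(len, faces))

    # One flat list of vertex indices, plus where each polygon starts in it.
    vertex_indices = tuple(chain.from_iterable(faces))
    loop_starts = tuple(islice(chain([0], accumulate(face_lengths)), len(faces)))

    assert vertex_indices == (0, 1, 2, 2, 1, 3, 4, 4, 3, 5)
    assert loop_starts == (0, 3, 7)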
diff --git a/release/scripts/modules/console_python.py b/release/scripts/modules/console_python.py
index 59e4f2314d8..64bb002d6a1 100644
--- a/release/scripts/modules/console_python.py
+++ b/release/scripts/modules/console_python.py
@@ -136,33 +136,40 @@ def execute(context, is_interactive):
console, stdout, stderr = get_console(hash(context.region))
- # redirect output
- sys.stdout = stdout
- sys.stderr = stderr
-
- # don't allow the stdin to be used, can lock blender.
- stdin_backup = sys.stdin
- sys.stdin = None
-
if _BPY_MAIN_OWN:
main_mod_back = sys.modules["__main__"]
sys.modules["__main__"] = console._bpy_main_mod
- # in case exception happens
- line = "" # in case of encoding error
- is_multiline = False
+ # redirect output
+ from contextlib import (
+ redirect_stdout,
+ redirect_stderr,
+ )
+
+ # not included with Python
+ class redirect_stdin(redirect_stdout.__base__):
+ _stream = "stdin"
- try:
- line = line_object.body
+ # don't allow the stdin to be used, can lock blender.
+ with redirect_stdout(stdout), \
+ redirect_stderr(stderr), \
+ redirect_stdin(None):
- # run the console, "\n" executes a multi line statement
- line_exec = line if line.strip() else "\n"
+ # in case exception happens
+ line = "" # in case of encoding error
+ is_multiline = False
- is_multiline = console.push(line_exec)
- except:
- # unlikely, but this can happen with unicode errors for example.
- import traceback
- stderr.write(traceback.format_exc())
+ try:
+ line = line_object.body
+
+ # run the console, "\n" executes a multi line statement
+ line_exec = line if line.strip() else "\n"
+
+ is_multiline = console.push(line_exec)
+ except:
+ # unlikely, but this can happen with unicode errors for example.
+ import traceback
+ stderr.write(traceback.format_exc())
if _BPY_MAIN_OWN:
sys.modules["__main__"] = main_mod_back
@@ -174,8 +181,6 @@ def execute(context, is_interactive):
output_err = stderr.read()
# cleanup
- sys.stdout = sys.__stdout__
- sys.stderr = sys.__stderr__
sys.last_traceback = None
# So we can reuse, clear all data
@@ -213,9 +218,6 @@ def execute(context, is_interactive):
if output_err:
add_scrollback(output_err, 'ERROR')
- # restore the stdin
- sys.stdin = stdin_backup
-
# execute any hooks
for func, args in execute.hooks:
func(*args)
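The redirect_stdin class above relies on contextlib.redirect_stdout deriving from a generic _RedirectStream helper (Python 3.5+) that swaps whichever sys attribute is named by _stream; the same trick works in plain Python:

    import sys
    from contextlib import redirect_stdout

    class redirect_stdin(redirect_stdout.__base__):
        _stream = "stdin"

    with redirect_stdin(None):
        # sys.stdin is disabled inside the block, so nothing can read from it
        # and lock up the process (Blender's motivation for doing this).
        assert sys.stdin is None
    assert sys.stdin is not None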
diff --git a/release/scripts/modules/progress_report.py b/release/scripts/modules/progress_report.py
index 578eb967fec..fc77a3e998e 100644
--- a/release/scripts/modules/progress_report.py
+++ b/release/scripts/modules/progress_report.py
@@ -99,7 +99,7 @@ class ProgressReport:
def enter_substeps(self, nbr, msg=""):
if msg:
self.update(msg)
- self.steps.append(self.steps[-1] / nbr)
+ self.steps.append(self.steps[-1] / max(nbr, 1))
self.curr_step.append(0)
self.start_time.append(time.time())
diff --git a/release/scripts/modules/rna_keymap_ui.py b/release/scripts/modules/rna_keymap_ui.py
index 21d1959a037..2ca7a7997a5 100644
--- a/release/scripts/modules/rna_keymap_ui.py
+++ b/release/scripts/modules/rna_keymap_ui.py
@@ -70,7 +70,7 @@ def draw_km(display_keymaps, kc, km, children, layout, level):
col = _indented_layout(layout, level)
- row = col.row()
+ row = col.row(align=True)
row.prop(km, "show_expanded_children", text="", emboss=False)
row.label(text=km.name, text_ctxt=i18n_contexts.id_windowmanager)
@@ -89,7 +89,7 @@ def draw_km(display_keymaps, kc, km, children, layout, level):
# Put the Parent key map's entries in a 'global' sub-category
# equal in hierarchy to the other children categories
subcol = _indented_layout(col, level + 1)
- subrow = subcol.row()
+ subrow = subcol.row(align=True)
subrow.prop(km, "show_expanded_items", text="", emboss=False)
subrow.label(text=iface_("%s (Global)") % km.name, translate=False)
else:
@@ -97,25 +97,25 @@ def draw_km(display_keymaps, kc, km, children, layout, level):
# Key Map items
if km.show_expanded_items:
+ kmi_level = level + 3 if children else level + 1
for kmi in km.keymap_items:
- draw_kmi(display_keymaps, kc, km, kmi, col, level + 1)
+ draw_kmi(display_keymaps, kc, km, kmi, col, kmi_level)
# "Add New" at end of keymap item list
- col = _indented_layout(col, level + 1)
- subcol = col.split(percentage=0.2).column()
+ subcol = _indented_layout(col, kmi_level)
+ subcol = subcol.split(percentage=0.2).column()
subcol.operator("wm.keyitem_add", text="Add New", text_ctxt=i18n_contexts.id_windowmanager,
icon='ZOOMIN')
- col.separator()
+ col.separator()
# Child key maps
if children:
- subcol = col.column()
- row = subcol.row()
-
for entry in children:
draw_entry(display_keymaps, entry, col, level + 1)
+ col.separator()
+
def draw_kmi(display_keymaps, kc, km, kmi, layout, level):
map_type = kmi.map_type
@@ -128,7 +128,7 @@ def draw_kmi(display_keymaps, kc, km, kmi, layout, level):
else:
box = col.column()
- split = box.split(percentage=0.05)
+ split = box.split(percentage=0.01)
# header bar
row = split.row()
diff --git a/release/scripts/modules/rna_prop_ui.py b/release/scripts/modules/rna_prop_ui.py
index 195b5767189..c0d92c331b7 100644
--- a/release/scripts/modules/rna_prop_ui.py
+++ b/release/scripts/modules/rna_prop_ui.py
@@ -41,7 +41,8 @@ def rna_idprop_ui_del(item):
def rna_idprop_ui_prop_update(item, prop):
prop_rna = item.path_resolve("[\"%s\"]" % prop.replace("\"", "\\\""), False)
- prop_rna.update()
+ if isinstance(prop_rna, bpy.types.bpy_prop):
+ prop_rna.update()
def rna_idprop_ui_prop_get(item, prop, create=True):
diff --git a/release/scripts/modules/sys_info.py b/release/scripts/modules/sys_info.py
index 8ca3014a31e..49395dd48f0 100644
--- a/release/scripts/modules/sys_info.py
+++ b/release/scripts/modules/sys_info.py
@@ -20,22 +20,15 @@
# classes for extracting info from blenders internal classes
-import bpy
-import bgl
-import sys
+def write_sysinfo(filepath):
+ import sys
-
-def write_sysinfo(op):
import textwrap
+ import subprocess
- output_filename = "system-info.txt"
-
- output = bpy.data.texts.get(output_filename)
- if output:
- output.clear()
- else:
- output = bpy.data.texts.new(name=output_filename)
+ import bpy
+ import bgl
# pretty repr
def prepr(v):
@@ -47,17 +40,19 @@ def write_sysinfo(op):
r = r[1:-1]
return r
+ output = open(filepath, 'w', encoding="utf-8")
header = "= Blender %s System Information =\n" % bpy.app.version_string
- lilies = "%s\n\n" % (len(header) * "=")
- firstlilies = "%s\n" % (len(header) * "=")
- output.write(firstlilies)
+ lilies = "%s\n\n" % ((len(header) - 1) * "=")
+ output.write(lilies[:-1])
output.write(header)
output.write(lilies)
+ def title(text):
+ return "\n%s:\n%s" % (text, lilies)
+
# build info
- output.write("\nBlender:\n")
- output.write(lilies)
+ output.write(title("Blender"))
output.write("version: %s, branch: %s, commit date: %s %s, hash: %s, type: %s\n" %
(bpy.app.version_string,
prepr(bpy.app.build_branch),
@@ -76,16 +71,28 @@ def write_sysinfo(op):
output.write("build system: %s\n" % prepr(bpy.app.build_system))
# python info
- output.write("\nPython:\n")
- output.write(lilies)
+ output.write(title("Python"))
output.write("version: %s\n" % (sys.version))
output.write("paths:\n")
for p in sys.path:
- output.write("\t%r\n" % (p))
-
- output.write("\nDirectories:\n")
- output.write(lilies)
- output.write("scripts: %r\n" % (bpy.utils.script_paths()))
+ output.write("\t%r\n" % p)
+
+ output.write(title("Python (External Binary)"))
+ output.write("binary path: %s\n" % prepr(bpy.app.binary_path_python))
+ try:
+ py_ver = prepr(subprocess.check_output([
+ bpy.app.binary_path_python,
+ "--version",
+ ]).strip())
+ except Exception as e:
+ py_ver = str(e)
+ output.write("version: %s\n" % py_ver)
+ del py_ver
+
+ output.write(title("Directories"))
+ output.write("scripts:\n")
+ for p in bpy.utils.script_paths():
+ output.write("\t%r\n" % p)
output.write("user scripts: %r\n" % (bpy.utils.script_path_user()))
output.write("pref scripts: %r\n" % (bpy.utils.script_path_pref()))
output.write("datafiles: %r\n" % (bpy.utils.user_resource('DATAFILES')))
@@ -94,8 +101,7 @@ def write_sysinfo(op):
output.write("autosave: %r\n" % (bpy.utils.user_resource('AUTOSAVE')))
output.write("tempdir: %r\n" % (bpy.app.tempdir))
- output.write("\nFFmpeg:\n")
- output.write(lilies)
+ output.write(title("FFmpeg"))
ffmpeg = bpy.app.ffmpeg
if ffmpeg.supported:
for lib in ("avcodec", "avdevice", "avformat", "avutil", "swscale"):
@@ -105,8 +111,7 @@ def write_sysinfo(op):
output.write("Blender was built without FFmpeg support\n")
if bpy.app.build_options.sdl:
- output.write("\nSDL\n")
- output.write(lilies)
+ output.write(title("SDL"))
output.write("Version: %s\n" % bpy.app.sdl.version_string)
output.write("Loading method: ")
if bpy.app.build_options.sdl_dynload:
@@ -116,8 +121,7 @@ def write_sysinfo(op):
if not bpy.app.sdl.available:
output.write("WARNING: Blender could not load SDL library\n")
- output.write("\nOther Libraries:\n")
- output.write(lilies)
+ output.write(title("Other Libraries"))
ocio = bpy.app.ocio
output.write("OpenColorIO: ")
if ocio.supported:
@@ -146,54 +150,59 @@ def write_sysinfo(op):
else:
output.write("Blender was built without Cycles support\n")
+ openvdb = bpy.app.openvdb
+ output.write("OpenVDB: ")
+ if openvdb.supported:
+ output.write("%s\n" % openvdb.version_string)
+ else:
+ output.write("Blender was built without OpenVDB support\n")
+
if not bpy.app.build_options.sdl:
output.write("SDL: Blender was built without SDL support\n")
if bpy.app.background:
output.write("\nOpenGL: missing, background mode\n")
else:
- output.write("\nOpenGL\n")
- output.write(lilies)
+ output.write(title("OpenGL"))
version = bgl.glGetString(bgl.GL_RENDERER)
output.write("renderer:\t%r\n" % version)
output.write("vendor:\t\t%r\n" % (bgl.glGetString(bgl.GL_VENDOR)))
output.write("version:\t%r\n" % (bgl.glGetString(bgl.GL_VERSION)))
output.write("extensions:\n")
- glext = bgl.glGetString(bgl.GL_EXTENSIONS)
- glext = textwrap.wrap(glext, 70)
+ glext = sorted(bgl.glGetString(bgl.GL_EXTENSIONS).split())
for l in glext:
output.write("\t%s\n" % l)
- output.write("\nImplementation Dependent OpenGL Limits:\n")
- output.write(lilies)
+ output.write(title("Implementation Dependent OpenGL Limits"))
limit = bgl.Buffer(bgl.GL_INT, 1)
bgl.glGetIntegerv(bgl.GL_MAX_TEXTURE_UNITS, limit)
output.write("Maximum Fixed Function Texture Units:\t%d\n" % limit[0])
+ bgl.glGetIntegerv(bgl.GL_MAX_ELEMENTS_VERTICES, limit)
+ output.write("Maximum DrawElements Vertices:\t%d\n" % limit[0])
+ bgl.glGetIntegerv(bgl.GL_MAX_ELEMENTS_INDICES, limit)
+ output.write("Maximum DrawElements Indices:\t%d\n" % limit[0])
output.write("\nGLSL:\n")
- if version[0] > '1':
- bgl.glGetIntegerv(bgl.GL_MAX_VARYING_FLOATS, limit)
- output.write("Maximum Varying Floats:\t%d\n" % limit[0])
- bgl.glGetIntegerv(bgl.GL_MAX_VERTEX_ATTRIBS, limit)
- output.write("Maximum Vertex Attributes:\t%d\n" % limit[0])
- bgl.glGetIntegerv(bgl.GL_MAX_VERTEX_UNIFORM_COMPONENTS, limit)
- output.write("Maximum Vertex Uniform Components:\t%d\n" % limit[0])
- bgl.glGetIntegerv(bgl.GL_MAX_FRAGMENT_UNIFORM_COMPONENTS, limit)
- output.write("Maximum Fragment Uniform Components:\t%d\n" % limit[0])
- bgl.glGetIntegerv(bgl.GL_MAX_VERTEX_TEXTURE_IMAGE_UNITS, limit)
- output.write("Maximum Vertex Image Units:\t%d\n" % limit[0])
- bgl.glGetIntegerv(bgl.GL_MAX_TEXTURE_IMAGE_UNITS, limit)
- output.write("Maximum Fragment Image Units:\t%d\n" % limit[0])
- bgl.glGetIntegerv(bgl.GL_MAX_COMBINED_TEXTURE_IMAGE_UNITS, limit)
- output.write("Maximum Pipeline Image Units:\t%d\n" % limit[0])
+ bgl.glGetIntegerv(bgl.GL_MAX_VARYING_FLOATS, limit)
+ output.write("Maximum Varying Floats:\t%d\n" % limit[0])
+ bgl.glGetIntegerv(bgl.GL_MAX_VERTEX_ATTRIBS, limit)
+ output.write("Maximum Vertex Attributes:\t%d\n" % limit[0])
+ bgl.glGetIntegerv(bgl.GL_MAX_VERTEX_UNIFORM_COMPONENTS, limit)
+ output.write("Maximum Vertex Uniform Components:\t%d\n" % limit[0])
+ bgl.glGetIntegerv(bgl.GL_MAX_FRAGMENT_UNIFORM_COMPONENTS, limit)
+ output.write("Maximum Fragment Uniform Components:\t%d\n" % limit[0])
+ bgl.glGetIntegerv(bgl.GL_MAX_VERTEX_TEXTURE_IMAGE_UNITS, limit)
+ output.write("Maximum Vertex Image Units:\t%d\n" % limit[0])
+ bgl.glGetIntegerv(bgl.GL_MAX_TEXTURE_IMAGE_UNITS, limit)
+ output.write("Maximum Fragment Image Units:\t%d\n" % limit[0])
+ bgl.glGetIntegerv(bgl.GL_MAX_COMBINED_TEXTURE_IMAGE_UNITS, limit)
+ output.write("Maximum Pipeline Image Units:\t%d\n" % limit[0])
if bpy.app.build_options.cycles:
import cycles
- output.write("\nCycles\n")
- output.write(lilies)
+ output.write(title("Cycles"))
output.write(cycles.engine.system_info())
- output.current_line_index = 0
+ output.close()
- op.report({'INFO'}, "System information generated in 'system-info.txt'")
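With this change the caller is expected to hand in a target path rather than an operator; a hedged usage sketch from inside Blender, writing next to the temporary files and loading the result into the text editor:

    import os
    import bpy
    import sys_info

    filepath = os.path.join(bpy.app.tempdir, "system-info.txt")
    sys_info.write_sysinfo(filepath)

    text = bpy.data.texts.load(filepath)
    print("system information written to", text.filepath)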