git.blender.org/blender.git
Diffstat (limited to 'tests/python')

 -rw-r--r--  tests/python/CMakeLists.txt                |  65
 -rwxr-xr-x  tests/python/alembic_tests.py              | 478
 -rw-r--r--  tests/python/bl_alembic_import_test.py     | 268
 -rw-r--r--  tests/python/bl_keymap_completeness.py     |   1
 -rw-r--r--  tests/python/bl_load_py_modules.py         |  39
 -rw-r--r--  tests/python/bl_pyapi_idprop_datablock.py  | 338
 -rw-r--r--  tests/python/pep8.py                       |   1
 -rw-r--r--  tests/python/rna_info_dump.py              |   1

8 files changed, 1174 insertions(+), 17 deletions(-)
diff --git a/tests/python/CMakeLists.txt b/tests/python/CMakeLists.txt
index f7ca9b02137..9e1ebcbe669 100644
--- a/tests/python/CMakeLists.txt
+++ b/tests/python/CMakeLists.txt
@@ -18,7 +18,7 @@
#
# ***** END GPL LICENSE BLOCK *****
-# --env-system-scripts allows to run without the install target.
+# --env-system-scripts allows running without the install target.
# Use '--write-blend=/tmp/test.blend' to view output
@@ -47,9 +47,10 @@ else()
set(TEST_BLENDER_EXE ${EXECUTABLE_OUTPUT_PATH}/blender)
endif()
-# for testing with valgrind prefix: valgrind --track-origins=yes --error-limit=no
+# To test with Valgrind, prefix the command with: valgrind --track-origins=yes --error-limit=no
set(TEST_BLENDER_EXE_BARE ${TEST_BLENDER_EXE})
-set(TEST_BLENDER_EXE ${TEST_BLENDER_EXE} --background -noaudio --factory-startup --env-system-scripts ${CMAKE_SOURCE_DIR}/release/scripts)
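+# Keep the bare executable and its parameters separate, so tests further down can
+# combine the same parameters with a different executable path (e.g. the
+# $<TARGET_FILE:blender> generator expression used for the MSVC Alembic tests).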
+set(TEST_BLENDER_EXE_PARAMS --background -noaudio --factory-startup --env-system-scripts ${CMAKE_SOURCE_DIR}/release/scripts)
+set(TEST_BLENDER_EXE ${TEST_BLENDER_EXE} ${TEST_BLENDER_EXE_PARAMS})
# ------------------------------------------------------------------------------
@@ -91,6 +92,10 @@ add_test(script_pyapi_idprop ${TEST_BLENDER_EXE}
--python ${CMAKE_CURRENT_LIST_DIR}/bl_pyapi_idprop.py
)
+add_test(script_pyapi_idprop_datablock ${TEST_BLENDER_EXE}
+ --python ${CMAKE_CURRENT_LIST_DIR}/bl_pyapi_idprop_datablock.py
+)
+
# ------------------------------------------------------------------------------
# MODELING TESTS
add_test(bevel ${TEST_BLENDER_EXE}
@@ -99,8 +104,15 @@ add_test(bevel ${TEST_BLENDER_EXE}
)
add_test(split_faces ${TEST_BLENDER_EXE}
- ${TEST_SRC_DIR}/modeling/split_faces_test.blend
- --python-text run_tests
+ ${TEST_SRC_DIR}/modeling/split_faces_test.blend
+ --python-text run_tests
+)
+
+# ------------------------------------------------------------------------------
+# MODIFIERS TESTS
+add_test(modifier_array ${TEST_BLENDER_EXE}
+ ${TEST_SRC_DIR}/modifier_stack/array_test.blend
+ --python-text run_tests
)
# ------------------------------------------------------------------------------
@@ -438,3 +450,46 @@ if(WITH_CYCLES)
MESSAGE(STATUS "Disabling Cycles tests because tests folder does not exist")
endif()
endif()
+
+if(WITH_ALEMBIC)
+ find_package_wrapper(Alembic)
+ if(NOT ALEMBIC_FOUND)
+ message(FATAL_ERROR "Alembic is enabled but cannot be found")
+ endif()
+ get_filename_component(real_include_dir ${ALEMBIC_INCLUDE_DIR} REALPATH)
+ get_filename_component(ALEMBIC_ROOT_DIR ${real_include_dir} DIRECTORY)
+
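+  # On MSVC the test script is run through Blender's bundled Python interpreter
+  # (the script's shebang cannot be relied upon on Windows), and the binary
+  # locations are resolved via generator expressions.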
+ if(MSVC)
+ add_test(NAME alembic_tests
+ COMMAND
+ "$<TARGET_FILE_DIR:blender>/${BLENDER_VERSION_MAJOR}.${BLENDER_VERSION_MINOR}/python/bin/python$<$<CONFIG:Debug>:_d>"
+ ${CMAKE_CURRENT_LIST_DIR}/alembic_tests.py
+ --blender "$<TARGET_FILE:blender>"
+ --testdir "${TEST_SRC_DIR}/alembic"
+ --alembic-root "${ALEMBIC_ROOT_DIR}"
+ )
+ add_test(NAME script_alembic_import
+ COMMAND
+ "$<TARGET_FILE:blender>" ${TEST_BLENDER_EXE_PARAMS}
+ --python ${CMAKE_CURRENT_LIST_DIR}/bl_alembic_import_test.py
+ --
+ --testdir "${TEST_SRC_DIR}/alembic"
+ --with-legacy-depsgraph=${WITH_LEGACY_DEPSGRAPH}
+ )
+
+ else()
+ add_test(alembic_tests
+ ${CMAKE_CURRENT_LIST_DIR}/alembic_tests.py
+ --blender "${TEST_BLENDER_EXE_BARE}"
+ --testdir "${TEST_SRC_DIR}/alembic"
+ --alembic-root "${ALEMBIC_ROOT_DIR}"
+ )
+ add_test(script_alembic_import ${TEST_BLENDER_EXE}
+ --python ${CMAKE_CURRENT_LIST_DIR}/bl_alembic_import_test.py
+ --
+ --testdir "${TEST_SRC_DIR}/alembic"
+ --with-legacy-depsgraph=${WITH_LEGACY_DEPSGRAPH}
+ )
+
+ endif()
+endif()
diff --git a/tests/python/alembic_tests.py b/tests/python/alembic_tests.py
new file mode 100755
index 00000000000..96a68de9801
--- /dev/null
+++ b/tests/python/alembic_tests.py
@@ -0,0 +1,478 @@
+#!/usr/bin/env python3
+# ##### BEGIN GPL LICENSE BLOCK #####
+#
+# This program is free software; you can redistribute it and/or
+# modify it under the terms of the GNU General Public License
+# as published by the Free Software Foundation; either version 2
+# of the License, or (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this program; if not, write to the Free Software Foundation,
+# Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
+#
+# ##### END GPL LICENSE BLOCK #####
+
+# <pep8 compliant>
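+
+# Standalone usage (example paths, adjust to your setup):
+#   ./alembic_tests.py --blender /path/to/blender \
+#       --testdir /path/to/lib/tests/alembic --alembic-root /path/to/alembic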
+
+import argparse
+import functools
+import shutil
+import pathlib
+import subprocess
+import sys
+import tempfile
+import unittest
+
+
+def with_tempdir(wrapped):
+ """Creates a temporary directory for the function, cleaning up after it returns normally.
+
+ When the wrapped function raises an exception, the contents of the temporary directory
+ remain available for manual inspection.
+
+ The wrapped function is called with an extra positional argument containing
+ the pathlib.Path() of the temporary directory.
+ """
+
+ @functools.wraps(wrapped)
+ def decorator(*args, **kwargs):
+ dirname = tempfile.mkdtemp(prefix='blender-alembic-test')
+ try:
+ retval = wrapped(*args, pathlib.Path(dirname), **kwargs)
+ except:
+ print('Exception in %s, not cleaning up temporary directory %s' % (wrapped, dirname))
+ raise
+ else:
+ shutil.rmtree(dirname)
+ return retval
+
+ return decorator
+
+
+class AbcPropError(Exception):
+ """Raised when AbstractAlembicTest.abcprop() finds an error."""
+
+
+class AbstractAlembicTest(unittest.TestCase):
+ @classmethod
+ def setUpClass(cls):
+ import re
+
+ cls.blender = args.blender
+ cls.testdir = pathlib.Path(args.testdir)
+ cls.alembic_root = pathlib.Path(args.alembic_root)
+
+ # 'abcls' outputs ANSI colour codes, even when stdout is not a terminal.
+ # See https://github.com/alembic/alembic/issues/120
+ cls.ansi_remove_re = re.compile(rb'\x1b[^m]*m')
+
+ # 'abcls' array notation, like "name[16]"
+ cls.abcls_array = re.compile(r'^(?P<name>[^\[]+)(\[(?P<arraysize>\d+)\])?$')
+
+ def run_blender(self, filepath: str, python_script: str, timeout: int=300) -> str:
+ """Runs Blender by opening a blendfile and executing a script.
+
+ Returns Blender's stdout + stderr combined into one string.
+
+ :param filepath: taken relative to self.testdir.
+ :param timeout: in seconds
+ """
+
+ blendfile = self.testdir / filepath
+
+ command = (
+ self.blender,
+ '--background',
+ '-noaudio',
+ '--factory-startup',
+ '--enable-autoexec',
+ str(blendfile),
+ '-E', 'CYCLES',
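+            # Make Blender exit with code 47 when the Python expression raises,
+            # so script errors are detected below.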
+ '--python-exit-code', '47',
+ '--python-expr', python_script,
+ )
+
+ proc = subprocess.run(command, stdout=subprocess.PIPE, stderr=subprocess.STDOUT,
+ timeout=timeout)
+ output = proc.stdout.decode('utf8')
+ if proc.returncode:
+ self.fail('Error %d running Blender:\n%s' % (proc.returncode, output))
+
+ return output
+
+ def abcprop(self, filepath: pathlib.Path, proppath: str) -> dict:
+ """Uses abcls to obtain compound property values from an Alembic object.
+
+ A dict of subproperties is returned, where the values are Python values.
+
+        The Python bindings for Alembic are old and only compatible with Python 2.x,
+        which is why we can't use them here and have to rely on other tooling.
+ """
+ import collections
+
+ abcls = self.alembic_root / 'bin' / 'abcls'
+
+ command = (str(abcls), '-vl', '%s%s' % (filepath, proppath))
+ proc = subprocess.run(command, stdout=subprocess.PIPE, stderr=subprocess.STDOUT,
+ timeout=30)
+
+ coloured_output = proc.stdout
+ output = self.ansi_remove_re.sub(b'', coloured_output).decode('utf8')
+
+        # Because of the ANSI colour codes we have to strip them before decoding to
+        # text; this also means we cannot use the universal_newlines parameter of
+        # subprocess.run() and have to normalise the newlines ourselves.
+ output = output.replace('\r\n', '\n').replace('\r', '\n')
+
+ if proc.returncode:
+ raise AbcPropError('Error %d running abcls:\n%s' % (proc.returncode, output))
+
+ # Mapping from value type to callable that can convert a string to Python values.
+ converters = {
+ 'bool_t': int,
+ 'uint8_t': int,
+ 'int16_t': int,
+ 'int32_t': int,
+ 'uint64_t': int,
+ 'float64_t': float,
+ 'float32_t': float,
+ }
+
+ result = {}
+
+ # Ideally we'd get abcls to output JSON, see https://github.com/alembic/alembic/issues/121
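+        # Each line of interest looks roughly like
+        #     <PropertyType> <valtype>[scalarsize] <name>[extent]
+        # e.g. "ScalarProperty float64_t[16] .vals[1]" (format inferred from the
+        # parsing below, not from abcls documentation).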
+ lines = collections.deque(output.split('\n'))
+ while lines:
+ info = lines.popleft()
+ if not info:
+ continue
+ parts = info.split()
+ proptype = parts[0]
+
+ if proptype == 'CompoundProperty':
+ # To read those, call self.abcprop() on it.
+ continue
+ if len(parts) < 2:
+                raise ValueError('Error parsing result from abcprop: %s' % info.strip())
+ valtype_and_arrsize, name_and_extent = parts[1:]
+
+ # Parse name and extent
+ m = self.abcls_array.match(name_and_extent)
+ if not m:
+ self.fail('Unparsable name/extent from abcls: %s' % name_and_extent)
+ name, extent = m.group('name'), m.group('arraysize')
+
+ if extent != '1':
+ self.fail('Unsupported extent %s for property %s/%s' % (extent, proppath, name))
+
+ # Parse type
+ m = self.abcls_array.match(valtype_and_arrsize)
+ if not m:
+ self.fail('Unparsable value type from abcls: %s' % valtype_and_arrsize)
+ valtype, scalarsize = m.group('name'), m.group('arraysize')
+
+ # Convert values
+ try:
+ conv = converters[valtype]
+ except KeyError:
+ self.fail('Unsupported type %s for property %s/%s' % (valtype, proppath, name))
+
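+            # Defined inside the loop so it closes over conv/scalarsize of the
+            # property currently being parsed; it is called immediately below.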
+ def convert_single_line(linevalue):
+ try:
+ if scalarsize is None:
+ return conv(linevalue)
+ else:
+ return [conv(v.strip()) for v in linevalue.split(',')]
+ except ValueError as ex:
+ return str(ex)
+
+ if proptype == 'ScalarProperty':
+ value = lines.popleft()
+ result[name] = convert_single_line(value)
+ elif proptype == 'ArrayProperty':
+ arrayvalue = []
+ # Arrays consist of a variable number of items, and end in a blank line.
+ while True:
+ linevalue = lines.popleft()
+ if not linevalue:
+ break
+ arrayvalue.append(convert_single_line(linevalue))
+ result[name] = arrayvalue
+ else:
+ self.fail('Unsupported type %s for property %s/%s' % (proptype, proppath, name))
+
+ return result
+
+ def assertAlmostEqualFloatArray(self, actual, expect, places=6, delta=None):
+ """Asserts that the arrays of floats are almost equal."""
+
+ self.assertEqual(len(actual), len(expect),
+ 'Actual array has %d items, expected %d' % (len(actual), len(expect)))
+
+ for idx, (act, exp) in enumerate(zip(actual, expect)):
+ self.assertAlmostEqual(act, exp, places=places, delta=delta,
+ msg='%f != %f at index %d' % (act, exp, idx))
+
+
+class HierarchicalAndFlatExportTest(AbstractAlembicTest):
+ @with_tempdir
+ def test_hierarchical_export(self, tempdir: pathlib.Path):
+ abc = tempdir / 'cubes_hierarchical.abc'
+ script = "import bpy; bpy.ops.wm.alembic_export(filepath='%s', start=1, end=1, " \
+ "renderable_only=True, visible_layers_only=True, flatten=False)" % abc.as_posix()
+ self.run_blender('cubes-hierarchy.blend', script)
+
+ # Now check the resulting Alembic file.
+ xform = self.abcprop(abc, '/Cube/Cube_002/Cube_012/.xform')
+ self.assertEqual(1, xform['.inherits'])
+ self.assertAlmostEqualFloatArray(
+ xform['.vals'],
+ [1.0, 0.0, 0.0, 0.0,
+ 0.0, 1.0, 0.0, 0.0,
+ 0.0, 0.0, 1.0, 0.0,
+ 3.07484, -2.92265, 0.0586434, 1.0]
+ )
+
+ @with_tempdir
+ def test_flat_export(self, tempdir: pathlib.Path):
+ abc = tempdir / 'cubes_flat.abc'
+ script = "import bpy; bpy.ops.wm.alembic_export(filepath='%s', start=1, end=1, " \
+ "renderable_only=True, visible_layers_only=True, flatten=True)" % abc.as_posix()
+ self.run_blender('cubes-hierarchy.blend', script)
+
+ # Now check the resulting Alembic file.
+ xform = self.abcprop(abc, '/Cube_012/.xform')
+ self.assertEqual(0, xform['.inherits'])
+
+ self.assertAlmostEqualFloatArray(
+ xform['.vals'],
+ [0.343134, 0.485243, 0.804238, 0,
+ 0.0, 0.856222, -0.516608, 0,
+ -0.939287, 0.177266, 0.293799, 0,
+ 1, 3, 4, 1],
+ )
+
+
+class DupliGroupExportTest(AbstractAlembicTest):
+ @with_tempdir
+ def test_hierarchical_export(self, tempdir: pathlib.Path):
+ abc = tempdir / 'dupligroup_hierarchical.abc'
+ script = "import bpy; bpy.ops.wm.alembic_export(filepath='%s', start=1, end=1, " \
+ "renderable_only=True, visible_layers_only=True, flatten=False)" % abc.as_posix()
+ self.run_blender('dupligroup-scene.blend', script)
+
+ # Now check the resulting Alembic file.
+ xform = self.abcprop(abc, '/Real_Cube/Linked_Suzanne/Cylinder/Suzanne/.xform')
+ self.assertEqual(1, xform['.inherits'])
+ self.assertAlmostEqualFloatArray(
+ xform['.vals'],
+ [1.0, 0.0, 0.0, 0.0,
+ 0.0, 1.0, 0.0, 0.0,
+ 0.0, 0.0, 1.0, 0.0,
+ 0.0, 2.0, 0.0, 1.0]
+ )
+
+ @with_tempdir
+ def test_flat_export(self, tempdir: pathlib.Path):
+ abc = tempdir / 'dupligroup_hierarchical.abc'
+ script = "import bpy; bpy.ops.wm.alembic_export(filepath='%s', start=1, end=1, " \
+ "renderable_only=True, visible_layers_only=True, flatten=True)" % abc.as_posix()
+ self.run_blender('dupligroup-scene.blend', script)
+
+ # Now check the resulting Alembic file.
+ xform = self.abcprop(abc, '/Suzanne/.xform')
+ self.assertEqual(0, xform['.inherits'])
+
+ self.assertAlmostEqualFloatArray(
+ xform['.vals'],
+ [1.5, 0.0, 0.0, 0.0,
+ 0.0, 1.5, 0.0, 0.0,
+ 0.0, 0.0, 1.5, 0.0,
+ 2.0, 3.0, 0.0, 1.0]
+ )
+
+
+class CurveExportTest(AbstractAlembicTest):
+ @with_tempdir
+ def test_export_single_curve(self, tempdir: pathlib.Path):
+ abc = tempdir / 'single-curve.abc'
+ script = "import bpy; bpy.ops.wm.alembic_export(filepath='%s', start=1, end=1, " \
+ "renderable_only=True, visible_layers_only=True, flatten=False)" % abc.as_posix()
+ self.run_blender('single-curve.blend', script)
+
+ # Now check the resulting Alembic file.
+ abcprop = self.abcprop(abc, '/NurbsCurve/NurbsCurveShape/.geom')
+ self.assertEqual(abcprop['.orders'], [4])
+
+ abcprop = self.abcprop(abc, '/NurbsCurve/NurbsCurveShape/.geom/.userProperties')
+ self.assertEqual(abcprop['blender:resolution'], 10)
+
+
+class HairParticlesExportTest(AbstractAlembicTest):
+ """Tests exporting with/without hair/particles.
+
+ Just a basic test to ensure that the enabling/disabling works, and that export
+ works at all. NOT testing the quality/contents of the exported file.
+ """
+
+ def _do_test(self, tempdir: pathlib.Path, export_hair: bool, export_particles: bool) -> pathlib.Path:
+ abc = tempdir / 'hair-particles.abc'
+ script = "import bpy; bpy.ops.wm.alembic_export(filepath='%s', start=1, end=1, " \
+ "renderable_only=True, visible_layers_only=True, flatten=False, " \
+ "export_hair=%r, export_particles=%r, as_background_job=False)" \
+ % (abc.as_posix(), export_hair, export_particles)
+ self.run_blender('hair-particles.blend', script)
+ return abc
+
+ @with_tempdir
+ def test_with_both(self, tempdir: pathlib.Path):
+ abc = self._do_test(tempdir, True, True)
+
+ abcprop = self.abcprop(abc, '/Suzanne/Hair system/.geom')
+ self.assertIn('nVertices', abcprop)
+
+ abcprop = self.abcprop(abc, '/Suzanne/Non-hair particle system/.geom')
+ self.assertIn('.velocities', abcprop)
+
+ abcprop = self.abcprop(abc, '/Suzanne/SuzanneShape/.geom')
+ self.assertIn('.faceIndices', abcprop)
+
+ @with_tempdir
+ def test_with_hair_only(self, tempdir: pathlib.Path):
+ abc = self._do_test(tempdir, True, False)
+
+ abcprop = self.abcprop(abc, '/Suzanne/Hair system/.geom')
+ self.assertIn('nVertices', abcprop)
+
+ self.assertRaises(AbcPropError, self.abcprop, abc,
+ '/Suzanne/Non-hair particle system/.geom')
+
+ abcprop = self.abcprop(abc, '/Suzanne/SuzanneShape/.geom')
+ self.assertIn('.faceIndices', abcprop)
+
+ @with_tempdir
+ def test_with_particles_only(self, tempdir: pathlib.Path):
+ abc = self._do_test(tempdir, False, True)
+
+ self.assertRaises(AbcPropError, self.abcprop, abc, '/Suzanne/Hair system/.geom')
+
+ abcprop = self.abcprop(abc, '/Suzanne/Non-hair particle system/.geom')
+ self.assertIn('.velocities', abcprop)
+
+ abcprop = self.abcprop(abc, '/Suzanne/SuzanneShape/.geom')
+ self.assertIn('.faceIndices', abcprop)
+
+ @with_tempdir
+ def test_with_neither(self, tempdir: pathlib.Path):
+ abc = self._do_test(tempdir, False, False)
+
+ self.assertRaises(AbcPropError, self.abcprop, abc, '/Suzanne/Hair system/.geom')
+ self.assertRaises(AbcPropError, self.abcprop, abc,
+ '/Suzanne/Non-hair particle system/.geom')
+
+ abcprop = self.abcprop(abc, '/Suzanne/SuzanneShape/.geom')
+ self.assertIn('.faceIndices', abcprop)
+
+
+class LongNamesExportTest(AbstractAlembicTest):
+ @with_tempdir
+ def test_export_long_names(self, tempdir: pathlib.Path):
+ abc = tempdir / 'long-names.abc'
+ script = "import bpy; bpy.ops.wm.alembic_export(filepath='%s', start=1, end=1, " \
+ "renderable_only=False, visible_layers_only=False, flatten=False)" % abc.as_posix()
+ self.run_blender('long-names.blend', script)
+
+ name_parts = [
+ 'foG9aeLahgoh5goacee1dah6Hethaghohjaich5pasizairuWigee1ahPeekiGh',
+ 'yoNgoisheedah2ua0eigh2AeCaiTee5bo0uphoo7Aixephah9racahvaingeeH4',
+ 'zuthohnoi1thooS3eezoo8seuph2Boo5aefacaethuvee1aequoonoox1sookie',
+ 'wugh4ciTh3dipiepeequait5uug7thiseek5ca7Eijei5ietaizokohhaecieto',
+ 'up9aeheenein9oteiX6fohP3thiez6Ahvah0oohah1ep2Eesho4Beboechaipoh',
+ 'coh4aehiacheTh0ue0eegho9oku1lohl4loht9ohPoongoow7dasiego6yimuis',
+ 'lohtho8eigahfeipohviepajaix4it2peeQu6Iefee1nevihaes4cee2soh4noy',
+ 'kaht9ahv0ieXaiyih7ohxe8bah7eeyicahjoa2ohbu7Choxua7oongah6sei4bu',
+ 'deif0iPaechohkee5nahx6oi2uJeeN7ze3seunohJibe4shai0mah5Iesh3Quai',
+ 'ChohDahshooNee0NeNohthah0eiDeese3Vu6ohShil1Iey9ja0uebi2quiShae6',
+ 'Dee1kai7eiph2ahh2nufah3zai3eexeengohQue1caj0eeW0xeghi3eshuadoot',
+ 'aeshiup3aengajoog0AhCoo5tiu3ieghaeGhie4Tu1ohh1thee8aepheingah1E',
+ 'ooRa6ahciolohshaifoopeo9ZeiGhae2aech4raisheiWah9AaNga0uas9ahquo',
+ 'thaepheip2aip6shief4EaXopei8ohPo0ighuiXah2ashowai9nohp4uach6Mei',
+ 'ohph4yaev3quieji3phophiem3OoNuisheepahng4waithae3Naichai7aw3noo',
+ 'aibeawaneBahmieyuph8ieng8iopheereeD2uu9Uyee5bei2phahXeir8eeJ8oo',
+ 'ooshahphei2hoh3uth5chaen7ohsai6uutiesucheichai8ungah9Gie1Aiphie',
+ 'eiwohchoo7ere2iebohn4Aapheichaelooriiyaoxaik7ooqua7aezahx0aeJei',
+ 'Vah0ohgohphiefohTheshieghichaichahch5moshoo0zai5eeva7eisi4yae8T',
+ 'EibeeN0fee0Gohnguz8iec6yeigh7shuNg4eingu3siph9joucahpeidoom4ree',
+ 'iejiu3shohheeZahHusheimeefaihoh5eecachu5eeZie9ceisugu9taidohT3U',
+ 'eex6dilakaix5Eetai7xiCh5Jaa8aiD4Ag3tuij1aijohv5fo0heevah8hohs3m',
+ 'ohqueeNgahraew6uraemohtoo5qua3oojiex6ohqu6Aideibaithaiphuriquie',
+ 'cei0eiN4Shiey7Aeluy3unohboo5choiphahc2mahbei5paephaiKeso1thoog1',
+ 'ieghif4ohKequ7ong0jah5ooBah0eiGh1caechahnahThae9Shoo0phopashoo4',
+ 'roh9er3thohwi5am8iequeequuSh3aic0voocai3ihi5nie2abahphupiegh7vu',
+ 'uv3Quei7wujoo5beingei2aish5op4VaiX0aebai7iwoaPee5pei8ko9IepaPig',
+ 'co7aegh5beitheesi9lu7jeeQu3johgeiphee9cheichi8aithuDehu2gaeNein',
+ 'thai3Tiewoo4nuir1ohy4aithiuZ7shae1luuwei5phibohriepe2paeci1Ach8',
+ 'phoi3ribah7ufuvoh8eigh1oB6deeBaiPohphaghiPieshahfah5EiCi3toogoo',
+ 'aiM8geil7ooreinee4Cheiwea4yeec8eeshi7Sei4Shoo3wu6ohkaNgooQu1mai',
+ 'agoo3faciewah9ZeesiXeereek7am0eigaeShie3Tisu8haReeNgoo0ci2Hae5u',
+ 'Aesatheewiedohshaephaenohbooshee8eu7EiJ8isal1laech2eiHo0noaV3ta',
+ 'liunguep3ooChoo4eir8ahSie8eenee0oo1TooXu8Cais8Aimo4eir6Phoo3xei',
+ 'toe9heepeobein3teequachemei0Cejoomef9ujie3ohwae9AiNgiephi3ep0de',
+ 'ua6xooY9uzaeB3of6sheiyaedohoiS5Eev0Aequ9ahm1zoa5Aegh3ooz9ChahDa',
+ 'eevasah6Bu9wi7EiwiequumahkaeCheegh6lui8xoh4eeY4ieneavah8phaibun',
+ 'AhNgei2sioZeeng6phaecheemeehiShie5eFeiTh6ooV8iiphabud0die4siep4',
+ 'kushe6Xieg6ahQuoo9aex3aipheefiec1esa7OhBuG0ueziep9phai5eegh1vie',
+ 'Jie5yu8aafuQuoh9shaep3moboh3Pooy7och8oC6obeik6jaew2aiLooweib3ch',
+ 'ohohjajaivaiRail3odaimei6aekohVaicheip2wu7phieg5Gohsaing2ahxaiy',
+ 'hahzaht6yaiYu9re9jah9loisiit4ahtoh2quoh9xohishioz4oo4phofu3ogha',
+ 'pu4oorea0uh2tahB8aiZoonge1aophaes6ogaiK9ailaigeej4zoVou8ielotee',
+ 'cae2thei3Luphuqu0zeeG8leeZuchahxaicai4ui4Eedohte9uW6gae8Geeh0ea',
+ 'air7tuy7ohw5sho2Tahpai8aep4so5ria7eaShus5weaqu0Naquei2xaeyoo2ae',
+ 'vohge4aeCh7ahwoo7Jaex6sohl0Koong4Iejisei8Coir0iemeiz9uru9Iebaep',
+ 'aepeidie8aiw6waish9gie4Woolae2thuj5phae4phexux7gishaeph4Deu7ooS',
+ 'vahc5ia0xohHooViT0uyuxookiaquu2ogueth0ahquoudeefohshai8aeThahba',
+ 'mun3oagah2eequaenohfoo8DaigeghoozaV2eiveeQuee7kah0quaa6tiesheet',
+ 'ooSet4IdieC4ugow3za0die4ohGoh1oopoh6luaPhaeng4Eechea1hae0eimie5',
+ 'iedeimadaefu2NeiPaey2jooloov5iehiegeakoo4ueso7aeK9ahqu2Thahkaes',
+ 'nahquah9Quuu2uuf0aJah7eishi2siegh8ue5eiJa2EeVu8ebohkepoh4dahNgo',
+ 'io1bie7chioPiej5ae2oohe2fee6ooP2thaeJohjohb9Se8tang3eipaifeimai',
+ 'oungoqu6dieneejiechez1xeD2Zi9iox2Ahchaiy9ithah3ohVoolu2euQuuawo',
+ 'thaew0veigei4neishohd8mecaixuqu7eeshiex1chaigohmoThoghoitoTa0Eo',
+ 'ahroob2phohvaiz0Ohteik2ohtakie6Iu1vitho8IyiyeeleeShae9defaiw9ki',
+ 'DohHoothohzeaxolai3Toh5eJie7ahlah9reF0ohn1chaipoogain2aibahw4no',
+ 'aif8lo5she4aich5cho2rie8ieJaujeem2Joongeedae4vie3tah1Leequaix1O',
+ 'Aang0Shaih6chahthie1ahZ7aewei9thiethee7iuThah3yoongi8ahngiobaa5',
+ 'iephoBuayoothah0Ru6aichai4aiw8deg1umongauvaixai3ohy6oowohlee8ei',
+ 'ohn5shigoameer0aejohgoh8oChohlaecho9jie6shu0ahg9Bohngau6paevei9',
+ 'edahghaishak0paigh1eecuich3aad7yeB0ieD6akeeliem2beifufaekee6eat',
+ 'hiechahgheloh2zo7Ieghaiph0phahhu8aeyuiKie1xeipheech9zai4aeme0ee',
+ 'Cube'
+ ]
+ name = '/' + '/'.join(name_parts)
+
+ # Now check the resulting Alembic file.
+ abcprop = self.abcprop(abc, '%s/.xform' % name)
+ self.assertEqual(abcprop['.vals'], [
+ 1.0, 0.0, 0.0, 0.0,
+ 0.0, 1.0, 0.0, 0.0,
+ 0.0, 0.0, 1.0, 0.0,
+ 0.0, 3.0, 0.0, 1.0,
+ ])
+
+ abcprop = self.abcprop(abc, '%s/CubeShape/.geom' % name)
+ self.assertIn('.faceCounts', abcprop)
+
+
+if __name__ == '__main__':
+ parser = argparse.ArgumentParser()
+ parser.add_argument('--blender', required=True)
+ parser.add_argument('--testdir', required=True)
+ parser.add_argument('--alembic-root', required=True)
+ args, remaining = parser.parse_known_args()
+
+ unittest.main(argv=sys.argv[0:1] + remaining)
diff --git a/tests/python/bl_alembic_import_test.py b/tests/python/bl_alembic_import_test.py
new file mode 100644
index 00000000000..f45748f86e7
--- /dev/null
+++ b/tests/python/bl_alembic_import_test.py
@@ -0,0 +1,268 @@
+# ##### BEGIN GPL LICENSE BLOCK #####
+#
+# This program is free software; you can redistribute it and/or
+# modify it under the terms of the GNU General Public License
+# as published by the Free Software Foundation; either version 2
+# of the License, or (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this program; if not, write to the Free Software Foundation,
+# Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
+#
+# ##### END GPL LICENSE BLOCK #####
+
+# <pep8 compliant>
+
+"""
+./blender.bin --background -noaudio --factory-startup --python tests/python/bl_alembic_import_test.py -- --testdir /path/to/lib/tests/alembic
+"""
+
+import pathlib
+import sys
+import unittest
+
+import bpy
+
+args = None
+
+
+class AbstractAlembicTest(unittest.TestCase):
+ @classmethod
+ def setUpClass(cls):
+ cls.testdir = args.testdir
+
+ def setUp(self):
+ self.assertTrue(self.testdir.exists(),
+ 'Test dir %s should exist' % self.testdir)
+
+ # Make sure we always start with a known-empty file.
+ bpy.ops.wm.open_mainfile(filepath=str(self.testdir / "empty.blend"))
+
+ def assertAlmostEqualFloatArray(self, actual, expect, places=6, delta=None):
+ """Asserts that the arrays of floats are almost equal."""
+
+ self.assertEqual(len(actual), len(expect),
+ 'Actual array has %d items, expected %d' % (len(actual), len(expect)))
+
+ for idx, (act, exp) in enumerate(zip(actual, expect)):
+ self.assertAlmostEqual(act, exp, places=places, delta=delta,
+ msg='%f != %f at index %d' % (act, exp, idx))
+
+
+class SimpleImportTest(AbstractAlembicTest):
+ def test_import_cube_hierarchy(self):
+ res = bpy.ops.wm.alembic_import(
+ filepath=str(self.testdir / "cubes-hierarchy.abc"),
+ as_background_job=False)
+ self.assertEqual({'FINISHED'}, res)
+
+ # The objects should be linked to scene_collection in Blender 2.8,
+ # and to scene in Blender 2.7x.
+ objects = bpy.context.scene.objects
+ self.assertEqual(13, len(objects))
+
+ # Test the hierarchy.
+ self.assertIsNone(objects['Cube'].parent)
+ self.assertEqual(objects['Cube'], objects['Cube_001'].parent)
+ self.assertEqual(objects['Cube'], objects['Cube_002'].parent)
+ self.assertEqual(objects['Cube'], objects['Cube_003'].parent)
+ self.assertEqual(objects['Cube_003'], objects['Cube_004'].parent)
+ self.assertEqual(objects['Cube_003'], objects['Cube_005'].parent)
+ self.assertEqual(objects['Cube_003'], objects['Cube_006'].parent)
+
+ def test_inherit_or_not(self):
+ res = bpy.ops.wm.alembic_import(
+ filepath=str(self.testdir / "T52022-inheritance.abc"),
+ as_background_job=False)
+ self.assertEqual({'FINISHED'}, res)
+
+ # The objects should be linked to scene_collection in Blender 2.8,
+ # and to scene in Blender 2.7x.
+ objects = bpy.context.scene.objects
+
+ # ABC parent is top-level object, which translates to nothing in Blender
+ self.assertIsNone(objects['locator1'].parent)
+
+ # ABC parent is locator1, but locator2 has "inherits Xforms" = false, which
+ # translates to "no parent" in Blender.
+ self.assertIsNone(objects['locator2'].parent)
+
+ # Shouldn't have inherited the ABC parent's transform.
+ x, y, z = objects['locator2'].matrix_world.to_translation()
+ self.assertAlmostEqual(0, x)
+ self.assertAlmostEqual(0, y)
+ self.assertAlmostEqual(2, z)
+
+ # ABC parent is inherited and translates to normal parent in Blender.
+ self.assertEqual(objects['locator2'], objects['locatorShape2'].parent)
+
+ # Should have inherited its ABC parent's transform.
+ x, y, z = objects['locatorShape2'].matrix_world.to_translation()
+ self.assertAlmostEqual(0, x)
+ self.assertAlmostEqual(0, y)
+ self.assertAlmostEqual(2, z)
+
+
+ def test_select_after_import(self):
+ # Add a sphere, so that there is something in the scene, selected, and active,
+ # before we do the Alembic import.
+ bpy.ops.mesh.primitive_uv_sphere_add()
+ sphere = bpy.context.active_object
+ self.assertEqual('Sphere', sphere.name)
+ self.assertEqual([sphere], bpy.context.selected_objects)
+
+ bpy.ops.wm.alembic_import(
+ filepath=str(self.testdir / "cubes-hierarchy.abc"),
+ as_background_job=False)
+
+ # The active object is probably the first one that was imported, but this
+ # behaviour is not defined. At least it should be one of the cubes, and
+ # not the sphere.
+ self.assertNotEqual(sphere, bpy.context.active_object)
+ self.assertTrue('Cube' in bpy.context.active_object.name)
+
+ # All cubes should be selected, but the sphere shouldn't be.
+ for ob in bpy.data.objects:
+ self.assertEqual('Cube' in ob.name, ob.select)
+
+ def test_change_path_constraint(self):
+ import math
+
+ fname = 'cube-rotating1.abc'
+ abc = self.testdir / fname
+ relpath = bpy.path.relpath(str(abc))
+
+ res = bpy.ops.wm.alembic_import(filepath=str(abc), as_background_job=False)
+ self.assertEqual({'FINISHED'}, res)
+ cube = bpy.context.active_object
+
+ # Check that the file loaded ok.
+ bpy.context.scene.frame_set(10)
+ x, y, z = cube.matrix_world.to_euler('XYZ')
+ self.assertAlmostEqual(x, 0)
+ self.assertAlmostEqual(y, 0)
+ self.assertAlmostEqual(z, math.pi / 2, places=5)
+
+ # Change path from absolute to relative. This should not break the animation.
+ bpy.context.scene.frame_set(1)
+ bpy.data.cache_files[fname].filepath = relpath
+ bpy.context.scene.frame_set(10)
+
+ x, y, z = cube.matrix_world.to_euler('XYZ')
+ self.assertAlmostEqual(x, 0)
+ self.assertAlmostEqual(y, 0)
+ self.assertAlmostEqual(z, math.pi / 2, places=5)
+
+ # Replace the Alembic file; this should apply new animation.
+ bpy.data.cache_files[fname].filepath = relpath.replace('1.abc', '2.abc')
+ bpy.context.scene.update()
+
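+        # Only check the swapped-in animation with the legacy depsgraph; the new
+        # depsgraph presumably does not re-evaluate the constraint here yet.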
+ if args.with_legacy_depsgraph:
+ bpy.context.scene.frame_set(10)
+
+ x, y, z = cube.matrix_world.to_euler('XYZ')
+ self.assertAlmostEqual(x, math.pi / 2, places=5)
+ self.assertAlmostEqual(y, 0)
+ self.assertAlmostEqual(z, 0)
+
+ def test_change_path_modifier(self):
+ import math
+
+ fname = 'animated-mesh.abc'
+ abc = self.testdir / fname
+ relpath = bpy.path.relpath(str(abc))
+
+ res = bpy.ops.wm.alembic_import(filepath=str(abc), as_background_job=False)
+ self.assertEqual({'FINISHED'}, res)
+ cube = bpy.context.active_object
+
+ # Check that the file loaded ok.
+ bpy.context.scene.frame_set(6)
+ self.assertAlmostEqual(-1, cube.data.vertices[0].co.x)
+ self.assertAlmostEqual(-1, cube.data.vertices[0].co.y)
+ self.assertAlmostEqual(0.5905638933181763, cube.data.vertices[0].co.z)
+
+ # Change path from absolute to relative. This should not break the animation.
+ bpy.context.scene.frame_set(1)
+ bpy.data.cache_files[fname].filepath = relpath
+ bpy.context.scene.frame_set(6)
+
+ self.assertAlmostEqual(1, cube.data.vertices[3].co.x)
+ self.assertAlmostEqual(1, cube.data.vertices[3].co.y)
+ self.assertAlmostEqual(0.5905638933181763, cube.data.vertices[3].co.z)
+
+ def test_import_long_names(self):
+ # This file contains very long names. The longest name is 4047 chars.
+ bpy.ops.wm.alembic_import(
+ filepath=str(self.testdir / "long-names.abc"),
+ as_background_job=False)
+
+ self.assertIn('Cube', bpy.data.objects)
+ self.assertEqual('CubeShape', bpy.data.objects['Cube'].data.name)
+
+
+class VertexColourImportTest(AbstractAlembicTest):
+ def test_import_from_houdini(self):
+ # Houdini saved "face-varying", and as RGB.
+ res = bpy.ops.wm.alembic_import(
+ filepath=str(self.testdir / "vertex-colours-houdini.abc"),
+ as_background_job=False)
+ self.assertEqual({'FINISHED'}, res)
+
+ ob = bpy.context.active_object
+ layer = ob.data.vertex_colors['Cf'] # MeshLoopColorLayer
+
+ # Test some known-good values.
+ self.assertAlmostEqualFloatArray(layer.data[0].color, (0, 0, 0))
+ self.assertAlmostEqualFloatArray(layer.data[98].color, (0.9019607, 0.4745098, 0.2666666))
+ self.assertAlmostEqualFloatArray(layer.data[99].color, (0.8941176, 0.4705882, 0.2627451))
+
+ def test_import_from_blender(self):
+ # Blender saved per-vertex, and as RGBA.
+ res = bpy.ops.wm.alembic_import(
+ filepath=str(self.testdir / "vertex-colours-blender.abc"),
+ as_background_job=False)
+ self.assertEqual({'FINISHED'}, res)
+
+ ob = bpy.context.active_object
+ layer = ob.data.vertex_colors['Cf'] # MeshLoopColorLayer
+
+ # Test some known-good values.
+ self.assertAlmostEqualFloatArray(layer.data[0].color, (1.0, 0.0156862, 0.3607843))
+ self.assertAlmostEqualFloatArray(layer.data[98].color, (0.0941176, 0.1215686, 0.9137254))
+ self.assertAlmostEqualFloatArray(layer.data[99].color, (0.1294117, 0.3529411, 0.7529411))
+
+
+def main():
+ global args
+ import argparse
+
+ if '--' in sys.argv:
+ argv = [sys.argv[0]] + sys.argv[sys.argv.index('--') + 1:]
+ else:
+ argv = sys.argv
+
+ parser = argparse.ArgumentParser()
+ parser.add_argument('--testdir', required=True, type=pathlib.Path)
+ parser.add_argument('--with-legacy-depsgraph', default=False,
+ type=lambda v: v in {'ON', 'YES', 'TRUE'})
+ args, remaining = parser.parse_known_args(argv)
+
+ unittest.main(argv=remaining)
+
+
+if __name__ == "__main__":
+ import traceback
+ # So a python error exits Blender itself too
+ try:
+ main()
+ except SystemExit:
+ raise
+ except:
+ traceback.print_exc()
+ sys.exit(1)
diff --git a/tests/python/bl_keymap_completeness.py b/tests/python/bl_keymap_completeness.py
index 00322907f69..652ed449a3c 100644
--- a/tests/python/bl_keymap_completeness.py
+++ b/tests/python/bl_keymap_completeness.py
@@ -80,5 +80,6 @@ def main():
import sys
sys.exit(1)
+
if __name__ == "__main__":
main()
diff --git a/tests/python/bl_load_py_modules.py b/tests/python/bl_load_py_modules.py
index c13679d16f0..7ffececd1d9 100644
--- a/tests/python/bl_load_py_modules.py
+++ b/tests/python/bl_load_py_modules.py
@@ -36,6 +36,9 @@ BLACKLIST = {
"cycles",
"io_export_dxf", # TODO, check on why this fails
'io_import_dxf', # Because of cydxfentity.so dependency
+
+ # The unpacked wheel is only loaded when actually used, not directly on import:
+ os.path.join("io_blend_utils", "blender_bam-unpacked.whl"),
}
# Some modules need to add to the `sys.path`.
@@ -175,15 +178,28 @@ def load_modules():
for f in MODULE_SYS_PATHS.get(mod_name_full, ())
])
- __import__(mod_name_full)
- mod_imp = sys.modules[mod_name_full]
-
- sys.path[:] = sys_path_back
-
- # check we load what we ask for.
- assert(os.path.samefile(mod_imp.__file__, submod_full))
-
- modules.append(mod_imp)
+ try:
+ __import__(mod_name_full)
+ mod_imp = sys.modules[mod_name_full]
+
+ sys.path[:] = sys_path_back
+
+ # check we load what we ask for.
+ assert(os.path.samefile(mod_imp.__file__, submod_full))
+
+ modules.append(mod_imp)
+            except Exception:
+                import traceback
+                # The module might fail to import, but we don't want the whole test to fail here.
+                # Reasoning:
+                # - This module might be in the ignored list (for example, a preset or template),
+                #   so failing here would cause a false-positive test failure.
+                # - If this is a module which should not be ignored, it is not added to the list
+                #   of successfully loaded modules, so the test will still catch the import failure.
+                # - We want to catch all failures of this script instead of stopping on the
+                #   first big failure.
+                traceback.print_exc()
#
# check which filepaths we didn't load
@@ -211,11 +227,10 @@ def load_modules():
[(os.sep + f + ".py") for f in BLACKLIST])
for f in source_files:
- ok = False
for ignore in ignore_paths:
if ignore in f:
- ok = True
- if not ok:
+ break
+ else:
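+            # for-else: reached only when no ignore path matched (the loop did not break).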
raise Exception("Source file %r not loaded in test" % f)
print("loaded %d modules" % len(loaded_files))
diff --git a/tests/python/bl_pyapi_idprop_datablock.py b/tests/python/bl_pyapi_idprop_datablock.py
new file mode 100644
index 00000000000..4acfb83bd95
--- /dev/null
+++ b/tests/python/bl_pyapi_idprop_datablock.py
@@ -0,0 +1,338 @@
+# ##### BEGIN GPL LICENSE BLOCK #####
+#
+# This program is free software; you can redistribute it and/or
+# modify it under the terms of the GNU General Public License
+# as published by the Free Software Foundation; either version 2
+# of the License, or (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this program; if not, write to the Free Software Foundation,
+# Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
+#
+# ##### END GPL LICENSE BLOCK #####
+
+import bpy
+import sys
+import os
+import tempfile
+import traceback
+import inspect
+from bpy.types import UIList
+
+arr_len = 100
+ob_cp_count = 100
+lib_path = os.path.join(tempfile.gettempdir(), "lib.blend")
+test_path = os.path.join(tempfile.gettempdir(), "test.blend")
+
+
+def print_fail_msg_and_exit(msg):
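+    # Report the file and line of the failing check by walking three frames up the
+    # stack (__LINE__ -> this function -> abort_if_false -> the test code, assuming
+    # the call comes through abort_if_false), then exit hard so the failure cannot
+    # be swallowed by Blender.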
+ def __LINE__():
+ try:
+ raise Exception
+ except:
+ return sys.exc_info()[2].tb_frame.f_back.f_back.f_back.f_lineno
+
+ def __FILE__():
+ return inspect.currentframe().f_code.co_filename
+
+ print("'%s': %d >> %s" % (__FILE__(), __LINE__(), msg), file=sys.stderr)
+ sys.stderr.flush()
+ sys.stdout.flush()
+ os._exit(1)
+
+
+def abort_if_false(expr, msg=None):
+ if not expr:
+ if not msg:
+ msg = "test failed"
+ print_fail_msg_and_exit(msg)
+
+
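+# PropertyGroup holding a datablock pointer plus a name; used both in
+# CollectionProperty and PointerProperty registrations throughout these tests.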
+class TestClass(bpy.types.PropertyGroup):
+ test_prop = bpy.props.PointerProperty(type=bpy.types.Object)
+ name = bpy.props.StringProperty()
+
+
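+# Find a scene by name, optionally restricted to a specific library
+# (lib_name=None means a local, non-linked scene).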
+def get_scene(lib_name, sce_name):
+ for s in bpy.data.scenes:
+ if s.name == sce_name:
+            if (s.library and s.library.name == lib_name) or \
+                    (lib_name is None and s.library is None):
+ return s
+
+
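+# Assert that calling fnc raises an exception (i.e. the operation is expected to
+# be rejected); the test aborts if the call unexpectedly succeeds.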
+def check_crash(fnc, args=None):
+ try:
+ fnc(args) if args else fnc()
+ except:
+ return
+ print_fail_msg_and_exit("test failed")
+
+
+def init():
+ bpy.utils.register_class(TestClass)
+ bpy.types.Object.prop_array = bpy.props.CollectionProperty(
+ name="prop_array",
+ type=TestClass)
+ bpy.types.Object.prop = bpy.props.PointerProperty(type=bpy.types.Object)
+
+
+def make_lib():
+ bpy.ops.wm.read_factory_settings()
+
+ # datablock pointer to the Camera object
+ bpy.data.objects["Cube"].prop = bpy.data.objects['Camera']
+
+ # array of datablock pointers to the Lamp object
+ for i in range(0, arr_len):
+ a = bpy.data.objects["Cube"].prop_array.add()
+ a.test_prop = bpy.data.objects['Lamp']
+ a.name = a.test_prop.name
+
+    # make a uniquely named copy of the cube
+ ob = bpy.data.objects["Cube"].copy()
+ bpy.context.scene.objects.link(ob)
+
+ bpy.data.objects["Cube.001"].name = "Unique_Cube"
+
+    # duplicate the Cube ob_cp_count times
+ for i in range(0, ob_cp_count):
+ ob = bpy.data.objects["Cube"].copy()
+ bpy.context.scene.objects.link(ob)
+
+ # nodes
+ bpy.data.scenes["Scene"].use_nodes = True
+ bpy.data.scenes["Scene"].node_tree.nodes['Render Layers']["prop"] =\
+ bpy.data.objects['Camera']
+
+ # rename scene and save
+ bpy.data.scenes["Scene"].name = "Scene_lib"
+ bpy.ops.wm.save_as_mainfile(filepath=lib_path)
+
+
+def check_lib():
+ # check pointer
+ abort_if_false(bpy.data.objects["Cube"].prop == bpy.data.objects['Camera'])
+
+ # check array of pointers in duplicated object
+ for i in range(0, arr_len):
+ abort_if_false(bpy.data.objects["Cube.001"].prop_array[i].test_prop ==
+ bpy.data.objects['Lamp'])
+
+
+def check_lib_linking():
+ # open startup file
+ bpy.ops.wm.read_factory_settings()
+
+ # link scene to the startup file
+ with bpy.data.libraries.load(lib_path, link=True) as (data_from, data_to):
+ data_to.scenes = ["Scene_lib"]
+
+ o = bpy.data.scenes["Scene_lib"].objects['Unique_Cube']
+
+ abort_if_false(o.prop_array[0].test_prop == bpy.data.scenes["Scene_lib"].objects['Lamp'])
+ abort_if_false(o.prop == bpy.data.scenes["Scene_lib"].objects['Camera'])
+ abort_if_false(o.prop.library == o.library)
+
+ bpy.ops.wm.save_as_mainfile(filepath=test_path)
+
+
+def check_linked_scene_copying():
+ # full copy of the scene with datablock props
+ bpy.ops.wm.open_mainfile(filepath=test_path)
+ bpy.data.screens['Default'].scene = bpy.data.scenes["Scene_lib"]
+ bpy.ops.scene.new(type='FULL_COPY')
+
+ # check save/open
+ bpy.ops.wm.save_as_mainfile(filepath=test_path)
+ bpy.ops.wm.open_mainfile(filepath=test_path)
+
+ intern_sce = get_scene(None, "Scene_lib")
+ extern_sce = get_scene("Lib", "Scene_lib")
+
+ # check node's props
+    # we made a full copy of the linked scene, so the pointers must be equal
+ abort_if_false(intern_sce.node_tree.nodes['Render Layers']["prop"] and
+ intern_sce.node_tree.nodes['Render Layers']["prop"] ==
+ extern_sce.node_tree.nodes['Render Layers']["prop"])
+
+
+def check_scene_copying():
+ # full copy of the scene with datablock props
+ bpy.ops.wm.open_mainfile(filepath=lib_path)
+ bpy.data.screens['Default'].scene = bpy.data.scenes["Scene_lib"]
+ bpy.ops.scene.new(type='FULL_COPY')
+
+ path = test_path + "_"
+ # check save/open
+ bpy.ops.wm.save_as_mainfile(filepath=path)
+ bpy.ops.wm.open_mainfile(filepath=path)
+
+ first_sce = get_scene(None, "Scene_lib")
+ second_sce = get_scene(None, "Scene_lib.001")
+
+ # check node's props
+ # must point to own scene camera
+ abort_if_false(not (first_sce.node_tree.nodes['Render Layers']["prop"] ==
+ second_sce.node_tree.nodes['Render Layers']["prop"]))
+
+
+# count users
+def test_users_counting():
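+    # Assigning a datablock to an ID property should add a user on that datablock;
+    # overwriting the property should release that user again.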
+ bpy.ops.wm.read_factory_settings()
+ lamp_us = bpy.data.objects["Lamp"].data.users
+ n = 1000
+ for i in range(0, n):
+ bpy.data.objects["Cube"]["a%s" % i] = bpy.data.objects["Lamp"].data
+ abort_if_false(bpy.data.objects["Lamp"].data.users == lamp_us + n)
+
+ for i in range(0, int(n / 2)):
+ bpy.data.objects["Cube"]["a%s" % i] = 1
+ abort_if_false(bpy.data.objects["Lamp"].data.users == lamp_us + int(n / 2))
+
+
+# linking
+def test_linking():
+ make_lib()
+ check_lib()
+ check_lib_linking()
+ check_linked_scene_copying()
+ check_scene_copying()
+
+
+# check restrictions for datablock pointers for some classes; GUI for manual testing
+def test_restrictions1():
+ class TEST_Op(bpy.types.Operator):
+ bl_idname = 'scene.test_op'
+ bl_label = 'Test'
+ bl_options = {"INTERNAL"}
+ str_prop = bpy.props.StringProperty(name="str_prop")
+
+ # disallow registration of datablock properties in operators
+ # will be checked in the draw method (test manually)
+ # also, see console:
+ # ValueError: bpy_struct "SCENE_OT_test_op" doesn't support datablock properties
+ id_prop = bpy.props.PointerProperty(type=bpy.types.Object)
+
+ def execute(self, context):
+ return {'FINISHED'}
+
+ # just panel for testing the poll callback with lots of objects
+ class TEST_PT_DatablockProp(bpy.types.Panel):
+ bl_label = "Datablock IDProp"
+ bl_space_type = "PROPERTIES"
+ bl_region_type = "WINDOW"
+ bl_context = "render"
+
+ def draw(self, context):
+ self.layout.prop_search(context.scene, "prop", bpy.data,
+ "objects")
+ self.layout.template_ID(context.scene, "prop1")
+ self.layout.prop_search(context.scene, "prop2", bpy.data, "node_groups")
+
+ op = self.layout.operator("scene.test_op")
+ op.str_prop = "test string"
+
+ def test_fnc(op):
+ op["ob"] = bpy.data.objects['Unique_Cube']
+ check_crash(test_fnc, op)
+ abort_if_false(not hasattr(op, "id_prop"))
+
+ bpy.utils.register_class(TEST_PT_DatablockProp)
+ bpy.utils.register_class(TEST_Op)
+
+ def poll(self, value):
+ return value.name in bpy.data.scenes["Scene_lib"].objects
+
+ def poll1(self, value):
+ return True
+
+ bpy.types.Scene.prop = bpy.props.PointerProperty(type=bpy.types.Object)
+ bpy.types.Scene.prop1 = bpy.props.PointerProperty(type=bpy.types.Object, poll=poll)
+ bpy.types.Scene.prop2 = bpy.props.PointerProperty(type=bpy.types.NodeTree, poll=poll1)
+
+ # check poll effect on UI (poll returns false => red alert)
+ bpy.context.scene.prop = bpy.data.objects["Lamp.001"]
+ bpy.context.scene.prop1 = bpy.data.objects["Lamp.001"]
+
+ # check incorrect type assignment
+ def sub_test():
+ # NodeTree id_prop
+ bpy.context.scene.prop2 = bpy.data.objects["Lamp.001"]
+
+ check_crash(sub_test)
+
+ bpy.context.scene.prop2 = bpy.data.node_groups.new("Shader", "ShaderNodeTree")
+
+    print("Please test GUI performance manually on the Render tab, '%s' panel" %
+ TEST_PT_DatablockProp.bl_label, file=sys.stderr)
+ sys.stderr.flush()
+
+
+# check some possible regressions
+def test_regressions():
+ bpy.types.Object.prop_str = bpy.props.StringProperty(name="str")
+ bpy.data.objects["Unique_Cube"].prop_str = "test"
+
+ bpy.types.Object.prop_gr = bpy.props.PointerProperty(
+ name="prop_gr",
+ type=TestClass,
+ description="test")
+
+ bpy.data.objects["Unique_Cube"].prop_gr = None
+
+
+# test restrictions for datablock pointers
+def test_restrictions2():
+ class TestClassCollection(bpy.types.PropertyGroup):
+ prop = bpy.props.CollectionProperty(
+ name="prop_array",
+ type=TestClass)
+ bpy.utils.register_class(TestClassCollection)
+
+ class TestPrefs(bpy.types.AddonPreferences):
+ bl_idname = "testprefs"
+ # expecting crash during registering
+ my_prop2 = bpy.props.PointerProperty(type=TestClass)
+
+ prop = bpy.props.PointerProperty(
+ name="prop",
+ type=TestClassCollection,
+ description="test")
+
+ bpy.types.Addon.a = bpy.props.PointerProperty(type=bpy.types.Object)
+
+ class TestUIList(UIList):
+ test = bpy.props.PointerProperty(type=bpy.types.Object)
+ def draw_item(self, context, layout, data, item, icon, active_data, active_propname, index):
+ layout.prop(item, "name", text="", emboss=False, icon_value=icon)
+
+ check_crash(bpy.utils.register_class, TestPrefs)
+ check_crash(bpy.utils.register_class, TestUIList)
+
+ bpy.utils.unregister_class(TestClassCollection)
+
+
+def main():
+ init()
+ test_users_counting()
+ test_linking()
+ test_restrictions1()
+ check_crash(test_regressions)
+ test_restrictions2()
+
+
+if __name__ == "__main__":
+ try:
+ main()
+ except:
+ import traceback
+
+ traceback.print_exc()
+ sys.stderr.flush()
+ os._exit(1)
diff --git a/tests/python/pep8.py b/tests/python/pep8.py
index 0e6250f534b..dde4250f6aa 100644
--- a/tests/python/pep8.py
+++ b/tests/python/pep8.py
@@ -178,5 +178,6 @@ def main():
"--max-line-length=1000"
" '%s'" % f)
+
if __name__ == "__main__":
main()
diff --git a/tests/python/rna_info_dump.py b/tests/python/rna_info_dump.py
index c26d94a1246..da228e52652 100644
--- a/tests/python/rna_info_dump.py
+++ b/tests/python/rna_info_dump.py
@@ -127,5 +127,6 @@ def api_dump(use_properties=True, use_functions=True):
print("END")
+
if __name__ == "__main__":
api_dump()