1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
|
# ##### BEGIN GPL LICENSE BLOCK #####
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software Foundation,
# Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
#
# ##### END GPL LICENSE BLOCK #####
# <pep8 compliant>
# Public API of this module: only these two helpers are exported via
# ``from <module> import *``; everything else is an implementation detail.
__all__ = (
    "property_definition_from_data_path",
    "decompose_data_path",
)
class _TokenizeDataPath:
"""
Class to split up tokens of a data-path.
Note that almost all access generates new objects with additional paths,
with the exception of iteration which is the intended way to access the resulting data."""
__slots__ = (
"data_path",
)
def __init__(self, attrs):
self.data_path = attrs
def __getattr__(self, attr):
return _TokenizeDataPath(self.data_path + ((".%s" % attr),))
def __getitem__(self, key):
return _TokenizeDataPath(self.data_path + (("[%r]" % (key,)),))
def __call__(self, *args, **kw):
value_str = ", ".join([
val for val in (
", ".join(repr(value) for value in args),
", ".join(["%s=%r" % (key, value) for key, value in kw.items()]),
) if val])
return _TokenizeDataPath(self.data_path + ('(%s)' % value_str, ))
def __iter__(self):
return iter(self.data_path)
def decompose_data_path(data_path):
    """
    Return the components of a data path split into a list.
    """
    # Evaluate the path against a token-collecting proxy: every attribute,
    # subscript and call performed on ``base`` records a token instead of
    # doing any real work.
    # NOTE(review): this uses ``eval`` on the supplied path — callers must
    # only pass trusted input.
    scope = {"base": _TokenizeDataPath(())}
    tokens = eval("base" + data_path, scope, scope)
    return [token for token in tokens]
def property_definition_from_data_path(base, data_path):
    """
    Return an RNA property definition from an object and a data path.

    In Blender this is often used with ``context`` as the base and a
    path that it references, for example ``.space_data.lock_camera``.
    """
    tokens = decompose_data_path(data_path)

    # Drop trailing subscript/call tokens so the path ends on an attribute.
    while tokens:
        if tokens[-1].startswith("."):
            break
        tokens.pop()

    # Need at least one leading component plus the final attribute token,
    # and that final token must be an attribute access.
    if len(tokens) < 2 or not tokens[-1].startswith("."):
        return None

    head = "".join(tokens[:-1])
    tail = tokens[-1]

    # Resolve everything up to (but excluding) the final attribute;
    # ``base`` is referenced by the evaluated expression.
    value_head = eval("base" + head)
    if getattr(value_head, "bl_rna", None) is None:
        return None

    # Look up the final attribute (minus its leading ".") as an RNA property.
    prop = value_head.bl_rna.properties.get(tail[1:])
    if not prop:
        return None
    return prop
|