# SPDX-License-Identifier: GPL-2.0-or-later

# Use for validating our manual interlinking.
#  ./blender.bin --background -noaudio --python tests/python/bl_rna_manual_reference.py
#
# 1) test_data()              -- ensure the data we have is in the correct format
# 2) test_lookup_coverage()   -- ensure that we have lookups for _every_ RNA path
# 3) test_urls()              -- ensure all the URLs are valid
# 4) test_language_coverage() -- ensure the language lookup table is complete
#
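# Set the LOCAL_PREFIX environment variable to the root of a local manual
# build to check pages on disk instead of fetching each URL over the network,
# e.g. (hypothetical path):
#
#   LOCAL_PREFIX=/path/to/blender_manual/build/html
#
# Each entry in rna_manual_reference.url_manual_mapping pairs an RNA path
# with a manual URL suffix, along these lines (hypothetical entry):
#
#   ("bpy.types.Scene.camera", "scene_layout/scene/properties.html#camera")
#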

import bpy


def test_data():
    import rna_manual_reference

    assert isinstance(rna_manual_reference.url_manual_mapping, tuple)
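    # Every entry must be a 2-tuple of strings: (RNA identifier, URL suffix).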
    for i, value in enumerate(rna_manual_reference.url_manual_mapping):
        try:
            assert len(value) == 2
            assert isinstance(value[0], str)
            assert isinstance(value[1], str)
        except Exception:
            print("Expected a tuple of 2 strings, instead item %d is %s: %r" % (i, type(value), value))
            import traceback
            traceback.print_exc()
            raise


# A stripped-down version of api_dump() in rna_info_dump.py.
def test_lookup_coverage():

    def rna_ids():
        import rna_info
        struct = rna_info.BuildRNAInfo()[0]
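        # Yield a (struct_path, property_path) pair for every property of
        # every RNA struct.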
        for struct_id, v in sorted(struct.items()):
            props = [(prop.identifier, prop) for prop in v.properties]
            struct_path = "bpy.types.%s" % struct_id[1]
            for prop_id, prop in props:
                yield (struct_path, "%s.%s" % (struct_path, prop_id))

        # Also yield every operator, addressed as bpy.ops.<submodule>.<operator>.
        for submod_id in dir(bpy.ops):
            op_path = "bpy.ops.%s" % submod_id
            for op_id in dir(getattr(bpy.ops, submod_id)):
                yield (op_path, "%s.%s" % (op_path, op_id))

    # check coverage
    from bl_operators import wm
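    # WM_OT_doc_view_manual is the operator behind the "Online Manual" entry
    # in the context menu; using its lookup keeps this test in sync with the UI.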

    set_group_all = set()
    set_group_doc = set()

    for rna_group, rna_id in rna_ids():
        url = wm.WM_OT_doc_view_manual._lookup_rna_url(rna_id, verbose=False)
        print(rna_id, "->", url)

        set_group_all.add(rna_group)
        if url is not None:
            set_group_doc.add(rna_group)

    # finally report undocumented groups
    print("")
    print("---------------------")
    print("Undocumented Sections")

    for rna_group in sorted(set_group_all):
        if rna_group not in set_group_doc:
            print("%s.*" % rna_group)


def test_language_coverage():
    pass  # TODO


def test_urls():
    import os
    import sys
    import rna_manual_reference

    import urllib.error
    from urllib.request import urlopen

    # Avoid network lookups when a local manual build is available.
    LOCAL_PREFIX = os.environ.get("LOCAL_PREFIX")
    if not LOCAL_PREFIX:
        prefix = rna_manual_reference.url_manual_prefix

    # Multiple RNA paths may map to the same page, so de-duplicate the suffixes.
    urls = {suffix for (rna_id, suffix) in rna_manual_reference.url_manual_mapping}

    urls_len = "%d" % len(urls)
    print("")
    print("-------------" + "-" * len(urls_len))
    print("Testing URLS %s" % urls_len)
    print("")

    color_red = '\033[0;31m'
    color_green = '\033[1;32m'
    color_normal = '\033[0m'

    urls_fail = []

    if LOCAL_PREFIX:
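        # A local build exists: check that each page is present on disk.
        # The "#anchor" fragment is stripped, so anchors are not validated.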
        for url in sorted(urls):
            url_full = os.path.join(LOCAL_PREFIX, url.partition("#")[0])
            print("  %s ... " % url_full, end="")
            if os.path.exists(url_full):
                print(color_green + "OK" + color_normal)
            else:
                print(color_red + "FAIL!" + color_normal)
                urls_fail.append(url)
    else:
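        # No local build: fetch each URL and record any that fail.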
        for url in sorted(urls):
            url_full = prefix + url
            print("  %s ... " % url_full, end="")
            sys.stdout.flush()
            try:
                # Open and immediately close the connection; only
                # reachability is tested here.
                with urlopen(url_full):
                    pass
                print(color_green + "OK" + color_normal)
            except urllib.error.URLError:
                # URLError also covers HTTPError, so connection failures and
                # HTTP error statuses are both reported.
                print(color_red + "FAIL!" + color_normal)
                urls_fail.append(url)

    if urls_fail:
        urls_len = "%d" % len(urls_fail)
        print("")
        print("------------" + "-" * len(urls_len))
        print("Failed URLS %s" % urls_len)
        print("")
        for url in urls_fail:
            print("  %s%s%s" % (color_red, url, color_normal))


def main():
    test_data()
    test_lookup_coverage()
    test_language_coverage()
    test_urls()


if __name__ == "__main__":
    main()