#!/usr/bin/env python
"""
Gather and consolidate the up-to-date dependencies required to install Certbot on various
Linux distributions. This script generates a requirements file containing the pinned and
hashed versions of these dependencies, ready to be used by pip to install them.

This script is typically used to update the dependency-requirements.txt file of certbot-auto.

To achieve its purpose, this script starts a Certbot installation with unpinned dependencies
on various distributions run as Docker containers, then gathers the dependency versions that
pip resolved on each of them.

Usage: letsencrypt-auto-source/rebuild_dependencies.py new_requirements.txt

NB1: Docker must be installed on the machine running this script.
NB2: The Python library 'hashin' must be installed on the machine running this script.
"""
from __future__ import print_function
import re
import shutil
import subprocess
import tempfile
import os
from os.path import dirname, abspath, join
import sys
import argparse

# The list of Docker distributions to test dependencies against.
DISTRIBUTION_LIST = [
    'ubuntu:18.04', 'ubuntu:16.04',
    'debian:stretch', 'debian:jessie',
    'centos:7', 'centos:6',
    'opensuse/leap:15',
    'fedora:29',
]

# These constraints will be added while gathering dependencies on each distribution.
# They can be used when a particular version of a package is required for some reason,
# or to solve a version conflict between the requirements of two distributions.
AUTHORITATIVE_CONSTRAINTS = {
    # Using an older version of mock here prevents regressions of #5276.
    'mock': '1.3.0',
    # Too touchy to move to a new version. And will be removed soon
    # in favor of pure python parser for Apache.
    'python-augeas': '0.5.0',
    # Package enum34 needs to be explicitly limited to Python2.x, in order to avoid
    # certbot-auto failures on Python 3.6+ which enum34 doesn't support. See #5456.
    # TODO: hashin seems to overwrite environment markers in dependencies. This needs to be fixed.
    'enum34': '1.1.6 ; python_version < \'3.4\'',
}
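
# For illustration, these constraints are serialized later in this script into pip's
# constraints file format (one 'package==version' line each) and mounted into each
# container as /tmp/constraints.txt, producing lines like:
#   mock==1.3.0
#   python-augeas==0.5.0
#   enum34==1.1.6 ; python_version < '3.4'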


# ./certbot/letsencrypt-auto-source/rebuild_dependencies.py (2 levels from certbot root path)
CERTBOT_REPO_PATH = dirname(dirname(abspath(__file__)))

# The following shell script is used to gather dependencies for a given distribution.
#   - certbot-auto is used to install the relevant OS packages and set up an initial venv
#   - then this venv is used to consistently construct an empty new venv
#   - once pipstrapped, this new venv pip-installs the certbot runtime (including the
#     apache/nginx plugins) without pinned dependencies, while respecting the input
#     authoritative constraints
#   - `certbot plugins` is called to check that we have a healthy environment
#   - finally, the current set of dependencies is extracted from the container using pip freeze
SCRIPT = r"""#!/bin/sh
set -e

cd /tmp/certbot
letsencrypt-auto-source/letsencrypt-auto --install-only -n
PYVER=`/opt/eff.org/certbot/venv/bin/python --version 2>&1 | cut -d" " -f 2 | cut -d. -f1,2 | sed 's/\.//'`

/opt/eff.org/certbot/venv/bin/python letsencrypt-auto-source/pieces/create_venv.py /tmp/venv "$PYVER" 1

/tmp/venv/bin/python letsencrypt-auto-source/pieces/pipstrap.py
/tmp/venv/bin/pip install -e acme -e certbot -e certbot-apache -e certbot-nginx -c /tmp/constraints.txt
/tmp/venv/bin/certbot plugins
/tmp/venv/bin/pip freeze >> /tmp/workspace/requirements.txt
"""


def _read_from(file):
    """Read all content of the file, and return it as a string."""
    with open(file, 'r') as file_h:
        return file_h.read()


def _write_to(file, content):
    """Write given string content to the file, overwriting its initial content."""
    with open(file, 'w') as file_h:
        file_h.write(content)


def _requirements_from_one_distribution(distribution, verbose):
    """
    Calculate the Certbot dependencies as expressed for the given distribution, using the
    official Docker image for this distribution, and return the lines of the generated
    requirements file.
    """
    print('===> Gathering dependencies for {0}.'.format(distribution))
    workspace = tempfile.mkdtemp()
    script = join(workspace, 'script.sh')
    authoritative_constraints = join(workspace, 'constraints.txt')
    cid_file = join(workspace, 'cid')

    try:
        _write_to(script, SCRIPT)
        os.chmod(script, 0o755)

        _write_to(authoritative_constraints, '\n'.join(
            ['{0}=={1}'.format(package, version) for package, version in AUTHORITATIVE_CONSTRAINTS.items()]))

        command = ['docker', 'run', '--rm', '--cidfile', cid_file,
                   '-v', '{0}:/tmp/certbot'.format(CERTBOT_REPO_PATH),
                   '-v', '{0}:/tmp/workspace'.format(workspace),
                   '-v', '{0}:/tmp/constraints.txt'.format(authoritative_constraints),
                   distribution, '/tmp/workspace/script.sh']
        sub_stdout = sys.stdout if verbose else subprocess.PIPE
        sub_stderr = sys.stderr if verbose else subprocess.STDOUT
        process = subprocess.Popen(command, stdout=sub_stdout, stderr=sub_stderr, universal_newlines=True)
        stdoutdata, _ = process.communicate()

        if process.returncode:
            if stdoutdata:
                sys.stderr.write('Output was:\n{0}'.format(stdoutdata))
            raise RuntimeError('Error while gathering dependencies for {0}.'.format(distribution))

        with open(join(workspace, 'requirements.txt'), 'r') as file_h:
            return file_h.readlines()
    finally:
        if os.path.isfile(cid_file):
            cid = _read_from(cid_file)
            try:
                subprocess.check_output(['docker', 'kill', cid], stderr=subprocess.PIPE)
            except subprocess.CalledProcessError:
                pass
        shutil.rmtree(workspace)


def _parse_and_merge_requirements(dependencies_map, requirements_file_lines, distribution):
    """
    Extract every requirement from the given requirements file lines, and merge them into the
    dependencies map. Merging here means that the map contains every encountered dependency,
    along with the version used in each distribution.

    Example:
    # dependencies_map = {
    # }
    _parse_and_merge_requirements(dependencies_map, ['cryptography==1.2', 'requests==2.1.0'], 'debian:stretch')
    # dependencies_map = {
    #   'cryptography': [('1.2', 'debian:stretch')],
    #   'requests': [('2.1.0', 'debian:stretch')]
    # }
    _parse_and_merge_requirements(dependencies_map, ['requests==2.4.0', 'mock==1.3'], 'centos:7')
    # dependencies_map = {
    #   'cryptography': [('1.2', 'debian:stretch')],
    #   'requests': [('2.1.0', 'debian:stretch'), ('2.4.0', 'centos:7')],
    #   'mock': [('1.3', 'centos:7')]
    # }
    """
    for line in requirements_file_lines:
        match = re.match(r'([^=]+)==([^=]+)', line.strip())
        if not line.startswith('-e') and match:
            package, version = match.groups()
            if package not in ['acme', 'certbot', 'certbot-apache', 'certbot-nginx', 'pkg-resources']:
                dependencies_map.setdefault(package, []).append((version, distribution))


def _consolidate_and_validate_dependencies(dependency_map):
    """
    Given the dependency map of all requirements found across all distributions for Certbot,
    build the list of single-version requirements for Certbot to be used by pip, and the list
    of version conflicts, if any, where several distributions require different versions of a
    package. Return requirements and conflicts as a tuple.
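
    Example (illustrative; versions are made up):
    # dependency_map = {
    #   'mock': [('1.3.0', 'centos:7')],
    #   'requests': [('2.1.0', 'debian:stretch'), ('2.4.0', 'centos:7')]
    # }
    # => requirements == [('mock', '1.3.0')]
    # => conflicts == ['package requests is declared with several versions: '
    #                  '2.1.0 (debian:stretch), 2.4.0 (centos:7)']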
    """
    print('===> Consolidate and validate the dependency map.')
    requirements = []
    conflicts = []
    for package, versions in dependency_map.items():
        reduced_versions = _reduce_versions(versions)

        if len(reduced_versions) > 1:
            version_list = ['{0} ({1})'.format(version, ','.join(distributions))
                            for version, distributions in reduced_versions.items()]
            conflict = ('package {0} is declared with several versions: {1}'
                        .format(package, ', '.join(version_list)))
            conflicts.append(conflict)
            sys.stderr.write('ERROR: {0}\n'.format(conflict))
        else:
            requirements.append((package, list(reduced_versions)[0]))

    requirements.sort(key=lambda x: x[0])
    return requirements, conflicts


def _reduce_versions(version_dist_tuples):
    """
    Take a list of (version, distribution) tuples,
    and reduce it to a map keyed by version.

    Example: [('1.2.0', 'debian:stretch'), ('1.4.0', 'ubuntu:18.04'), ('1.2.0', 'centos:6')]
              => {'1.2.0': ['debian:stretch', 'centos:6'], '1.4.0': ['ubuntu:18.04']}
    """
    version_dist_map = {}
    for version, distribution in version_dist_tuples:
        version_dist_map.setdefault(version, []).append(distribution)

    return version_dist_map


def _write_requirements(dest_file, requirements, conflicts):
    """
    Given the lists of requirements and conflicts, write a well-formatted requirements file,
    whose requirements are pinned and hashed using the hashin library. Conflicts, if any, are
    written at the end of the generated file.
    """
    print('===> Calculating hashes for the requirement file.')

    _write_to(dest_file, '''\
# This is the flattened list of packages certbot-auto installs.
# To generate this, do (with docker and package hashin installed):
# ```
# letsencrypt-auto-source/rebuild_dependencies.py \\
#   letsencrypt-auto-source/pieces/dependency-requirements.txt
# ```
# If you want to update a single dependency, run commands similar to these:
# ```
# pip install hashin
# hashin -r dependency-requirements.txt cryptography==1.5.2
# ```
''')

    for req in requirements:
        subprocess.check_call(['hashin', '{0}=={1}'.format(req[0], req[1]),
                               '--requirements-file', dest_file])

    if conflicts:
        with open(dest_file, 'a') as file_h:
            file_h.write('\n## ! SOME ERRORS OCCURRED ! ##\n')
            file_h.write('\n'.join('# {0}'.format(conflict) for conflict in conflicts))
            file_h.write('\n')

    return _read_from(dest_file)
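
# For reference, hashin appends each requirement in pip's hash-checking format,
# roughly like the following (package, version and hash placeholders are illustrative only):
#   cryptography==1.5.2 \
#       --hash=sha256:<sdist-hash> \
#       --hash=sha256:<wheel-hash>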


def _gather_dependencies(dest_file, verbose):
    """
    Main function of this script. Given a destination file path, write the file
    containing the consolidated and hashed requirements for Certbot, validated
    against several Linux distributions.
    """
    dependencies_map = {}

    for distribution in DISTRIBUTION_LIST:
        requirements_file_lines = _requirements_from_one_distribution(distribution, verbose)
        _parse_and_merge_requirements(dependencies_map, requirements_file_lines, distribution)

    requirements, conflicts = _consolidate_and_validate_dependencies(dependencies_map)

    return _write_requirements(dest_file, requirements, conflicts)


if __name__ == '__main__':
    parser = argparse.ArgumentParser(
        description=('Build a sanitized, pinned and hashed requirements file for certbot-auto, '
                     'validated against several OS distributions using Docker.'))
    parser.add_argument('requirements_path',
                        help='path for the generated requirements file')
    parser.add_argument('--verbose', '-v', action='store_true',
                        help='display all output during Docker execution')

    namespace = parser.parse_args()

    try:
        subprocess.check_output(['hashin', '--version'])
    except (subprocess.CalledProcessError, OSError):
        # OSError covers the case where the hashin executable is missing entirely.
        raise RuntimeError('Python library hashin is not installed in the current environment.')

    try:
        subprocess.check_output(['docker', '--version'], stderr=subprocess.STDOUT)
    except (subprocess.CalledProcessError, OSError):
        # OSError covers the case where the docker executable is missing entirely.
        raise RuntimeError('Docker is not installed or accessible to the current user.')

    file_content = _gather_dependencies(namespace.requirements_path, namespace.verbose)

    print(file_content)
    print('===> The rebuilt requirements file is available at {0}'
          .format(abspath(namespace.requirements_path)))