From c3d6fca3eb1f24616bb784865662adfa048f9bf9 Mon Sep 17 00:00:00 2001 From: Adrien Ferrand Date: Wed, 24 Feb 2021 00:29:52 +0100 Subject: Make certbot constraint file independent from certbot-auto + update cryptography (#8649) * Refactor to not depend on certbot-auto dependencies pinning anymore * Update constraints * Replaces references * Upgrade AWS dependencies pinning * Fix script * Fix Windows installer builds * Fixing sdists letstest script * Pin cryptography on 3.1.1 specifically for RHEL/CentOS 7 to avoid build failures during test_sdists test. * Finish fix * Fix VERSION_ID in RHEL 7 --- letsencrypt-auto-source/rebuild_dependencies.py | 290 ------------------------ snap/snapcraft.yaml | 6 +- tests/letstest/scripts/test_sdists.sh | 22 +- tools/certbot_constraints.txt | 262 +++++++++++++++++++++ tools/dev_constraints.txt | 6 +- tools/docker/core/Dockerfile | 4 - tools/pip_install.py | 6 +- tools/rebuild_certbot_constraints.py | 280 +++++++++++++++++++++++ tools/snap/generate_dnsplugins_all.sh | 2 +- windows-installer/construct.py | 18 +- 10 files changed, 584 insertions(+), 312 deletions(-) delete mode 100755 letsencrypt-auto-source/rebuild_dependencies.py create mode 100644 tools/certbot_constraints.txt create mode 100755 tools/rebuild_certbot_constraints.py diff --git a/letsencrypt-auto-source/rebuild_dependencies.py b/letsencrypt-auto-source/rebuild_dependencies.py deleted file mode 100755 index 864394661..000000000 --- a/letsencrypt-auto-source/rebuild_dependencies.py +++ /dev/null @@ -1,290 +0,0 @@ -#!/usr/bin/env python -""" -Gather and consolidate the up-to-date dependencies available and required to install certbot -on various Linux distributions. It generates a requirements file contained the pinned and hashed -versions, ready to be used by pip to install the certbot dependencies. - -This script is typically used to update the certbot-requirements.txt file of certbot-auto. 
- -To achieve its purpose, this script will start a certbot installation with unpinned dependencies, -then gather them, on various distributions started as Docker containers. - -Usage: letsencrypt-auto-source/rebuild_dependencies new_requirements.txt - -NB1: Docker must be installed on the machine running this script. -NB2: Python library 'hashin' must be installed on the machine running this script. -""" -from __future__ import print_function -import re -import shutil -import subprocess -import tempfile -import os -from os.path import dirname, abspath, join -import sys -import argparse - -# The list of docker distributions to test dependencies against with. -DISTRIBUTION_LIST = [ - 'ubuntu:18.04', 'ubuntu:16.04', - 'debian:stretch', - 'centos:7', 'centos:6', - 'opensuse/leap:15', - 'fedora:29', -] - -# These constraints will be added while gathering dependencies on each distribution. -# It can be used because a particular version for a package is required for any reason, -# or to solve a version conflict between two distributions requirements. -AUTHORITATIVE_CONSTRAINTS = { - # Using an older version of mock here prevents regressions of #5276. - 'mock': '1.3.0', - # Too touchy to move to a new version. And will be removed soon - # in favor of pure python parser for Apache. - 'python-augeas': '0.5.0', - # Package enum34 needs to be explicitly limited to Python2.x, in order to avoid - # certbot-auto failures on Python 3.6+ which enum34 doesn't support. See #5456. - 'enum34': '1.1.10; python_version < \'3.4\'', - # Cryptography 2.9+ drops support for OpenSSL 1.0.1, but we still want to support it - # for officially supported non-x86_64 ancient distributions like RHEL 6. 
- 'cryptography': '2.8', - # Parsedatetime 2.6 is broken on Python 2.7, see https://github.com/bear/parsedatetime/issues/246 - 'parsedatetime': '2.5', -} - -# ./certbot/letsencrypt-auto-source/rebuild_dependencies.py (2 levels from certbot root path) -CERTBOT_REPO_PATH = dirname(dirname(abspath(__file__))) - -# The script will be used to gather dependencies for a given distribution. -# - certbot-auto is used to install relevant OS packages, and set up an initial venv -# - then this venv is used to consistently construct an empty new venv -# - once pipstraped, this new venv pip-installs certbot runtime (including apache/nginx), -# without pinned dependencies, and respecting input authoritative requirements -# - `certbot plugins` is called to check we have a healthy environment -# - finally current set of dependencies is extracted out of the docker using pip freeze -SCRIPT = r"""#!/bin/sh -set -e - -cd /tmp/certbot -letsencrypt-auto-source/letsencrypt-auto --install-only -n -PYVER=`/opt/eff.org/certbot/venv/bin/python --version 2>&1 | cut -d" " -f 2 | cut -d. 
-f1,2 | sed 's/\.//'` - -/opt/eff.org/certbot/venv/bin/python letsencrypt-auto-source/pieces/create_venv.py /tmp/venv "$PYVER" 1 - -/tmp/venv/bin/python letsencrypt-auto-source/pieces/pipstrap.py -/tmp/venv/bin/pip install -e acme -e certbot -e certbot-apache -e certbot-nginx -c /tmp/constraints.txt -/tmp/venv/bin/certbot plugins -/tmp/venv/bin/pip freeze >> /tmp/workspace/requirements.txt -""" - - -def _read_from(file): - """Read all content of the file, and return it as a string.""" - with open(file, 'r') as file_h: - return file_h.read() - - -def _write_to(file, content): - """Write given string content to the file, overwriting its initial content.""" - with open(file, 'w') as file_h: - file_h.write(content) - - -def _requirements_from_one_distribution(distribution, verbose): - """ - Calculate the Certbot dependencies expressed for the given distribution, using the official - Docker for this distribution, and return the lines of the generated requirements file. - """ - print('===> Gathering dependencies for {0}.'.format(distribution)) - workspace = tempfile.mkdtemp() - script = join(workspace, 'script.sh') - authoritative_constraints = join(workspace, 'constraints.txt') - cid_file = join(workspace, 'cid') - - try: - _write_to(script, SCRIPT) - os.chmod(script, 0o755) - - _write_to(authoritative_constraints, '\n'.join( - '{0}=={1}'.format(package, version) for package, version in AUTHORITATIVE_CONSTRAINTS.items())) - - command = ['docker', 'run', '--rm', '--cidfile', cid_file, - '-v', '{0}:/tmp/certbot'.format(CERTBOT_REPO_PATH), - '-v', '{0}:/tmp/workspace'.format(workspace), - '-v', '{0}:/tmp/constraints.txt'.format(authoritative_constraints), - distribution, '/tmp/workspace/script.sh'] - sub_stdout = sys.stdout if verbose else subprocess.PIPE - sub_stderr = sys.stderr if verbose else subprocess.STDOUT - process = subprocess.Popen(command, stdout=sub_stdout, stderr=sub_stderr, universal_newlines=True) - stdoutdata, _ = process.communicate() - - if 
process.returncode: - if stdoutdata: - sys.stderr.write('Output was:\n{0}'.format(stdoutdata)) - raise RuntimeError('Error while gathering dependencies for {0}.'.format(distribution)) - - with open(join(workspace, 'requirements.txt'), 'r') as file_h: - return file_h.readlines() - finally: - if os.path.isfile(cid_file): - cid = _read_from(cid_file) - try: - subprocess.check_output(['docker', 'kill', cid], stderr=subprocess.PIPE) - except subprocess.CalledProcessError: - pass - shutil.rmtree(workspace) - - -def _parse_and_merge_requirements(dependencies_map, requirements_file_lines, distribution): - """ - Extract every requirement from the given requirements file, and merge it in the dependency map. - Merging here means that the map contain every encountered dependency, and the version used in - each distribution. - - Example: - # dependencies_map = { - # } - _parse_and_merge_requirements(['cryptography=='1.2','requests=='2.1.0'], dependencies_map, 'debian:stretch') - # dependencies_map = { - # 'cryptography': [('1.2', 'debian:stretch)], - # 'requests': [('2.1.0', 'debian:stretch')] - # } - _parse_and_merge_requirements(['requests=='2.4.0', 'mock==1.3'], dependencies_map, 'centos:7') - # dependencies_map = { - # 'cryptography': [('1.2', 'debian:stretch)], - # 'requests': [('2.1.0', 'debian:stretch'), ('2.4.0', 'centos:7')], - # 'mock': [('2.4.0', 'centos:7')] - # } - """ - for line in requirements_file_lines: - match = re.match(r'([^=]+)==([^=]+)', line.strip()) - if not line.startswith('-e') and match: - package, version = match.groups() - if package not in ['acme', 'certbot', 'certbot-apache', 'certbot-nginx', 'pkg-resources']: - dependencies_map.setdefault(package, []).append((version, distribution)) - - -def _consolidate_and_validate_dependencies(dependency_map): - """ - Given the dependency map of all requirements found in all distributions for Certbot, - construct an array containing the unit requirements for Certbot to be used by pip, - and the version 
conflicts, if any, between several distributions for a package. - Return requirements and conflicts as a tuple. - """ - print('===> Consolidate and validate the dependency map.') - requirements = [] - conflicts = [] - for package, versions in dependency_map.items(): - reduced_versions = _reduce_versions(versions) - - if len(reduced_versions) > 1: - version_list = ['{0} ({1})'.format(version, ','.join(distributions)) - for version, distributions in reduced_versions.items()] - conflict = ('package {0} is declared with several versions: {1}' - .format(package, ', '.join(version_list))) - conflicts.append(conflict) - sys.stderr.write('ERROR: {0}\n'.format(conflict)) - else: - requirements.append((package, list(reduced_versions)[0])) - - requirements.sort(key=lambda x: x[0]) - return requirements, conflicts - - -def _reduce_versions(version_dist_tuples): - """ - Get an array of version/distribution tuples, - and reduce it to a map based on the version values. - - Example: [('1.2.0', 'debian:stretch'), ('1.4.0', 'ubuntu:18.04'), ('1.2.0', 'centos:6')] - => {'1.2.0': ['debiqn:stretch', 'centos:6'], '1.4.0': ['ubuntu:18.04']} - """ - version_dist_map = {} - for version, distribution in version_dist_tuples: - version_dist_map.setdefault(version, []).append(distribution) - - return version_dist_map - - -def _write_requirements(dest_file, requirements, conflicts): - """ - Given the list of requirements and conflicts, write a well-formatted requirements file, - whose requirements are hashed signed using hashin library. Conflicts are written at the end - of the generated file. - """ - print('===> Calculating hashes for the requirement file.') - - _write_to(dest_file, '''\ -# This is the flattened list of packages certbot-auto installs. 
-# To generate this, do (with docker and package hashin installed): -# ``` -# letsencrypt-auto-source/rebuild_dependencies.py \\ -# letsencrypt-auto-source/pieces/dependency-requirements.txt -# ``` -# If you want to update a single dependency, run commands similar to these: -# ``` -# pip install hashin -# hashin -r dependency-requirements.txt cryptography==1.5.2 -# ``` -''') - - for req in requirements: - if req[0] in AUTHORITATIVE_CONSTRAINTS: - # If requirement is in AUTHORITATIVE_CONSTRAINTS, take its value instead of the - # computed one to get any environment descriptor that would have been added. - req = (req[0], AUTHORITATIVE_CONSTRAINTS[req[0]]) - subprocess.check_call(['hashin', '{0}=={1}'.format(req[0], req[1]), - '--requirements-file', dest_file]) - - if conflicts: - with open(dest_file, 'a') as file_h: - file_h.write('\n## ! SOME ERRORS OCCURRED ! ##\n') - file_h.write('\n'.join('# {0}'.format(conflict) for conflict in conflicts)) - file_h.write('\n') - - return _read_from(dest_file) - - -def _gather_dependencies(dest_file, verbose): - """ - Main method of this script. Given a destination file path, will write the file - containing the consolidated and hashed requirements for Certbot, validated - against several Linux distributions. 
- """ - dependencies_map = {} - - for distribution in DISTRIBUTION_LIST: - requirements_file_lines = _requirements_from_one_distribution(distribution, verbose) - _parse_and_merge_requirements(dependencies_map, requirements_file_lines, distribution) - - requirements, conflicts = _consolidate_and_validate_dependencies(dependencies_map) - - return _write_requirements(dest_file, requirements, conflicts) - - -if __name__ == '__main__': - parser = argparse.ArgumentParser( - description=('Build a sanitized, pinned and hashed requirements file for certbot-auto, ' - 'validated against several OS distributions using Docker.')) - parser.add_argument('requirements_path', - help='path for the generated requirements file') - parser.add_argument('--verbose', '-v', action='store_true', - help='verbose will display all output during docker execution') - - namespace = parser.parse_args() - - try: - subprocess.check_output(['hashin', '--version']) - except subprocess.CalledProcessError: - raise RuntimeError('Python library hashin is not installed in the current environment.') - - try: - subprocess.check_output(['docker', '--version'], stderr=subprocess.STDOUT) - except subprocess.CalledProcessError: - raise RuntimeError('Docker is not installed or accessible to current user.') - - file_content = _gather_dependencies(namespace.requirements_path, namespace.verbose) - - print(file_content) - print('===> Rebuilt requirement file is available on path {0}' - .format(abspath(namespace.requirements_path))) diff --git a/snap/snapcraft.yaml b/snap/snapcraft.yaml index c9061ecb3..d53fba88b 100644 --- a/snap/snapcraft.yaml +++ b/snap/snapcraft.yaml @@ -73,10 +73,10 @@ parts: build-packages: [gcc, libffi-dev, libssl-dev, git, libaugeas-dev, python3-dev] build-environment: - SNAPCRAFT_PYTHON_VENV_ARGS: --upgrade - # Constraints are passed through the environment variable PIP_CONSTRAINTS instead of using the + # Constraints are passed through the environment variable PIP_CONSTRAINTS instead of 
using the # parts.[part_name].constraints option available in snapcraft.yaml when the Python plugin is # used. This is done to let these constraints be applied not only on the certbot package - # build, but also on any isolated build that pip could trigger when building wheels for + # build, but also on any isolated build that pip could trigger when building wheels for # dependencies. See https://github.com/certbot/certbot/pull/8443 for more info. - PIP_CONSTRAINT: $SNAPCRAFT_PART_SRC/snap-constraints.txt override-build: | @@ -85,7 +85,7 @@ parts: snapcraftctl build override-pull: | snapcraftctl pull - python3 "${SNAPCRAFT_PART_SRC}/tools/strip_hashes.py" "${SNAPCRAFT_PART_SRC}/letsencrypt-auto-source/pieces/dependency-requirements.txt" | grep -v python-augeas >> "${SNAPCRAFT_PART_SRC}/snap-constraints.txt" + python3 "${SNAPCRAFT_PART_SRC}/tools/strip_hashes.py" "${SNAPCRAFT_PART_SRC}/tools/certbot_constraints.txt" | grep -v python-augeas >> "${SNAPCRAFT_PART_SRC}/snap-constraints.txt" python3 "${SNAPCRAFT_PART_SRC}/tools/strip_hashes.py" "${SNAPCRAFT_PART_SRC}/tools/pipstrap_constraints.txt" >> "${SNAPCRAFT_PART_SRC}/snap-constraints.txt" echo "$(python3 "${SNAPCRAFT_PART_SRC}/tools/merge_requirements.py" "${SNAPCRAFT_PART_SRC}/snap-constraints.txt")" > "${SNAPCRAFT_PART_SRC}/snap-constraints.txt" snapcraftctl set-version `grep -oP "__version__ = '\K.*(?=')" "${SNAPCRAFT_PART_SRC}/certbot/certbot/__init__.py"` diff --git a/tests/letstest/scripts/test_sdists.sh b/tests/letstest/scripts/test_sdists.sh index aa12d5610..becdd6d9a 100755 --- a/tests/letstest/scripts/test_sdists.sh +++ b/tests/letstest/scripts/test_sdists.sh @@ -12,13 +12,21 @@ sudo $BOOTSTRAP_SCRIPT # We strip the hashes because the venv creation script includes unhashed # constraints in the commands given to pip and the mix of hashed and unhashed # packages makes pip error out. 
-python3 tools/strip_hashes.py letsencrypt-auto-source/pieces/dependency-requirements.txt > requirements.txt -# We also strip out the requirement for enum34 because it cannot be installed -# in newer versions of Python 3, tools/strip_hashes.py removes the environment -# marker that'd normally prevent it from being installed, and this package is -# not needed for any OS tested here. -sed -i '/enum34/d' requirements.txt -CERTBOT_PIP_NO_BINARY=:all: tools/venv.py --requirement requirements.txt +python3 tools/strip_hashes.py tools/pipstrap_constraints.txt > constraints.txt +python3 tools/strip_hashes.py tools/certbot_constraints.txt > requirements.txt + +# We pin cryptography to 3.1.1 and pyOpenSSL to 19.1.0 specifically for CentOS 7 / RHEL 7 +# because these systems ship only with OpenSSL 1.0.2, and this OpenSSL version support has been +# dropped on cryptography>=3.2 and pyOpenSSL>=20.0.0. +# Using this old version of OpenSSL would break the cryptography and pyOpenSSL wheels builds. +if [ -f /etc/redhat-release ] && [ "$(. /etc/os-release 2> /dev/null && echo "$VERSION_ID" | cut -d '.' -f1)" -eq 7 ]; then + sed -i 's|cryptography==.*|cryptography==3.1.1|g' requirements.txt + sed -i 's|pyOpenSSL==.*|pyOpenSSL==19.1.0|g' requirements.txt +fi + +python3 -m venv $VENV_PATH +$VENV_PATH/bin/python3 tools/pipstrap.py +PIP_CONSTRAINT=constraints.txt PIP_NO_BINARY=:all: $VENV_PATH/bin/python3 -m pip install --requirement requirements.txt . "$VENV_PATH/bin/activate" # pytest is needed to run tests on some of our packages so we install a pinned version here. tools/pip_install.py pytest diff --git a/tools/certbot_constraints.txt b/tools/certbot_constraints.txt new file mode 100644 index 000000000..77bfef9db --- /dev/null +++ b/tools/certbot_constraints.txt @@ -0,0 +1,262 @@ +# This is the flattened list of pinned packages to build certbot deployable artifacts. 
+# To generate this, do (with docker and package hashin installed):
+# ```
+# tools/rebuild_certbot_constraints.py \
+# tools/certbot_constraints.txt
+# ```
+# If you want to update a single dependency, run commands similar to these:
+# ```
+# pip install hashin
+# hashin -r tools/certbot_constraints.txt cryptography==1.5.2
+# ```
+ConfigArgParse==1.2.3 \
+ --hash=sha256:edd17be986d5c1ba2e307150b8e5f5107aba125f3574dddd02c85d5cdcfd37dc
+certifi==2020.12.5 \
+ --hash=sha256:1a4995114262bffbc2413b159f2a1a480c969de6e6eb13ee966d470af86af59c \
+ --hash=sha256:719a74fb9e33b9bd44cc7f3a8d94bc35e4049deebe19ba7d8e108280cfd59830
+cffi==1.14.4 \
+ --hash=sha256:00a1ba5e2e95684448de9b89888ccd02c98d512064b4cb987d48f4b40aa0421e \
+ --hash=sha256:00e28066507bfc3fe865a31f325c8391a1ac2916219340f87dfad602c3e48e5d \
+ --hash=sha256:045d792900a75e8b1e1b0ab6787dd733a8190ffcf80e8c8ceb2fb10a29ff238a \
+ --hash=sha256:0638c3ae1a0edfb77c6765d487fee624d2b1ee1bdfeffc1f0b58c64d149e7eec \
+ --hash=sha256:105abaf8a6075dc96c1fe5ae7aae073f4696f2905fde6aeada4c9d2926752362 \
+ --hash=sha256:155136b51fd733fa94e1c2ea5211dcd4c8879869008fc811648f16541bf99668 \
+ --hash=sha256:1a465cbe98a7fd391d47dce4b8f7e5b921e6cd805ef421d04f5f66ba8f06086c \
+ --hash=sha256:1d2c4994f515e5b485fd6d3a73d05526aa0fcf248eb135996b088d25dfa1865b \
+ --hash=sha256:2c24d61263f511551f740d1a065eb0212db1dbbbbd241db758f5244281590c06 \
+ --hash=sha256:51a8b381b16ddd370178a65360ebe15fbc1c71cf6f584613a7ea08bfad946698 \
+ --hash=sha256:594234691ac0e9b770aee9fcdb8fa02c22e43e5c619456efd0d6c2bf276f3eb2 \
+ --hash=sha256:5cf4be6c304ad0b6602f5c4e90e2f59b47653ac1ed9c662ed379fe48a8f26b0c \
+ --hash=sha256:64081b3f8f6f3c3de6191ec89d7dc6c86a8a43911f7ecb422c60e90c70be41c7 \
+ --hash=sha256:6bc25fc545a6b3d57b5f8618e59fc13d3a3a68431e8ca5fd4c13241cd70d0009 \
+ --hash=sha256:798caa2a2384b1cbe8a2a139d80734c9db54f9cc155c99d7cc92441a23871c03 \
+ --hash=sha256:7c6b1dece89874d9541fc974917b631406233ea0440d0bdfbb8e03bf39a49b3b \
+ 
--hash=sha256:7ef7d4ced6b325e92eb4d3502946c78c5367bc416398d387b39591532536734e \ + --hash=sha256:840793c68105fe031f34d6a086eaea153a0cd5c491cde82a74b420edd0a2b909 \ + --hash=sha256:8d6603078baf4e11edc4168a514c5ce5b3ba6e3e9c374298cb88437957960a53 \ + --hash=sha256:9cc46bc107224ff5b6d04369e7c595acb700c3613ad7bcf2e2012f62ece80c35 \ + --hash=sha256:9f7a31251289b2ab6d4012f6e83e58bc3b96bd151f5b5262467f4bb6b34a7c26 \ + --hash=sha256:9ffb888f19d54a4d4dfd4b3f29bc2c16aa4972f1c2ab9c4ab09b8ab8685b9c2b \ + --hash=sha256:a5ed8c05548b54b998b9498753fb9cadbfd92ee88e884641377d8a8b291bcc01 \ + --hash=sha256:a7711edca4dcef1a75257b50a2fbfe92a65187c47dab5a0f1b9b332c5919a3fb \ + --hash=sha256:af5c59122a011049aad5dd87424b8e65a80e4a6477419c0c1015f73fb5ea0293 \ + --hash=sha256:b18e0a9ef57d2b41f5c68beefa32317d286c3d6ac0484efd10d6e07491bb95dd \ + --hash=sha256:b4e248d1087abf9f4c10f3c398896c87ce82a9856494a7155823eb45a892395d \ + --hash=sha256:ba4e9e0ae13fc41c6b23299545e5ef73055213e466bd107953e4a013a5ddd7e3 \ + --hash=sha256:c6332685306b6417a91b1ff9fae889b3ba65c2292d64bd9245c093b1b284809d \ + --hash=sha256:d5ff0621c88ce83a28a10d2ce719b2ee85635e85c515f12bac99a95306da4b2e \ + --hash=sha256:d9efd8b7a3ef378dd61a1e77367f1924375befc2eba06168b6ebfa903a5e59ca \ + --hash=sha256:df5169c4396adc04f9b0a05f13c074df878b6052430e03f50e68adf3a57aa28d \ + --hash=sha256:ebb253464a5d0482b191274f1c8bf00e33f7e0b9c66405fbffc61ed2c839c775 \ + --hash=sha256:ec80dc47f54e6e9a78181ce05feb71a0353854cc26999db963695f950b5fb375 \ + --hash=sha256:f032b34669220030f905152045dfa27741ce1a6db3324a5bc0b96b6c7420c87b \ + --hash=sha256:f60567825f791c6f8a592f3c6e3bd93dd2934e3f9dac189308426bd76b00ef3b \ + --hash=sha256:f803eaa94c2fcda012c047e62bc7a51b0bdabda1cad7a92a522694ea2d76e49f +chardet==4.0.0 \ + --hash=sha256:0d6f53a15db4120f2b08c94f11e7d93d2c911ee118b6b30a04ec3ee8310179fa \ + --hash=sha256:f864054d66fd9118f2e67044ac8981a54775ec5b67aed0441892edb553d21da5 +configobj==5.0.6 \ + 
--hash=sha256:a2f5650770e1c87fb335af19a9b7eb73fc05ccf22144eb68db7d00cd2bcb0902 +cryptography==3.3.2 \ + --hash=sha256:0d7b69674b738068fa6ffade5c962ecd14969690585aaca0a1b1fc9058938a72 \ + --hash=sha256:1bd0ccb0a1ed775cd7e2144fe46df9dc03eefd722bbcf587b3e0616ea4a81eff \ + --hash=sha256:3c284fc1e504e88e51c428db9c9274f2da9f73fdf5d7e13a36b8ecb039af6e6c \ + --hash=sha256:49570438e60f19243e7e0d504527dd5fe9b4b967b5a1ff21cc12b57602dd85d3 \ + --hash=sha256:541dd758ad49b45920dda3b5b48c968f8b2533d8981bcdb43002798d8f7a89ed \ + --hash=sha256:5a60d3780149e13b7a6ff7ad6526b38846354d11a15e21068e57073e29e19bed \ + --hash=sha256:7951a966613c4211b6612b0352f5bf29989955ee592c4a885d8c7d0f830d0433 \ + --hash=sha256:922f9602d67c15ade470c11d616f2b2364950602e370c76f0c94c94ae672742e \ + --hash=sha256:a0f0b96c572fc9f25c3f4ddbf4688b9b38c69836713fb255f4a2715d93cbaf44 \ + --hash=sha256:a777c096a49d80f9d2979695b835b0f9c9edab73b59e4ceb51f19724dda887ed \ + --hash=sha256:a9a4ac9648d39ce71c2f63fe7dc6db144b9fa567ddfc48b9fde1b54483d26042 \ + --hash=sha256:aa4969f24d536ae2268c902b2c3d62ab464b5a66bcb247630d208a79a8098e9b \ + --hash=sha256:c7390f9b2119b2b43160abb34f63277a638504ef8df99f11cb52c1fda66a2e6f \ + --hash=sha256:e18e6ab84dfb0ab997faf8cca25a86ff15dfea4027b986322026cc99e0a892da +distro==1.5.0 \ + --hash=sha256:0e58756ae38fbd8fc3020d54badb8eae17c5b9dcbed388b17bb55b8a5928df92 \ + --hash=sha256:df74eed763e18d10d0da624258524ae80486432cd17392d9c3d96f5e83cd2799 +idna==2.10 \ + --hash=sha256:b307872f855b18632ce0c21c5e45be78c0ea7ae4c15c828c20788b26921eb3f6 \ + --hash=sha256:b97d804b1e9b523befed77c48dacec60e6dcb0b5391d57af6a65a312a90648c0 +josepy==1.6.0 \ + --hash=sha256:0aab1c3ceffe045e7fd5bcfe7685e27e9d2758518d9ba7116b5de34087e70bf5 \ + --hash=sha256:65f077fc5902aca1e140ddb000e7abb081d5fb8421db60b6071076ef81c5bd27 +parsedatetime==2.6 \ + --hash=sha256:4cb368fbb18a0b7231f4d76119165451c8d2e35951455dfee97c62a87b04d455 \ + --hash=sha256:cb96edd7016872f58479e35879294258c71437195760746faffedb692aef000b 
+pyOpenSSL==20.0.1 \ + --hash=sha256:4c231c759543ba02560fcd2480c48dcec4dae34c9da7d3747c508227e0624b51 \ + --hash=sha256:818ae18e06922c066f777a33f1fca45786d85edfe71cd043de6379337a7f274b +pyRFC3339==1.1 \ + --hash=sha256:67196cb83b470709c580bb4738b83165e67c6cc60e1f2e4f286cfcb402a926f4 \ + --hash=sha256:81b8cbe1519cdb79bed04910dd6fa4e181faf8c88dff1e1b987b5f7ab23a5b1a +pycparser==2.20 \ + --hash=sha256:2d475327684562c3a96cc71adf7dc8c4f0565175cf86b6d7a404ff4c771f15f0 \ + --hash=sha256:7582ad22678f0fcd81102833f60ef8d0e57288b6b5fb00323d101be910e35705 +pyparsing==2.4.7 \ + --hash=sha256:c203ec8783bf771a155b207279b9bccb8dea02d8f0c9e5f8ead507bc3246ecc1 \ + --hash=sha256:ef9d7589ef3c200abe66653d3f1ab1033c3c419ae9b9bdb1240a85b024efc88b +python-augeas==0.5.0 \ + --hash=sha256:67d59d66cdba8d624e0389b87b2a83a176f21f16a87553b50f5703b23f29bac2 +pytz==2021.1 \ + --hash=sha256:83a4a90894bf38e243cf052c8b58f381bfe9a7a483f6a9cab140bc7f702ac4da \ + --hash=sha256:eb10ce3e7736052ed3623d49975ce333bcd712c7bb19a58b9e2089d4057d0798 +requests==2.25.1 \ + --hash=sha256:27973dd4a904a4f13b263a19c866c13b92a39ed1c964655f025f3f8d3d75b804 \ + --hash=sha256:c210084e36a42ae6b9219e00e48287def368a26d03a048ddad7bfee44f75871e +requests-toolbelt==0.9.1 \ + --hash=sha256:380606e1d10dc85c3bd47bf5a6095f815ec007be7a8b69c878507068df059e6f \ + --hash=sha256:968089d4584ad4ad7c171454f0a5c6dac23971e9472521ea3b6d49d610aa6fc0 +six==1.15.0 \ + --hash=sha256:30639c035cdb23534cd4aa2dd52c3bf48f06e5f4a941509c8bafd8ce11080259 \ + --hash=sha256:8b74bedcbbbaca38ff6d7491d76f2b06b3592611af620f8426e82dddb04a5ced +urllib3==1.26.3 \ + --hash=sha256:1b465e494e3e0d8939b50680403e3aedaa2bc434b7d5af64dfd3c958d7f5ae80 \ + --hash=sha256:de3eedaad74a2683334e282005cd8d7f22f4d55fa690a2a1020a416cb0a47e73 +zope.component==4.6.2 \ + --hash=sha256:607628e4c84f7887a69a958542b5c304663e726b73aba0882e3a3f059bff14f3 \ + --hash=sha256:91628918218b3e6f6323de2a7b845e09ddc5cae131c034896c051b084bba3c92 +zope.deferredimport==4.3.1 \ + 
--hash=sha256:57b2345e7b5eef47efcd4f634ff16c93e4265de3dcf325afc7315ade48d909e1 \ + --hash=sha256:9a0c211df44aa95f1c4e6d2626f90b400f56989180d3ef96032d708da3d23e0a +zope.deprecation==4.4.0 \ + --hash=sha256:0d453338f04bacf91bbfba545d8bcdf529aa829e67b705eac8c1a7fdce66e2df \ + --hash=sha256:f1480b74995958b24ce37b0ef04d3663d2683e5d6debc96726eff18acf4ea113 +zope.event==4.5.0 \ + --hash=sha256:2666401939cdaa5f4e0c08cf7f20c9b21423b95e88f4675b1443973bdb080c42 \ + --hash=sha256:5e76517f5b9b119acf37ca8819781db6c16ea433f7e2062c4afc2b6fbedb1330 +zope.hookable==5.0.1 \ + --hash=sha256:0194b9b9e7f614abba60c90b231908861036578297515d3d6508eb10190f266d \ + --hash=sha256:0c2977473918bdefc6fa8dfb311f154e7f13c6133957fe649704deca79b92093 \ + --hash=sha256:17b8bdb3b77e03a152ca0d5ca185a7ae0156f5e5a2dbddf538676633a1f7380f \ + --hash=sha256:29d07681a78042cdd15b268ae9decffed9ace68a53eebeb61d65ae931d158841 \ + --hash=sha256:36fb1b35d1150267cb0543a1ddd950c0bc2c75ed0e6e92e3aaa6ac2e29416cb7 \ + --hash=sha256:3aed60c2bb5e812bbf9295c70f25b17ac37c233f30447a96c67913ba5073642f \ + --hash=sha256:3cac1565cc768911e72ca9ec4ddf5c5109e1fef0104f19f06649cf1874943b60 \ + --hash=sha256:3d4bc0cc4a37c3cd3081063142eeb2125511db3c13f6dc932d899c512690378e \ + --hash=sha256:3f73096f27b8c28be53ffb6604f7b570fbbb82f273c6febe5f58119009b59898 \ + --hash=sha256:522d1153d93f2d48aa0bd9fb778d8d4500be2e4dcf86c3150768f0e3adbbc4ef \ + --hash=sha256:523d2928fb7377bbdbc9af9c0b14ad73e6eaf226349f105733bdae27efd15b5a \ + --hash=sha256:5848309d4fc5c02150a45e8f8d2227e5bfda386a508bbd3160fed7c633c5a2fa \ + --hash=sha256:6781f86e6d54a110980a76e761eb54590630fd2af2a17d7edf02a079d2646c1d \ + --hash=sha256:6fd27921ebf3aaa945fa25d790f1f2046204f24dba4946f82f5f0a442577c3e9 \ + --hash=sha256:70d581862863f6bf9e175e85c9d70c2d7155f53fb04dcdb2f73cf288ca559a53 \ + --hash=sha256:81867c23b0dc66c8366f351d00923f2bc5902820a24c2534dfd7bf01a5879963 \ + --hash=sha256:81db29edadcbb740cd2716c95a297893a546ed89db1bfe9110168732d7f0afdd \ + 
--hash=sha256:86bd12624068cea60860a0759af5e2c3adc89c12aef6f71cf12f577e28deefe3 \ + --hash=sha256:9c184d8f9f7a76e1ced99855ccf390ffdd0ec3765e5cbf7b9cada600accc0a1e \ + --hash=sha256:acc789e8c29c13555e43fe4bf9fcd15a65512c9645e97bbaa5602e3201252b02 \ + --hash=sha256:afaa740206b7660d4cc3b8f120426c85761f51379af7a5b05451f624ad12b0af \ + --hash=sha256:b5f5fa323f878bb16eae68ea1ba7f6c0419d4695d0248bed4b18f51d7ce5ab85 \ + --hash=sha256:bd89e0e2c67bf4ac3aca2a19702b1a37269fb1923827f68324ac2e7afd6e3406 \ + --hash=sha256:c212de743283ec0735db24ec6ad913758df3af1b7217550ff270038062afd6ae \ + --hash=sha256:ca553f524293a0bdea05e7f44c3e685e4b7b022cb37d87bc4a3efa0f86587a8d \ + --hash=sha256:cab67065a3db92f636128d3157cc5424a145f82d96fb47159c539132833a6d36 \ + --hash=sha256:d3b3b3eedfdbf6b02898216e85aa6baf50207f4378a2a6803d6d47650cd37031 \ + --hash=sha256:d9f4a5a72f40256b686d31c5c0b1fde503172307beb12c1568296e76118e402c \ + --hash=sha256:df5067d87aaa111ed5d050e1ee853ba284969497f91806efd42425f5348f1c06 \ + --hash=sha256:e2587644812c6138f05b8a41594a8337c6790e3baf9a01915e52438c13fc6bef \ + --hash=sha256:e27fd877662db94f897f3fd532ef211ca4901eb1a70ba456f15c0866a985464a \ + --hash=sha256:e427ebbdd223c72e06ba94c004bb04e996c84dec8a0fa84e837556ae145c439e \ + --hash=sha256:e583ad4309c203ef75a09d43434cf9c2b4fa247997ecb0dcad769982c39411c7 \ + --hash=sha256:e760b2bc8ece9200804f0c2b64d10147ecaf18455a2a90827fbec4c9d84f3ad5 \ + --hash=sha256:ea9a9cc8bcc70e18023f30fa2f53d11ae069572a162791224e60cd65df55fb69 \ + --hash=sha256:ecb3f17dce4803c1099bd21742cd126b59817a4e76a6544d31d2cca6e30dbffd \ + --hash=sha256:ed794e3b3de42486d30444fb60b5561e724ee8a2d1b17b0c2e0f81e3ddaf7a87 \ + --hash=sha256:ee885d347279e38226d0a437b6a932f207f691c502ee565aba27a7022f1285df \ + --hash=sha256:fd5e7bc5f24f7e3d490698f7b854659a9851da2187414617cd5ed360af7efd63 \ + --hash=sha256:fe45f6870f7588ac7b2763ff1ce98cce59369717afe70cc353ec5218bc854bcc +zope.interface==5.2.0 \ + 
--hash=sha256:05a97ba92c1c7c26f25c9f671aa1ef85ffead6cdad13770e5b689cf983adc7e1 \ + --hash=sha256:07d61722dd7d85547b7c6b0f5486b4338001fab349f2ac5cabc0b7182eb3425d \ + --hash=sha256:0a990dcc97806e5980bbb54b2e46b9cde9e48932d8e6984daf71ef1745516123 \ + --hash=sha256:150e8bcb7253a34a4535aeea3de36c0bb3b1a6a47a183a95d65a194b3e07f232 \ + --hash=sha256:1743bcfe45af8846b775086471c28258f4c6e9ee8ef37484de4495f15a98b549 \ + --hash=sha256:1b5f6c8fff4ed32aa2dd43e84061bc8346f32d3ba6ad6e58f088fe109608f102 \ + --hash=sha256:21e49123f375703cf824214939d39df0af62c47d122d955b2a8d9153ea08cfd5 \ + --hash=sha256:21f579134a47083ffb5ddd1307f0405c91aa8b61ad4be6fd5af0171474fe0c45 \ + --hash=sha256:27c267dc38a0f0079e96a2945ee65786d38ef111e413c702fbaaacbab6361d00 \ + --hash=sha256:299bde0ab9e5c4a92f01a152b7fbabb460f31343f1416f9b7b983167ab1e33bc \ + --hash=sha256:2ab88d8f228f803fcb8cb7d222c579d13dab2d3622c51e8cf321280da01102a7 \ + --hash=sha256:2ced4c35061eea623bc84c7711eedce8ecc3c2c51cd9c6afa6290df3bae9e104 \ + --hash=sha256:2dcab01c660983ba5e5a612e0c935141ccbee67d2e2e14b833e01c2354bd8034 \ + --hash=sha256:32546af61a9a9b141ca38d971aa6eb9800450fa6620ce6323cc30eec447861f3 \ + --hash=sha256:32b40a4c46d199827d79c86bb8cb88b1bbb764f127876f2cb6f3a47f63dbada3 \ + --hash=sha256:3cc94c69f6bd48ed86e8e24f358cb75095c8129827df1298518ab860115269a4 \ + --hash=sha256:42b278ac0989d6f5cf58d7e0828ea6b5951464e3cf2ff229dd09a96cb6ba0c86 \ + --hash=sha256:495b63fd0302f282ee6c1e6ea0f1c12cb3d1a49c8292d27287f01845ff252a96 \ + --hash=sha256:4af87cdc0d4b14e600e6d3d09793dce3b7171348a094ba818e2a68ae7ee67546 \ + --hash=sha256:4b94df9f2fdde7b9314321bab8448e6ad5a23b80542dcab53e329527d4099dcb \ + --hash=sha256:4c48ddb63e2b20fba4c6a2bf81b4d49e99b6d4587fb67a6cd33a2c1f003af3e3 \ + --hash=sha256:4df9afd17bd5477e9f8c8b6bb8507e18dd0f8b4efe73bb99729ff203279e9e3b \ + --hash=sha256:518950fe6a5d56f94ba125107895f938a4f34f704c658986eae8255edb41163b \ + --hash=sha256:538298e4e113ccb8b41658d5a4b605bebe75e46a30ceca22a5a289cf02c80bec \ + 
--hash=sha256:55465121e72e208a7b69b53de791402affe6165083b2ea71b892728bd19ba9ae \ + --hash=sha256:588384d70a0f19b47409cfdb10e0c27c20e4293b74fc891df3d8eb47782b8b3e \ + --hash=sha256:6278c080d4afffc9016e14325f8734456831124e8c12caa754fd544435c08386 \ + --hash=sha256:64ea6c221aeee4796860405e1aedec63424cda4202a7ad27a5066876db5b0fd2 \ + --hash=sha256:681dbb33e2b40262b33fd383bae63c36d33fd79fa1a8e4092945430744ffd34a \ + --hash=sha256:6936aa9da390402d646a32a6a38d5409c2d2afb2950f045a7d02ab25a4e7d08d \ + --hash=sha256:778d0ec38bbd288b150a3ae363c8ffd88d2207a756842495e9bffd8a8afbc89a \ + --hash=sha256:8251f06a77985a2729a8bdbefbae79ee78567dddc3acbd499b87e705ca59fe24 \ + --hash=sha256:83b4aa5344cce005a9cff5d0321b2e318e871cc1dfc793b66c32dd4f59e9770d \ + --hash=sha256:844fad925ac5c2ad4faaceb3b2520ad016b5280105c6e16e79838cf951903a7b \ + --hash=sha256:8ceb3667dd13b8133f2e4d637b5b00f240f066448e2aa89a41f4c2d78a26ce50 \ + --hash=sha256:92dc0fb79675882d0b6138be4bf0cec7ea7c7eede60aaca78303d8e8dbdaa523 \ + --hash=sha256:9789bd945e9f5bd026ed3f5b453d640befb8b1fc33a779c1fe8d3eb21fe3fb4a \ + --hash=sha256:a2b6d6eb693bc2fc6c484f2e5d93bd0b0da803fa77bf974f160533e555e4d095 \ + --hash=sha256:aab9f1e34d810feb00bf841993552b8fcc6ae71d473c505381627143d0018a6a \ + --hash=sha256:abb61afd84f23099ac6099d804cdba9bd3b902aaaded3ffff47e490b0a495520 \ + --hash=sha256:adf9ee115ae8ff8b6da4b854b4152f253b390ba64407a22d75456fe07dcbda65 \ + --hash=sha256:aedc6c672b351afe6dfe17ff83ee5e7eb6ed44718f879a9328a68bdb20b57e11 \ + --hash=sha256:b7a00ecb1434f8183395fac5366a21ee73d14900082ca37cf74993cf46baa56c \ + --hash=sha256:ba32f4a91c1cb7314c429b03afbf87b1fff4fb1c8db32260e7310104bd77f0c7 \ + --hash=sha256:cbd0f2cbd8689861209cd89141371d3a22a11613304d1f0736492590aa0ab332 \ + --hash=sha256:e4bc372b953bf6cec65a8d48482ba574f6e051621d157cf224227dbb55486b1e \ + --hash=sha256:eccac3d9aadc68e994b6d228cb0c8919fc47a5350d85a1b4d3d81d1e98baf40c \ + --hash=sha256:efd550b3da28195746bb43bd1d815058181a7ca6d9d6aa89dd37f5eefe2cacb7 \ + 
--hash=sha256:efef581c8ba4d990770875e1a2218e856849d32ada2680e53aebc5d154a17e20 \ + --hash=sha256:f057897711a630a0b7a6a03f1acf379b6ba25d37dc5dc217a97191984ba7f2fc \ + --hash=sha256:f37d45fab14ffef9d33a0dc3bc59ce0c5313e2253323312d47739192da94f5fd \ + --hash=sha256:f44906f70205d456d503105023041f1e63aece7623b31c390a0103db4de17537 +zope.proxy==4.3.5 \ + --hash=sha256:00573dfa755d0703ab84bb23cb6ecf97bb683c34b340d4df76651f97b0bab068 \ + --hash=sha256:092049280f2848d2ba1b57b71fe04881762a220a97b65288bcb0968bb199ec30 \ + --hash=sha256:0cbd27b4d3718b5ec74fc65ffa53c78d34c65c6fd9411b8352d2a4f855220cf1 \ + --hash=sha256:17fc7e16d0c81f833a138818a30f366696653d521febc8e892858041c4d88785 \ + --hash=sha256:19577dfeb70e8a67249ba92c8ad20589a1a2d86a8d693647fa8385408a4c17b0 \ + --hash=sha256:207aa914576b1181597a1516e1b90599dc690c095343ae281b0772e44945e6a4 \ + --hash=sha256:219a7db5ed53e523eb4a4769f13105118b6d5b04ed169a283c9775af221e231f \ + --hash=sha256:2b50ea79849e46b5f4f2b0247a3687505d32d161eeb16a75f6f7e6cd81936e43 \ + --hash=sha256:5903d38362b6c716e66bbe470f190579c530a5baf03dbc8500e5c2357aa569a5 \ + --hash=sha256:5c24903675e271bd688c6e9e7df5775ac6b168feb87dbe0e4bcc90805f21b28f \ + --hash=sha256:5ef6bc5ed98139e084f4e91100f2b098a0cd3493d4e76f9d6b3f7b95d7ad0f06 \ + --hash=sha256:61b55ae3c23a126a788b33ffb18f37d6668e79a05e756588d9e4d4be7246ab1c \ + --hash=sha256:63ddb992931a5e616c87d3d89f5a58db086e617548005c7f9059fac68c03a5cc \ + --hash=sha256:6943da9c09870490dcfd50c4909c0cc19f434fa6948f61282dc9cb07bcf08160 \ + --hash=sha256:6ad40f85c1207803d581d5d75e9ea25327cd524925699a83dfc03bf8e4ba72b7 \ + --hash=sha256:6b44433a79bdd7af0e3337bd7bbcf53dd1f9b0fa66bf21bcb756060ce32a96c1 \ + --hash=sha256:6bbaa245015d933a4172395baad7874373f162955d73612f0b66b6c2c33b6366 \ + --hash=sha256:7007227f4ea85b40a2f5e5a244479f6a6dfcf906db9b55e812a814a8f0e2c28d \ + --hash=sha256:74884a0aec1f1609190ec8b34b5d58fb3b5353cf22b96161e13e0e835f13518f \ + 
--hash=sha256:7d25fe5571ddb16369054f54cdd883f23de9941476d97f2b92eb6d7d83afe22d \ + --hash=sha256:7e162bdc5e3baad26b2262240be7d2bab36991d85a6a556e48b9dfb402370261 \ + --hash=sha256:814d62678dc3a30f4aa081982d830b7c342cf230ffc9d030b020cb154eeebf9e \ + --hash=sha256:8878a34c5313ee52e20aa50b03138af8d472bae465710fb954d133a9bfd3c38d \ + --hash=sha256:a66a0d94e5b081d5d695e66d6667e91e74d79e273eee95c1747717ba9cb70792 \ + --hash=sha256:a69f5cbf4addcfdf03dda564a671040127a6b7c34cf9fe4973582e68441b63fa \ + --hash=sha256:b00f9f0c334d07709d3f73a7cb8ae63c6ca1a90c790a63b5e7effa666ef96021 \ + --hash=sha256:b6ed71e4a7b4690447b626f499d978aa13197a0e592950e5d7020308f6054698 \ + --hash=sha256:bdf5041e5851526e885af579d2f455348dba68d74f14a32781933569a327fddf \ + --hash=sha256:be034360dd34e62608419f86e799c97d389c10a0e677a25f236a971b2f40dac9 \ + --hash=sha256:cc8f590a5eed30b314ae6b0232d925519ade433f663de79cc3783e4b10d662ba \ + --hash=sha256:cd7a318a15fe6cc4584bf3c4426f092ed08c0fd012cf2a9173114234fe193e11 \ + --hash=sha256:cf19b5f63a59c20306e034e691402b02055c8f4e38bf6792c23cad489162a642 \ + --hash=sha256:cfc781ce442ec407c841e9aa51d0e1024f72b6ec34caa8fdb6ef9576d549acf2 \ + --hash=sha256:dea9f6f8633571e18bc20cad83603072e697103a567f4b0738d52dd0211b4527 \ + --hash=sha256:e4a86a1d5eb2cce83c5972b3930c7c1eac81ab3508464345e2b8e54f119d5505 \ + --hash=sha256:e7106374d4a74ed9ff00c46cc00f0a9f06a0775f8868e423f85d4464d2333679 \ + --hash=sha256:e98a8a585b5668aa9e34d10f7785abf9545fe72663b4bfc16c99a115185ae6a5 \ + --hash=sha256:f64840e68483316eb58d82c376ad3585ca995e69e33b230436de0cdddf7363f9 \ + --hash=sha256:f8f4b0a9e6683e43889852130595c8854d8ae237f2324a053cdd884de936aa9b \ + --hash=sha256:fc45a53219ed30a7f670a6d8c98527af0020e6fd4ee4c0a8fb59f147f06d816c diff --git a/tools/dev_constraints.txt b/tools/dev_constraints.txt index f5140f9c7..10308bd39 100644 --- a/tools/dev_constraints.txt +++ b/tools/dev_constraints.txt @@ -1,7 +1,7 @@ # Specifies Python package versions for development and building Docker images. 
# It includes in particular packages not specified in letsencrypt-auto's requirements file. # Some dev package versions specified here may be overridden by higher level constraints -# files during tests (eg. letsencrypt-auto-source/pieces/dependency-requirements.txt). +# files during tests (eg. tools/certbot_constraints.txt). alabaster==0.7.10 apacheconfig==0.3.2 apipkg==1.4 @@ -16,8 +16,8 @@ backports.functools-lru-cache==1.5 backports.shutil-get-terminal-size==1.0.0 backports.ssl-match-hostname==3.7.0.1 bcrypt==3.1.6 -boto3==1.11.7 -botocore==1.14.7 +boto3==1.17.4 +botocore==1.20.4 cached-property==1.5.1 cloudflare==2.3.1 configparser==3.7.4 diff --git a/tools/docker/core/Dockerfile b/tools/docker/core/Dockerfile index 0d3626853..d2ebe3537 100644 --- a/tools/docker/core/Dockerfile +++ b/tools/docker/core/Dockerfile @@ -14,10 +14,6 @@ WORKDIR /opt/certbot # Copy certbot code COPY CHANGELOG.md README.rst src/ -# We keep the relative path to the requirements file the same because, as of -# writing this, tools/pip_install.py is used in the Dockerfile for Certbot -# plugins and this script expects to find the requirements file there. 
-COPY letsencrypt-auto-source/pieces/dependency-requirements.txt letsencrypt-auto-source/pieces/ COPY tools tools COPY acme src/acme COPY certbot src/certbot diff --git a/tools/pip_install.py b/tools/pip_install.py index c1c81482b..e06650ff2 100755 --- a/tools/pip_install.py +++ b/tools/pip_install.py @@ -57,7 +57,7 @@ def certbot_oldest_processing(tools_path, args, test_constraints): def certbot_normal_processing(tools_path, test_constraints): repo_path = os.path.dirname(tools_path) certbot_requirements = os.path.normpath(os.path.join( - repo_path, 'letsencrypt-auto-source/pieces/dependency-requirements.txt')) + repo_path, 'tools/certbot_constraints.txt')) with open(certbot_requirements, 'r') as fd: certbot_reqs = fd.readlines() with open(os.path.join(tools_path, 'pipstrap_constraints.txt'), 'r') as fd: @@ -76,8 +76,7 @@ def merge_requirements(tools_path, requirements, test_constraints, all_constrain # Here is the order by increasing priority: # 1) The general development constraints (tools/dev_constraints.txt) # 2) The general tests constraints (oldest_requirements.txt or - # certbot-auto's dependency-requirements.txt + pipstrap's constraints - # for the normal processing) + # certbot_constraints.txt + pipstrap's constraints for the normal processing) # 3) The local requirement file, typically local-oldest-requirement in oldest tests files = [os.path.join(tools_path, 'dev_constraints.txt'), test_constraints] if requirements: @@ -134,6 +133,7 @@ def main(args): pip_install_with_print('--force-reinstall --no-deps --requirement "{0}"' .format(requirements)) + print(' '.join(args)) pip_install_with_print(' '.join(args), env=env) diff --git a/tools/rebuild_certbot_constraints.py b/tools/rebuild_certbot_constraints.py new file mode 100755 index 000000000..f5e5d3ca7 --- /dev/null +++ b/tools/rebuild_certbot_constraints.py @@ -0,0 +1,280 @@ +#!/usr/bin/env python +""" +Gather and consolidate the up-to-date dependencies available and required to install certbot +on 
various Linux distributions. It generates a requirements file containing the pinned and hashed +versions, ready to be used by pip to install the certbot dependencies. + +This script is typically used to update the certbot_constraints.txt file. + +To achieve its purpose, this script will start a certbot installation with unpinned dependencies, +then gather them, on various distributions started as Docker containers. + +Usage: tools/rebuild_certbot_constraints.py new_requirements.txt + +NB1: Docker must be installed on the machine running this script. +NB2: Python library 'hashin' must be installed on the machine running this script. +""" +from __future__ import print_function +import re +import shutil +import subprocess +import tempfile +import os +from os.path import dirname, abspath, join +import sys +import argparse + +# The list of docker distributions to test dependencies against. +DISTRIBUTION_LIST = [ + 'ubuntu:20.04', 'ubuntu:18.04', 'debian:buster', + 'centos:8', 'centos:7', 'fedora:29', +] + +# These constraints will be added while gathering dependencies on each distribution. +# It can be used because a particular version for a package is required for any reason, +# or to solve a version conflict between two distributions' requirements. +AUTHORITATIVE_CONSTRAINTS = { + # Too touchy to move to a new version. And will be removed soon + # in favor of pure python parser for Apache. + 'python-augeas': '0.5.0', + # We avoid cryptography 3.4+ since it requires Rust to compile the wheels, and + # this needs some work on the snap builds. + 'cryptography': '3.3.2', +} + +# ./certbot/tools/rebuild_certbot_constraints.py (2 levels from certbot root path) +CERTBOT_REPO_PATH = dirname(dirname(abspath(__file__))) + +# The script will be used to gather dependencies for a given distribution. 
+# - bootstrap_os_packages.sh is used to install relevant OS packages, and set up an initial venv +# - then this venv is used to consistently construct an empty new venv +# - once pipstrap.py has run, this new venv pip-installs certbot runtime (including apache/nginx), +# without pinned dependencies, and respecting input authoritative requirements +# - `certbot plugins` is called to check we have a healthy environment +# - finally current set of dependencies is extracted out of the docker using pip freeze +SCRIPT = r"""#!/bin/sh +set -ex + +cd /tmp/certbot +tests/letstest/scripts/bootstrap_os_packages.sh + +python3 -m venv /tmp/venv + +/tmp/venv/bin/python tools/pipstrap.py +/tmp/venv/bin/pip install -e acme -e certbot -e certbot-apache -e certbot-nginx -c /tmp/constraints.txt +/tmp/venv/bin/certbot plugins +/tmp/venv/bin/pip freeze >> /tmp/workspace/requirements.txt +""" + + +def _read_from(file): + """Read all content of the file, and return it as a string.""" + with open(file, 'r') as file_h: + return file_h.read() + + +def _write_to(file, content): + """Write given string content to the file, overwriting its initial content.""" + with open(file, 'w') as file_h: + file_h.write(content) + + +def _requirements_from_one_distribution(distribution, verbose): + """ + Calculate the Certbot dependencies expressed for the given distribution, using the official + Docker for this distribution, and return the lines of the generated requirements file. 
+ """ + print('===> Gathering dependencies for {0}.'.format(distribution)) + workspace = tempfile.mkdtemp() + script = join(workspace, 'script.sh') + authoritative_constraints = join(workspace, 'constraints.txt') + cid_file = join(workspace, 'cid') + + try: + _write_to(script, SCRIPT) + os.chmod(script, 0o755) + + _write_to(authoritative_constraints, '\n'.join( + '{0}=={1}'.format(package, version) for package, version in AUTHORITATIVE_CONSTRAINTS.items())) + + command = ['docker', 'run', '--rm', '--cidfile', cid_file, + '--network=host', + '-v', '{0}:/tmp/certbot'.format(CERTBOT_REPO_PATH), + '-v', '{0}:/tmp/workspace'.format(workspace), + '-v', '{0}:/tmp/constraints.txt'.format(authoritative_constraints), + distribution, '/tmp/workspace/script.sh'] + sub_stdout = sys.stdout if verbose else subprocess.PIPE + sub_stderr = sys.stderr if verbose else subprocess.STDOUT + process = subprocess.Popen(command, stdout=sub_stdout, stderr=sub_stderr, universal_newlines=True) + stdoutdata, _ = process.communicate() + + if process.returncode: + if stdoutdata: + sys.stderr.write('Output was:\n{0}'.format(stdoutdata)) + raise RuntimeError('Error while gathering dependencies for {0}.'.format(distribution)) + + with open(join(workspace, 'requirements.txt'), 'r') as file_h: + return file_h.readlines() + finally: + if os.path.isfile(cid_file): + cid = _read_from(cid_file) + try: + subprocess.check_output(['docker', 'kill', cid], stderr=subprocess.PIPE) + except subprocess.CalledProcessError: + pass + shutil.rmtree(workspace) + + +def _parse_and_merge_requirements(dependencies_map, requirements_file_lines, distribution): + """ + Extract every requirement from the given requirements file, and merge it in the dependency map. + Merging here means that the map contain every encountered dependency, and the version used in + each distribution. 
+ + Example: + # dependencies_map = { + # } + _parse_and_merge_requirements(dependencies_map, ['cryptography==1.2', 'requests==2.1.0'], 'debian:stretch') + # dependencies_map = { + # 'cryptography': [('1.2', 'debian:stretch')], + # 'requests': [('2.1.0', 'debian:stretch')] + # } + _parse_and_merge_requirements(dependencies_map, ['requests==2.4.0', 'mock==1.3'], 'centos:7') + # dependencies_map = { + # 'cryptography': [('1.2', 'debian:stretch')], + # 'requests': [('2.1.0', 'debian:stretch'), ('2.4.0', 'centos:7')], + # 'mock': [('1.3', 'centos:7')] + # } + """ + for line in requirements_file_lines: + match = re.match(r'([^=]+)==([^=]+)', line.strip()) + if not line.startswith('-e') and not line.startswith('#') and match: + package, version = match.groups() + if package not in ['acme', 'certbot', 'certbot-apache', 'certbot-nginx', 'pkg-resources']: + dependencies_map.setdefault(package, []).append((version, distribution)) + + +def _consolidate_and_validate_dependencies(dependency_map): + """ + Given the dependency map of all requirements found in all distributions for Certbot, + construct an array containing the unit requirements for Certbot to be used by pip, + and the version conflicts, if any, between several distributions for a package. + Return requirements and conflicts as a tuple. 
+ """ + print('===> Consolidate and validate the dependency map.') + requirements = [] + conflicts = [] + for package, versions in dependency_map.items(): + reduced_versions = _reduce_versions(versions) + + if len(reduced_versions) > 1: + version_list = ['{0} ({1})'.format(version, ','.join(distributions)) + for version, distributions in reduced_versions.items()] + conflict = ('package {0} is declared with several versions: {1}' + .format(package, ', '.join(version_list))) + conflicts.append(conflict) + sys.stderr.write('ERROR: {0}\n'.format(conflict)) + else: + requirements.append((package, list(reduced_versions)[0])) + + requirements.sort(key=lambda x: x[0]) + return requirements, conflicts + + +def _reduce_versions(version_dist_tuples): + """ + Get an array of version/distribution tuples, + and reduce it to a map based on the version values. + + Example: [('1.2.0', 'debian:stretch'), ('1.4.0', 'ubuntu:18.04'), ('1.2.0', 'centos:6')] + => {'1.2.0': ['debiqn:stretch', 'centos:6'], '1.4.0': ['ubuntu:18.04']} + """ + version_dist_map = {} + for version, distribution in version_dist_tuples: + version_dist_map.setdefault(version, []).append(distribution) + + return version_dist_map + + +def _write_requirements(dest_file, requirements, conflicts): + """ + Given the list of requirements and conflicts, write a well-formatted requirements file, + whose requirements are hashed signed using hashin library. Conflicts are written at the end + of the generated file. + """ + print('===> Calculating hashes for the requirement file.') + + _write_to(dest_file, '''\ +# This is the flattened list of pinned packages to build certbot deployable artifacts. 
+# To generate this, do (with docker and package hashin installed): +# ``` +# tools/rebuild_certbot_constraints.py \\ +# tools/certbot_constraints.txt +# ``` +# If you want to update a single dependency, run commands similar to these: +# ``` +# pip install hashin +# hashin -r tools/certbot_constraints.txt cryptography==1.5.2 +# ``` +''') + + for req in requirements: + if req[0] in AUTHORITATIVE_CONSTRAINTS: + # If requirement is in AUTHORITATIVE_CONSTRAINTS, take its value instead of the + # computed one to get any environment descriptor that would have been added. + req = (req[0], AUTHORITATIVE_CONSTRAINTS[req[0]]) + subprocess.check_call(['hashin', '{0}=={1}'.format(req[0], req[1]), + '--requirements-file', dest_file]) + + if conflicts: + with open(dest_file, 'a') as file_h: + file_h.write('\n## ! SOME ERRORS OCCURRED ! ##\n') + file_h.write('\n'.join('# {0}'.format(conflict) for conflict in conflicts)) + file_h.write('\n') + + return _read_from(dest_file) + + +def _gather_dependencies(dest_file, verbose): + """ + Main method of this script. Given a destination file path, will write the file + containing the consolidated and hashed requirements for Certbot, validated + against several Linux distributions. 
+ """ + dependencies_map = {} + + for distribution in DISTRIBUTION_LIST: + requirements_file_lines = _requirements_from_one_distribution(distribution, verbose) + _parse_and_merge_requirements(dependencies_map, requirements_file_lines, distribution) + + requirements, conflicts = _consolidate_and_validate_dependencies(dependencies_map) + + return _write_requirements(dest_file, requirements, conflicts) + + +if __name__ == '__main__': + parser = argparse.ArgumentParser( + description=('Build a sanitized, pinned and hashed requirements file for certbot deployable' + ' artifacts, validated against several OS distributions using Docker.')) + parser.add_argument('requirements_path', + help='path for the generated requirements file') + parser.add_argument('--verbose', '-v', action='store_true', + help='verbose will display all output during docker execution') + + namespace = parser.parse_args() + + try: + subprocess.check_output(['hashin', '--version']) + except subprocess.CalledProcessError: + raise RuntimeError('Python library hashin is not installed in the current environment.') + + try: + subprocess.check_output(['docker', '--version'], stderr=subprocess.STDOUT) + except subprocess.CalledProcessError: + raise RuntimeError('Docker is not installed or accessible to current user.') + + file_content = _gather_dependencies(namespace.requirements_path, namespace.verbose) + + print(file_content) + print('===> Rebuilt requirement file is available on path {0}' + .format(abspath(namespace.requirements_path))) diff --git a/tools/snap/generate_dnsplugins_all.sh b/tools/snap/generate_dnsplugins_all.sh index 40404bf9b..976b0dd7b 100755 --- a/tools/snap/generate_dnsplugins_all.sh +++ b/tools/snap/generate_dnsplugins_all.sh @@ -10,7 +10,7 @@ for PLUGIN_PATH in "${CERTBOT_DIR}"/certbot-dns-*; do bash "${CERTBOT_DIR}"/tools/snap/generate_dnsplugins_postrefreshhook.sh $PLUGIN_PATH # Create constraints file "${CERTBOT_DIR}"/tools/merge_requirements.py tools/dev_constraints.txt \ - 
<("${CERTBOT_DIR}"/tools/strip_hashes.py letsencrypt-auto-source/pieces/dependency-requirements.txt) \ + <("${CERTBOT_DIR}"/tools/strip_hashes.py tools/certbot_constraints.txt) \ <("${CERTBOT_DIR}"/tools/strip_hashes.py tools/pipstrap_constraints.txt) \ > "${PLUGIN_PATH}"/snap-constraints.txt done diff --git a/windows-installer/construct.py b/windows-installer/construct.py index 60834e7e5..eb199a7e1 100644 --- a/windows-installer/construct.py +++ b/windows-installer/construct.py @@ -2,6 +2,7 @@ import contextlib import ctypes import os +import re import shutil import struct import subprocess @@ -52,6 +53,21 @@ def _compile_wheels(repo_path, build_path, venv_python): command.extend(wheels_project) subprocess.check_call(command, env=env) + # Cryptography uses now a unique wheel name "cryptography-VERSION-cpXX-abi3-win32.whl where + # cpXX is the lowest supported version of Python (eg. cp36 says that the wheel is compatible + # with Python 3.6+). While technically valid to describe a wheel compliant with the Stable + # Application Binary Interface, this naming convention makes pynsist falsely think that the + # wheel is compatible with Python 3.6 only. + # Let's trick pynsist by renaming the wheel until this is fixed upstream. 
+ for file in os.listdir(wheels_path): + # Given that our Python version is 3.8, this rename files like + # cryptography-VERSION-cpXX-abi3-win32.whl into cryptography-VERSION-cp38-abi3-win32.whl + renamed = re.sub(r'^(.*)-cp\d+-abi3-(\w+)\.whl$', r'\1-cp{0}{1}-abi3-\2.whl' + .format(PYTHON_VERSION[0], PYTHON_VERSION[1]), file) + print(renamed) + if renamed != file: + os.replace(os.path.join(wheels_path, file), os.path.join(wheels_path, renamed)) + def _prepare_build_tools(venv_path, venv_python, repo_path): print('Prepare build tools') @@ -63,7 +79,7 @@ def _prepare_build_tools(venv_path, venv_python, repo_path): @contextlib.contextmanager def _prepare_constraints(repo_path): - reqs_certbot = os.path.join(repo_path, 'letsencrypt-auto-source', 'pieces', 'dependency-requirements.txt') + reqs_certbot = os.path.join(repo_path, 'tools', 'certbot_constraints.txt') reqs_pipstrap = os.path.join(repo_path, 'tools', 'pipstrap_constraints.txt') constraints_certbot = subprocess.check_output( [sys.executable, os.path.join(repo_path, 'tools', 'strip_hashes.py'), reqs_certbot], -- cgit v1.2.3