
github.com/certbot/certbot.git
author     Erik Rose <erik@mozilla.com>   2016-02-19 20:36:11 +0300
committer  Erik Rose <erik@mozilla.com>   2016-02-19 23:13:58 +0300
commit     b95a01a15cb20cfd5249180d3a1e9e7da2110d36 (patch)
tree       8ab1a47abd86c8e47d539ffcf985e048697c5781 /letsencrypt-auto
parent     9b21efc6b817884519fc8d8bdb31d7b490d8edee (diff)
Turn the root-level letsencrypt-auto symlink into a regular file. Close #2501.
It will always be a copy of the latest release version, 0.4 in this case. (Modify the release script to make that so.) This way, people using the old method of running le-auto from a git checkout will not end up using a bleeding-edge version, letting us work on the tip-of-tree version more freely.
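As a sketch of what the accompanying release-script change could look like (paths as they appear in this diff; exactly how it hooks into the release script is an assumption), each release would simply refresh the root-level copy from the generated source file:

    cp letsencrypt-auto-source/letsencrypt-auto letsencrypt-auto
    git add letsencrypt-auto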
Diffstat (limited to 'letsencrypt-auto')
-rwxr-xr-x [l---------]  letsencrypt-auto  1810
1 files changed, 1809 insertions, 1 deletions
diff --git a/letsencrypt-auto b/letsencrypt-auto
index af7e83a70..9218bdc52 120000..100755
--- a/letsencrypt-auto
+++ b/letsencrypt-auto
@@ -1 +1,1809 @@
-letsencrypt-auto-source/letsencrypt-auto
\ No newline at end of file
+#!/bin/sh
+#
+# Download and run the latest release version of the Let's Encrypt client.
+#
+# NOTE: THIS SCRIPT IS AUTO-GENERATED AND SELF-UPDATING
+#
+# IF YOU WANT TO EDIT IT LOCALLY, *ALWAYS* RUN YOUR COPY WITH THE
+# "--no-self-upgrade" FLAG
+#
+# IF YOU WANT TO SEND PULL REQUESTS, THE REAL SOURCE FOR THIS FILE IS
+# letsencrypt-auto-source/letsencrypt-auto.template AND
+# letsencrypt-auto-source/pieces/bootstrappers/*
+
+set -e # Work even if somebody does "sh thisscript.sh".
+
+# Note: you can set XDG_DATA_HOME or VENV_PATH before running this script,
+# if you want to change where the virtual environment will be installed
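+# (e.g. VENV_PATH=/opt/letsencrypt ./letsencrypt-auto would build the virtualenv under /opt/letsencrypt instead)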
+XDG_DATA_HOME=${XDG_DATA_HOME:-~/.local/share}
+VENV_NAME="letsencrypt"
+VENV_PATH=${VENV_PATH:-"$XDG_DATA_HOME/$VENV_NAME"}
+VENV_BIN=${VENV_PATH}/bin
+LE_AUTO_VERSION="0.4.0"
+
+# This script takes the same arguments as the main letsencrypt program, but it
+# additionally responds to --verbose (more output) and --debug (allow support
+# for experimental platforms)
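+# e.g. (the domain here is purely illustrative):
+#   ./letsencrypt-auto certonly --standalone -d example.com --verbose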
+for arg in "$@" ; do
+ # This first clause is redundant with the third, but hedging on portability
+ if [ "$arg" = "-v" ] || [ "$arg" = "--verbose" ] || echo "$arg" | grep -E -- "-v+$" ; then
+ VERBOSE=1
+ elif [ "$arg" = "--no-self-upgrade" ] ; then
+ # Do not upgrade this script (also prevents client upgrades, because each
+ # copy of the script pins a hash of the python client)
+ NO_SELF_UPGRADE=1
+ elif [ "$arg" = "--os-packages-only" ] ; then
+ OS_PACKAGES_ONLY=1
+ elif [ "$arg" = "--debug" ]; then
+ DEBUG=1
+ fi
+done
+
+# letsencrypt-auto needs root access to bootstrap OS dependencies, and
+# letsencrypt itself needs root access for almost all modes of operation
+# The "normal" case is that sudo is used for the steps that need root, but
+# this script *can* be run as root (not recommended), or fall back to using
+# `su`
+if test "`id -u`" -ne "0" ; then
+ if command -v sudo 1>/dev/null 2>&1; then
+ SUDO=sudo
+ else
+ echo \"sudo\" is not available, will use \"su\" for installation steps...
+ # Because the parameters to `su -c` have to be passed as a single string,
+ # we need to escape them properly
+ su_sudo() {
+ args=""
+ # This `while` loop iterates over all parameters given to this function.
+ # For each parameter, all `'` will be replaced by `'"'"'`, and the escaped string
+ # will be wrapped in a pair of `'`, then appended to `$args` string
+ # For example, `echo "It's only 1\$\!"` will be escaped to:
+ #     'echo' 'It'"'"'s only 1$!'
+ #     │      │   └┼┘│
+ #     │      │    │ └── `'s only 1$!'` the literal string
+ #     │      │    └── `\"'\"` is a single quote (as a string)
+ #     │      └── `'It'`, to be concatenated with the strings following it
+ #     └── `echo` wrapped in a pair of `'`, it's totally fine for the shell command itself
+ while [ $# -ne 0 ]; do
+ args="$args'$(printf "%s" "$1" | sed -e "s/'/'\"'\"'/g")' "
+ shift
+ done
+ su root -c "$args"
+ }
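+ # e.g. `su_sudo apt-get update` runs: su root -c "'apt-get' 'update' "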
+ SUDO=su_sudo
+ fi
+else
+ SUDO=
+fi
+
+ExperimentalBootstrap() {
+ # Arguments: Platform name, bootstrap function name
+ if [ "$DEBUG" = 1 ]; then
+ if [ "$2" != "" ]; then
+ echo "Bootstrapping dependencies via $1..."
+ $2
+ fi
+ else
+ echo "WARNING: $1 support is very experimental at present..."
+ echo "if you would like to work on improving it, please ensure you have backups"
+ echo "and then run this script again with the --debug flag!"
+ exit 1
+ fi
+}
+
+DeterminePythonVersion() {
+ if command -v python2.7 > /dev/null ; then
+ export LE_PYTHON=${LE_PYTHON:-python2.7}
+ elif command -v python27 > /dev/null ; then
+ export LE_PYTHON=${LE_PYTHON:-python27}
+ elif command -v python2 > /dev/null ; then
+ export LE_PYTHON=${LE_PYTHON:-python2}
+ elif command -v python > /dev/null ; then
+ export LE_PYTHON=${LE_PYTHON:-python}
+ else
+ echo "Cannot find any Pythons... please install one!"
+ exit 1
+ fi
+
+ PYVER=`"$LE_PYTHON" --version 2>&1 | cut -d" " -f 2 | cut -d. -f1,2 | sed 's/\.//'`
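+ # e.g. "Python 2.7.11" -> "2.7.11" -> "2.7" -> "27"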
+ if [ $PYVER -lt 26 ]; then
+ echo "You have an ancient version of Python entombed in your operating system..."
+ echo "This isn't going to work; you'll need at least version 2.6."
+ exit 1
+ fi
+}
+
+BootstrapDebCommon() {
+ # Current version tested with:
+ #
+ # - Ubuntu
+ # - 14.04 (x64)
+ # - 15.04 (x64)
+ # - Debian
+ # - 7.9 "wheezy" (x64)
+ # - sid (2015-10-21) (x64)
+
+ # Past versions tested with:
+ #
+ # - Debian 8.0 "jessie" (x64)
+ # - Raspbian 7.8 (armhf)
+
+ # Believed not to work:
+ #
+ # - Debian 6.0.10 "squeeze" (x64)
+
+ $SUDO apt-get update || echo apt-get update hit problems but continuing anyway...
+
+ # virtualenv binary can be found in different packages depending on
+ # distro version (#346)
+
+ virtualenv=
+ if apt-cache show virtualenv > /dev/null 2>&1; then
+ virtualenv="virtualenv"
+ fi
+
+ if apt-cache show python-virtualenv > /dev/null 2>&1; then
+ virtualenv="$virtualenv python-virtualenv"
+ fi
+
+ augeas_pkg="libaugeas0 augeas-lenses"
+ AUGVERSION=`apt-cache show --no-all-versions libaugeas0 | grep ^Version: | cut -d" " -f2`
+
+ AddBackportRepo() {
+ # Args: backport repo name, sources.list line for it
+ BACKPORT_NAME="$1"
+ BACKPORT_SOURCELINE="$2"
+ if ! grep -v -e ' *#' /etc/apt/sources.list | grep -q "$BACKPORT_NAME" ; then
+ # This can theoretically error if sources.list.d is empty, but in that case we don't care.
+ if ! grep -v -e ' *#' /etc/apt/sources.list.d/* 2>/dev/null | grep -q "$BACKPORT_NAME"; then
+ /bin/echo -n "Installing augeas from $BACKPORT_NAME in 3 seconds..."
+ sleep 1s
+ /bin/echo -ne "\e[0K\rInstalling augeas from $BACKPORT_NAME in 2 seconds..."
+ sleep 1s
+ /bin/echo -e "\e[0K\rInstalling augeas from $BACKPORT_NAME in 1 second ..."
+ sleep 1s
+ if echo $BACKPORT_NAME | grep -q wheezy ; then
+ /bin/echo '(Backports are only installed if explicitly requested via "apt-get install -t wheezy-backports")'
+ fi
+
+ $SUDO sh -c "echo $BACKPORT_SOURCELINE >> /etc/apt/sources.list.d/$BACKPORT_NAME.list"
+ $SUDO apt-get update
+ fi
+ fi
+ $SUDO apt-get install -y --no-install-recommends -t "$BACKPORT_NAME" $augeas_pkg
+ augeas_pkg=
+
+ }
+
+
+ if dpkg --compare-versions 1.0 gt "$AUGVERSION" ; then
+ if lsb_release -a | grep -q wheezy ; then
+ AddBackportRepo wheezy-backports "deb http://http.debian.net/debian wheezy-backports main"
+ elif lsb_release -a | grep -q precise ; then
+ # XXX add ARM case
+ AddBackportRepo precise-backports "deb http://archive.ubuntu.com/ubuntu precise-backports main restricted universe multiverse"
+ else
+ echo "No libaugeas0 version is available that's new enough to run the"
+ echo "Let's Encrypt apache plugin..."
+ fi
+ # XXX add a case for ubuntu PPAs
+ fi
+
+ $SUDO apt-get install -y --no-install-recommends \
+ python \
+ python-dev \
+ $virtualenv \
+ gcc \
+ dialog \
+ $augeas_pkg \
+ libssl-dev \
+ libffi-dev \
+ ca-certificates \
+
+
+
+ if ! command -v virtualenv > /dev/null ; then
+ echo Failed to install a working \"virtualenv\" command, exiting
+ exit 1
+ fi
+}
+
+BootstrapRpmCommon() {
+ # Tested with:
+ # - Fedora 22, 23 (x64)
+ # - Centos 7 (x64: on DigitalOcean droplet)
+ # - CentOS 7 Minimal install in a Hyper-V VM
+
+ if type dnf 2>/dev/null
+ then
+ tool=dnf
+ elif type yum 2>/dev/null
+ then
+ tool=yum
+
+ else
+ echo "Neither yum nor dnf found. Aborting bootstrap!"
+ exit 1
+ fi
+
+ # Some distros and older versions of current distros use a "python27"
+ # instead of "python" naming convention. Try both conventions.
+ if ! $SUDO $tool install -y \
+ python \
+ python-devel \
+ python-virtualenv \
+ python-tools \
+ python-pip
+ then
+ if ! $SUDO $tool install -y \
+ python27 \
+ python27-devel \
+ python27-virtualenv \
+ python27-tools \
+ python27-pip
+ then
+ echo "Could not install Python dependencies. Aborting bootstrap!"
+ exit 1
+ fi
+ fi
+
+ if ! $SUDO $tool install -y \
+ gcc \
+ dialog \
+ augeas-libs \
+ openssl \
+ openssl-devel \
+ libffi-devel \
+ redhat-rpm-config \
+ ca-certificates
+ then
+ echo "Could not install additional dependencies. Aborting bootstrap!"
+ exit 1
+ fi
+
+
+ if $SUDO $tool list installed "httpd" >/dev/null 2>&1; then
+ if ! $SUDO $tool install -y mod_ssl
+ then
+ echo "Apache found, but mod_ssl could not be installed."
+ fi
+ fi
+}
+
+BootstrapSuseCommon() {
+ # SLE12 doesn't have python-virtualenv
+
+ $SUDO zypper -nq in -l \
+ python \
+ python-devel \
+ python-virtualenv \
+ gcc \
+ dialog \
+ augeas-lenses \
+ libopenssl-devel \
+ libffi-devel \
+ ca-certificates
+}
+
+BootstrapArchCommon() {
+ # Tested with:
+ # - ArchLinux (x86_64)
+ #
+ # "python-virtualenv" is Python3, but "python2-virtualenv" provides
+ # only "virtualenv2" binary, not "virtualenv" necessary in
+ # ./tools/_venv_common.sh
+
+ deps="
+ python2
+ python-virtualenv
+ gcc
+ dialog
+ augeas
+ openssl
+ libffi
+ ca-certificates
+ pkg-config
+ "
+
+ missing=$("$SUDO" pacman -T $deps)
+
+ if [ "$missing" ]; then
+ "$SUDO" pacman -S --needed $missing
+ fi
+}
+
+BootstrapGentooCommon() {
+ PACKAGES="
+ dev-lang/python:2.7
+ dev-python/virtualenv
+ dev-util/dialog
+ app-admin/augeas
+ dev-libs/openssl
+ dev-libs/libffi
+ app-misc/ca-certificates
+ virtual/pkgconfig"
+
+ case "$PACKAGE_MANAGER" in
+ (paludis)
+ "$SUDO" cave resolve --keep-targets if-possible $PACKAGES -x
+ ;;
+ (pkgcore)
+ "$SUDO" pmerge --noreplace $PACKAGES
+ ;;
+ (portage|*)
+ "$SUDO" emerge --noreplace $PACKAGES
+ ;;
+ esac
+}
+
+BootstrapFreeBsd() {
+ "$SUDO" pkg install -Ay \
+ python \
+ py27-virtualenv \
+ augeas \
+ libffi
+}
+
+BootstrapMac() {
+ if ! hash brew 2>/dev/null; then
+ echo "Homebrew Not Installed\nDownloading..."
+ ruby -e "$(curl -fsSL https://raw.githubusercontent.com/Homebrew/install/master/install)"
+ fi
+
+ brew install augeas
+ brew install dialog
+
+ if ! hash pip 2>/dev/null; then
+ echo "pip Not Installed\nInstalling python from Homebrew..."
+ brew install python
+ fi
+
+ if ! hash virtualenv 2>/dev/null; then
+ echo "virtualenv Not Installed\nInstalling with pip"
+ pip install virtualenv
+ fi
+}
+
+
+# Install required OS packages:
+Bootstrap() {
+ if [ -f /etc/debian_version ]; then
+ echo "Bootstrapping dependencies for Debian-based OSes..."
+ BootstrapDebCommon
+ elif [ -f /etc/redhat-release ]; then
+ echo "Bootstrapping dependencies for RedHat-based OSes..."
+ BootstrapRpmCommon
+ elif [ -f /etc/os-release ] && grep -q openSUSE /etc/os-release ; then
+ echo "Bootstrapping dependencies for openSUSE-based OSes..."
+ BootstrapSuseCommon
+ elif [ -f /etc/arch-release ]; then
+ if [ "$DEBUG" = 1 ]; then
+ echo "Bootstrapping dependencies for Archlinux..."
+ BootstrapArchCommon
+ else
+ echo "Please use pacman to install letsencrypt packages:"
+ echo "# pacman -S letsencrypt letsencrypt-apache"
+ echo
+ echo "If you would like to use the virtualenv way, please run the script again with the"
+ echo "--debug flag."
+ exit 1
+ fi
+ elif [ -f /etc/manjaro-release ]; then
+ ExperimentalBootstrap "Manjaro Linux" BootstrapArchCommon
+ elif [ -f /etc/gentoo-release ]; then
+ ExperimentalBootstrap "Gentoo" BootstrapGentooCommon
+ elif uname | grep -iq FreeBSD ; then
+ ExperimentalBootstrap "FreeBSD" BootstrapFreeBsd
+ elif uname | grep -iq Darwin ; then
+ ExperimentalBootstrap "Mac OS X" BootstrapMac
+ elif grep -iq "Amazon Linux" /etc/issue ; then
+ ExperimentalBootstrap "Amazon Linux" BootstrapRpmCommon
+ else
+ echo "Sorry, I don't know how to bootstrap Let's Encrypt on your operating system!"
+ echo
+ echo "You will need to bootstrap, configure virtualenv, and run a peep install manually."
+ echo "Please see https://letsencrypt.readthedocs.org/en/latest/contributing.html#prerequisites"
+ echo "for more info."
+ fi
+}
+
+TempDir() {
+ mktemp -d 2>/dev/null || mktemp -d -t 'le' # Linux || OS X
+}
+
+
+
+if [ "$NO_SELF_UPGRADE" = 1 ]; then
+ # Phase 2: Create venv, install LE, and run.
+
+ if [ -f "$VENV_BIN/letsencrypt" ]; then
+ INSTALLED_VERSION=$("$VENV_BIN/letsencrypt" --version 2>&1 | cut -d " " -f 2)
+ else
+ INSTALLED_VERSION="none"
+ fi
+ if [ "$LE_AUTO_VERSION" != "$INSTALLED_VERSION" ]; then
+ echo "Creating virtual environment..."
+ DeterminePythonVersion
+ rm -rf "$VENV_PATH"
+ if [ "$VERBOSE" = 1 ]; then
+ virtualenv --no-site-packages --python "$LE_PYTHON" "$VENV_PATH"
+ else
+ virtualenv --no-site-packages --python "$LE_PYTHON" "$VENV_PATH" > /dev/null
+ fi
+
+ echo "Installing Python packages..."
+ TEMP_DIR=$(TempDir)
+ # There is no $ interpolation due to quotes on starting heredoc delimiter.
+ # -------------------------------------------------------------------------
+ cat << "UNLIKELY_EOF" > "$TEMP_DIR/letsencrypt-auto-requirements.txt"
+# This is the flattened list of packages letsencrypt-auto installs. To generate
+# this, do `pip install --no-cache-dir -e acme -e . -e letsencrypt-apache`, and
+# then use `hashin` or a more secure method to gather the hashes.
+
+# sha256: wxZH7baf09RlqEfqMVfTe-0flfGXYLEaR6qRwEtmYxQ
+# sha256: YrCJpVvh2JSc0rx-DfC9254Cj678jDIDjMhIYq791uQ
+argparse==1.4.0
+
+# sha256: U8HJ3bMEMVE-t_PN7wo-BrDxJSGIqqd0SvD1pM1F268
+# sha256: pWj0nfyhKo2fNwGHJX78WKOBCeHu5xTZKFYdegGKZPg
+# sha256: gJxsqM-8ruv71DK0V2ABtA04_yRjdzy1dXfXXhoCC8M
+# sha256: hs3KLNnLpBQiIwOQ3xff6qnzRKkR45dci-naV7NVSOk
+# sha256: JLE9uErsOFyiPHuN7YPvi7QXe8GB0UdY-fl1vl0CDYY
+# sha256: lprv_XwOCX9r4e_WgsFWriJlkaB5OpS2wtXkKT9MjU4
+# sha256: AA81jUsPokn-qrnBzn1bL-fgLnvfaAbCZBhQX8aF4mg
+# sha256: qdhvRgu9g1ii1ROtd54_P8h447k6ALUAL66_YW_-a5w
+# sha256: MSezqzPrI8ysBx-aCAJ0jlz3xcvNAkgrsGPjW0HbsLA
+# sha256: 4rLUIjZGmkAiTTnntsYFdfOIsvQj81TH7pClt_WMgGU
+# sha256: jC3Mr-6JsbQksL7GrS3ZYiyUnSAk6Sn12h7YAerHXx0
+# sha256: pN56TRGu1Ii6tPsU9JiFh6gpvs5aIEM_eA1uM7CAg8s
+# sha256: XKj-MEJSZaSSdOSwITobyY9LE0Sa5elvmEdx5dg-WME
+# sha256: pP04gC9Z5xTrqBoCT2LbcQsn2-J6fqEukRU3MnqoTTA
+# sha256: hs1pErvIPpQF1Kc81_S07oNTZS0tvHyCAQbtW00bqzo
+# sha256: jx0XfTZOo1kAQVriTKPkcb49UzTtBBkpQGjEn0WROZg
+cffi==1.4.2
+
+# sha256: O1CoPdWBSd_O6Yy2VlJl0QtT6cCivKfu73-19VJIkKc
+ConfigArgParse==0.10.0
+
+# sha256: ovVlB3DhyH-zNa8Zqbfrc_wFzPIhROto230AzSvLCQI
+configobj==5.0.6
+
+# sha256: 1U_hszrB4J8cEj4vl0948z6V1h1PSALdISIKXD6MEX0
+# sha256: B1X2aE4RhSAFs2MTdh7ctbqEOmTNAizhrC3L1JqTYG0
+# sha256: zjhNo4lZlluh90VKJfVp737yqxRd8ueiml4pS3TgRnc
+# sha256: GvQDkV3LmWHDB2iuZRr6tpKC0dpaut-mN1IhrBGHdQM
+# sha256: ag08d91PH-W8ZfJ--3fsjQSjiNpesl66DiBAwJgZ30o
+# sha256: KdelgcO6_wTh--IAaltHjZ7cfPmib8ijWUkkf09lA3k
+# sha256: IPAWEKpAh_bVadjMIMR4uB8DhIYnWqqx3Dx12VAsZ-A
+# sha256: l9hGUIulDVomml82OK4cFmWbNTFaH0B_oVF2cH2j0Jc
+# sha256: djfqRMLL1NsvLKccsmtmPRczORqnafi8g2xZVilbd5g
+# sha256: gR-eqJVbPquzLgQGU0XDB4Ui5rPuPZLz0n08fNcWpjM
+# sha256: DXCMjYz97Qm4fCoLqHY856ZjWG4EPmrEL9eDHpKQHLY
+# sha256: Efnq11YqPgATWGytM5o_em9Yg8zhw7S5jhrGnft3p_Y
+# sha256: dNhnm55-0ePs-wq1NNyTUruxz3PTYsmQkJTAlyivqJY
+# sha256: z1Hd-123eBaiB1OKZgEUuC4w4IAD_uhJmwILi4SA2sU
+# sha256: 47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU
+# sha256: dITvgYGUFB3_eUdf-74vd6-FHiw7v-Lk1ZEjEi-KTjM
+# sha256: 7gLB6J7l7pUBV6VK1YTXN8Ec83putMCFPozz8n6WLcA
+# sha256: pfGPaxhQpVVKV9v2YsrSUSpGBW5paHJqmFjngN1bnQo
+# sha256: 26GA8xrb5xi6qdbPirY0hJSwlLK4GAL_8zvVDSfRPnM
+# sha256: 5RinlLjzjoOC9_B3kUGBPOtIE6z9MRVBwNsOGJ69eN4
+# sha256: f1FFn4TWcERCdeYVg59FQsk1R6Euk4oKSQba_l994VM
+cryptography==1.1.2
+
+# sha256: JHXX_N31lR6S_1RpcnWIAt5SYL9Akxmp8ZNOa7yLHcc
+# sha256: NZB977D5krdat3iPZf7cHPIP-iJojg5vbxKvwGs-pQE
+enum34==1.1.2
+
+# sha256: _1rZ4vjZ5dHou_vPR3IqtSfPDVHK7u2dptD0B5k4P94
+# sha256: 2Dzm3wsOpmGHAP4ds1NSY5Goo62ht6ulL-16Ydp3IDM
+funcsigs==0.4
+
+# sha256: my_FC9PEujBrllG2lBHvIgJtTYM1uTr8IhTO8SRs5wc
+# sha256: FhmarZOLKQ9b4QV8Dh78ZUYik5HCPOphypQMEV99PTs
+idna==2.0
+
+# sha256: k1cSgAzkdgcB2JrWd2Zs1SaR_S9vCzQMi0I5o8F5iKU
+# sha256: WjGCsyKnBlJcRigspvBk0noCz_vUSfn0dBbx3JaqcbA
+ipaddress==1.0.16
+
+# sha256: 54vpwKDfy6xxL-BPv5K5bN2ugLG4QvJCSCFMhJbwBu8
+# sha256: Syb_TnEQ23butvWntkqCYjg51ZXCA47tpmLyott46Xw
+linecache2==1.0.0
+
+# sha256: 6MFV_evZxLywgQtO0BrhmHVUse4DTddTLXuP2uOKYnQ
+ndg-httpsclient==0.4.0
+
+# sha256: HDW0rCBs7y0kgWyJ-Jzyid09OM98RJuz-re_bUPwGx8
+ordereddict==1.1
+
+# sha256: OnTxAPkNZZGDFf5kkHca0gi8PxOv0y01_P5OjQs7gSs
+# sha256: Paa-K-UG9ZzOMuGeMOIBBT4btNB-JWaJGOAPikmtQKs
+parsedatetime==1.5
+
+# sha256: Rsjbda51oFa9HMB_ohc0_i5gPRGgeDPswe63TDXHLgw
+# sha256: 4hJ2JqkebIhduJZol22zECDwry2nKJJLVkgPx8zwlkk
+pbr==1.8.1
+
+# sha256: WE8LKfzF1SO0M8uJGLL8dNZ-MO4LRKlbrwMVKPQkYZ8
+# sha256: KMoLbp2Zqo3Chuh0ekRxNitpgSolKR3im2qNcKFUWg0
+# sha256: FnrV__UqZyxN3BwaCyUUbWgT67CKmqsKOsRfiltmnDs
+# sha256: 5t6mFzqYhye7Ij00lzSa1c3vXAsoLv8tg-X5BlxT-F8
+# sha256: KvXgpKrWYEmVXQc0qk49yMqhep6vi0waJ6Xx7m5A9vw
+# sha256: 2YhNwNwuVeJEjklXeNyYmcHIvzeusvQ0wb6nSvk8JoM
+# sha256: 4nwv5t_Mhzi-PSxaAi94XrcpcQV-Gp4eNPunO86KcaY
+# sha256: Za_W_syPOu0J7kvmNYO8jrRy8GzqpP4kxNHVoaPA4T8
+# sha256: uhxVj7_N-UUVwjlLEVXB3FbivCqcF9MDSYJ8ntimfkY
+# sha256: upXqACLctk028MEzXAYF-uNb3z4P6o2S9dD2RWo15Vs
+# sha256: QhtlkdFrUJqqjYwVgh1mu5TLSo3EOFytXFG4XUoJbYU
+# sha256: MmswXL22-U2vv-LCaxHaiLCrB7igf4GIq511_wxuhBo
+# sha256: mu3lsrb-RrN0jqjlIURDiQ0WNAJ77z0zt9rRZVaDAng
+# sha256: c77R24lNGqnDx-YR0wLN6reuig3A7q92cnh42xrFzYc
+# sha256: k1td1tVYr1EvQlAafAj0HXr_E5rxuzlZ2qOruFkjTWw
+# sha256: TKARHPFX3MDy9poyPFtUeHGNaNRfyUNdhL4OwPGGIVs
+# sha256: tvE8lTmKP88CJsTc-kSFYLpYZSWc2W7CgQZYZR6TIYk
+# sha256: 7mvjDRY1u96kxDJdUH3IoNu95-HBmL1i3bn0MZi54hQ
+# sha256: 36eGhYwmjX-74bYXXgAewCc418-uCnzne_m2Ua9nZyk
+# sha256: qnf53nKvnBbMKIzUokz1iCQ4j1fXqB5ADEYWRXYphw4
+# sha256: 9QAJM1fQTagUDYeTLKwuVO9ZKlTKinQ6uyhQ9gwsIus
+psutil==3.3.0
+
+# sha256: YfnZnjzvZf6xv-Oi7vepPrk4GdNFv1S81C9OY9UgTa4
+# sha256: GAKm3TIEXkcqQZ2xRBrsq0adM-DSdJ4ZKr3sUhAXJK8
+# sha256: NQJc2UIsllBJEvBOLxX-eTkKhZe0MMLKXQU0z5MJ_6A
+# sha256: L5btWgwynKFiMLMmyhK3Rh7I9l4L4-T5l1FvNr-Co0U
+# sha256: KP7kQheZHPrZ5qC59-PyYEHiHryWYp6U5YXM0F1J-mU
+# sha256: Mm56hUoX-rB2kSBHR2lfj2ktZ0WIo1XEQfsU9mC_Tmg
+# sha256: zaWpBIVwnKZ5XIYFbD5f5yZgKLBeU_HVJ_35OmNlprg
+# sha256: DLKhR0K1Q_3Wj5MaFM44KRhu0rGyJnoGeHOIyWst2b4
+# sha256: UZH_a5Em0sA53Yf4_wJb7SdLrwf6eK-kb1VrGtcmXW4
+# sha256: gyPgNjey0HLMcEEwC6xuxEjDwolQq0A3YDZ4jpoa9ik
+# sha256: hTys2W0fcB3dZ6oD7MBfUYkBNbcmLpInEBEvEqLtKn8
+pyasn1==0.1.9
+
+# sha256: eVm0p0q9wnsxL-0cIebK-TCc4LKeqGtZH9Lpns3yf3M
+pycparser==2.14
+
+# sha256: iORea7Jd_tJyoe8ucoRh1EtjTCzWiemJtuVqNJxaOuU
+# sha256: 8KJgcNbbCIHei8x4RpNLfDyTDY-cedRYg-5ImEvA1nI
+pyOpenSSL==0.15.1
+
+# sha256: 7qMYNcVuIJavQ2OldFp4SHimHQQ-JH06bWoKMql0H1Y
+# sha256: jfvGxFi42rocDzYgqMeACLMjomiye3NZ6SpK5BMl9TU
+pyRFC3339==1.0
+
+# sha256: Z9WdZs26jWJOA4m4eyqDoXbyHxaodVO1D1cDsj8pusI
+python-augeas==0.5.0
+
+# sha256: BOk_JJlcQ92Q8zjV2GXKcs4_taU1jU2qSWVXHbNfw-w
+# sha256: Pm9ZP-rZj4pSa8PjBpM1MyNuM3KfVS9SiW6lBPVTE_o
+python2-pythondialog==3.3.0
+
+# sha256: Or5qbT_C-75MYBRCEfRdou2-MYKm9lEa9ru6BZix-ZI
+# sha256: k575weEiTZgEBWial__PeCjFbRUXsx1zRkNWwfK3dp4
+# sha256: 6tSu-nAHJJ4F5RsBCVcZ1ajdlXYAifVzCqxWmLGTKRg
+# sha256: PMoN8IvQ7ZhDI5BJTOPe0AP15mGqRgvnpzS__jWYNgU
+# sha256: Pt5HDT0XujwHY436DRBFK8G25a0yYSemW6d-aq6xG-w
+# sha256: aMR5ZPcYbuwwaxNilidyK5B5zURH7Z5eyuzU6shMpzQ
+# sha256: 3V05kZUKrkCmyB3hV4lC5z1imAjO_FHRLNFXmA5s_Bg
+# sha256: p3xSBiwH63x7MFRdvHPjKZW34Rfup1Axe1y1x6RhjxQ
+# sha256: ga-a7EvJYKmgEnxIjxh3La5GNGiSM_BvZUQ-exHr61E
+# sha256: 4Hmx2txcBiRswbtv4bI6ULHRFz8u3VEE79QLtzoo9AY
+# sha256: -9JnRncsJMuTyLl8va1cueRshrvbG52KdD7gDi-x_F0
+# sha256: mSZu8wo35Dky3uwrfKc-g8jbw7n_cD7HPsprHa5r7-o
+# sha256: i2zhyZOQl4O8luC0806iI7_3pN8skL25xODxrJKGieM
+pytz==2015.7
+
+# sha256: ET-7pVManjSUW302szoIToul0GZLcDyBp8Vy2RkZpbg
+# sha256: xXeBXdAPE5QgP8ROuXlySwmPiCZKnviY7kW45enPWH8
+requests==2.9.1
+
+# sha256: D_eMQD2bzPWkJabTGhKqa0fxwhyk3CVzp-LzKpczXrE
+# sha256: EF-NaGFvgkjiS_DpNy7wTTzBAQTxmA9U1Xss5zpa1Wo
+six==1.10.0
+
+# sha256: glPOvsSxkJTWfMXtWvmb8duhKFKSIm6Yoxkp-HpdayM
+# sha256: BazGegmYDC7P7dNCP3rgEEg57MtV_GRXc-HKoJUcMDA
+traceback2==1.4.0
+
+# sha256: E_d9CHXbbZtDXh1PQedK1MwutuHVyCSZYJKzQw8Ii7g
+# sha256: IogqDkGMKE4fcYqCKzsCKUTVPS2QjhaQsxmp0-ssBXk
+unittest2==1.1.0
+
+# sha256: aUkbUwUVfDxuDwSnAZhNaud_1yn8HJrNJQd_HfOFMms
+# sha256: 619wCpv8lkILBVY1r5AC02YuQ9gMP_0x8iTCW8DV9GI
+Werkzeug==0.11.3
+
+# sha256: KCwRK1XdjjyGmjVx-GdnwVCrEoSprOK97CJsWSrK-Bo
+zope.component==4.2.2
+
+# sha256: 3HpZov2Rcw03kxMaXSYbKek-xOKpfxvEh86N7-4v54Y
+zope.event==4.1.0
+
+# sha256: 8HtjH3pgHNjL0zMtVPQxQscIioMpn4WTVvCNHU1CWbM
+# sha256: 3lzKCDuUOdgAL7drvmtJmMWlpyH6sluEKYln8ALfTJQ
+# sha256: Z4hBb36n9bipe-lIJTd6ol6L3HNGPge6r5hYsp5zcHc
+# sha256: bzIw9yVFGCAeWjcIy7LemMhIME8G497Yv7OeWCXLouE
+# sha256: X6V1pSQPBCAMMIhCfQ1Le3N_bpAYgYpR2ND5J6aiUXo
+# sha256: UiGUrWpUVzXt11yKg_SNZdGvBk5DKn0yDWT1a6_BLpk
+# sha256: 6Mey1AlD9xyZFIyX9myqf1E0FH9XQj-NtbSCUJnOmgk
+# sha256: J5Ak8CCGAcPKqQfFOHbjetiGJffq8cs4QtvjYLIocBc
+# sha256: LiIanux8zFiImieOoT3P7V75OdgLB4Gamos8scaBSE8
+# sha256: aRGJZUEOyG1E3GuQF-4929WC4MCr7vYrOhnb9sitEys
+# sha256: 0E34aG7IZNDK3ozxmff4OuzUFhCaIINNVo-DEN7RLeo
+# sha256: 51qUfhXul-fnHgLqMC_rL8YtOiu0Zov5377UOlBqx-c
+# sha256: TkXSL7iDIipaufKCoRb-xe4ujRpWjM_2otdbvQ62vPw
+# sha256: vOkzm7PHpV4IA7Y9IcWDno5Hm8hcSt9CrkFbcvlPrLI
+# sha256: koE4NlJFoOiGmlmZ-8wqRUdaCm7VKklNYNvcVAM1_t0
+# sha256: DYQbobuEDuoOZIncXsr6YSVVSXH1O1rLh3ZEQeYbzro
+# sha256: sJyMHUezUxxADgGVaX8UFKYyId5u9HhZik8UYPfZo5I
+zope.interface==4.1.3
+
+# sha256: ilvjjTWOS86xchl0WBZ0YOAw_0rmqdnjNsxb1hq2RD8
+# sha256: T37KMj0TnsuvHIzCCmoww2fpfpOBTj7cd4NAqucXcpw
+acme==0.4.0
+
+# sha256: 33BQiANlNLGqGpirTfdCEElTF9YbpaKiYpTbK4zeGD8
+# sha256: lwsV1OdEzzlMeb08C_PRxaCXZ2vOk_1AI2755rZHmPM
+letsencrypt==0.4.0
+
+# sha256: D3YDaVFjLsMSEfjI5B5D5tn5FeWUtNHYXCObw3ih2tg
+# sha256: VTgvsePYGRmI4IOSAnxoYFHd8KciD73bxIuIHtbVFd8
+letsencrypt-apache==0.4.0
+
+# sha256: uDndLZwRfHAUMMFJlWkYpCOphjtIsJyQ4wpgE-fS9E8
+# sha256: j4MIDaoknQNsvM-4rlzG_wB7iNbZN1ITca-r57Gbrbw
+mock==1.0.1
+
+UNLIKELY_EOF
+ # -------------------------------------------------------------------------
+ cat << "UNLIKELY_EOF" > "$TEMP_DIR/peep.py"
+#!/usr/bin/env python
+"""peep ("prudently examine every package") verifies that packages conform to a
+trusted, locally stored hash and only then installs them::
+
+ peep install -r requirements.txt
+
+This makes your deployments verifiably repeatable without having to maintain a
+local PyPI mirror or use a vendor lib. Just update the version numbers and
+hashes in requirements.txt, and you're all set.
+
+"""
+# This is here so embedded copies of peep.py are MIT-compliant:
+# Copyright (c) 2013 Erik Rose
+#
+# Permission is hereby granted, free of charge, to any person obtaining a copy
+# of this software and associated documentation files (the "Software"), to
+# deal in the Software without restriction, including without limitation the
+# rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
+# sell copies of the Software, and to permit persons to whom the Software is
+# furnished to do so, subject to the following conditions:
+#
+# The above copyright notice and this permission notice shall be included in
+# all copies or substantial portions of the Software.
+from __future__ import print_function
+try:
+ xrange = xrange
+except NameError:
+ xrange = range
+from base64 import urlsafe_b64encode, urlsafe_b64decode
+from binascii import hexlify
+import cgi
+from collections import defaultdict
+from functools import wraps
+from hashlib import sha256
+from itertools import chain, islice
+import mimetypes
+from optparse import OptionParser
+from os.path import join, basename, splitext, isdir
+from pickle import dumps, loads
+import re
+import sys
+from shutil import rmtree, copy
+from sys import argv, exit
+from tempfile import mkdtemp
+import traceback
+try:
+ from urllib2 import build_opener, HTTPHandler, HTTPSHandler, HTTPError
+except ImportError:
+ from urllib.request import build_opener, HTTPHandler, HTTPSHandler
+ from urllib.error import HTTPError
+try:
+ from urlparse import urlparse
+except ImportError:
+ from urllib.parse import urlparse # 3.4
+# TODO: Probably use six to make urllib stuff work across 2/3.
+
+from pkg_resources import require, VersionConflict, DistributionNotFound
+
+# We don't admit our dependency on pip in setup.py, lest a naive user simply
+# say `pip install peep.tar.gz` and thus pull down an untrusted copy of pip
+# from PyPI. Instead, we make sure it's installed and new enough here and spit
+# out an error message if not:
+
+
+def activate(specifier):
+ """Make a compatible version of pip importable. Raise a RuntimeError if we
+ couldn't."""
+ try:
+ for distro in require(specifier):
+ distro.activate()
+ except (VersionConflict, DistributionNotFound):
+ raise RuntimeError('The installed version of pip is too old; peep '
+ 'requires ' + specifier)
+
+# Before 0.6.2, the log module wasn't there, so some
+# of our monkeypatching fails. It probably wouldn't be
+# much work to support even earlier, though.
+activate('pip>=0.6.2')
+
+import pip
+from pip.commands.install import InstallCommand
+try:
+ from pip.download import url_to_path # 1.5.6
+except ImportError:
+ try:
+ from pip.util import url_to_path # 0.7.0
+ except ImportError:
+ from pip.util import url_to_filename as url_to_path # 0.6.2
+from pip.index import PackageFinder, Link
+try:
+ from pip.log import logger
+except ImportError:
+ from pip import logger # 6.0
+from pip.req import parse_requirements
+try:
+ from pip.utils.ui import DownloadProgressBar, DownloadProgressSpinner
+except ImportError:
+ class NullProgressBar(object):
+ def __init__(self, *args, **kwargs):
+ pass
+
+ def iter(self, ret, *args, **kwargs):
+ return ret
+
+ DownloadProgressBar = DownloadProgressSpinner = NullProgressBar
+
+__version__ = 3, 0, 0
+
+try:
+ from pip.index import FormatControl # noqa
+ FORMAT_CONTROL_ARG = 'format_control'
+
+ # The line-numbering bug will be fixed in pip 8. All 7.x releases had it.
+ PIP_MAJOR_VERSION = int(pip.__version__.split('.')[0])
+ PIP_COUNTS_COMMENTS = PIP_MAJOR_VERSION >= 8
+except ImportError:
+ FORMAT_CONTROL_ARG = 'use_wheel' # pre-7
+ PIP_COUNTS_COMMENTS = True
+
+
+ITS_FINE_ITS_FINE = 0
+SOMETHING_WENT_WRONG = 1
+# "Traditional" for command-line errors according to optparse docs:
+COMMAND_LINE_ERROR = 2
+
+ARCHIVE_EXTENSIONS = ('.tar.bz2', '.tar.gz', '.tgz', '.tar', '.zip')
+
+MARKER = object()
+
+
+class PipException(Exception):
+ """When I delegated to pip, it exited with an error."""
+
+ def __init__(self, error_code):
+ self.error_code = error_code
+
+
+class UnsupportedRequirementError(Exception):
+ """An unsupported line was encountered in a requirements file."""
+
+
+class DownloadError(Exception):
+ def __init__(self, link, exc):
+ self.link = link
+ self.reason = str(exc)
+
+ def __str__(self):
+ return 'Downloading %s failed: %s' % (self.link, self.reason)
+
+
+def encoded_hash(sha):
+ """Return a short, 7-bit-safe representation of a hash.
+
+ If you pass a sha256, this results in the hash algorithm that the Wheel
+ format (PEP 427) uses, except here it's intended to be run across the
+ downloaded archive before unpacking.
+
+ """
+ return urlsafe_b64encode(sha.digest()).decode('ascii').rstrip('=')
+
+
+def path_and_line(req):
+ """Return the path and line number of the file from which an
+ InstallRequirement came.
+
+ """
+ path, line = (re.match(r'-r (.*) \(line (\d+)\)$',
+ req.comes_from).groups())
+ return path, int(line)
+
+
+def hashes_above(path, line_number):
+ """Yield hashes from contiguous comment lines before line ``line_number``.
+
+ """
+ def hash_lists(path):
+ """Yield lists of hashes appearing between non-comment lines.
+
+ The lists will be in order of appearance and, for each non-empty
+ list, their place in the results will coincide with that of the
+ line number of the corresponding result from `parse_requirements`
+ (which changed in pip 7.0 to not count comments).
+
+ """
+ hashes = []
+ with open(path) as file:
+ for lineno, line in enumerate(file, 1):
+ match = HASH_COMMENT_RE.match(line)
+ if match: # Accumulate this hash.
+ hashes.append(match.groupdict()['hash'])
+ if not IGNORED_LINE_RE.match(line):
+ yield hashes # Report hashes seen so far.
+ hashes = []
+ elif PIP_COUNTS_COMMENTS:
+ # Comment: count as normal req but have no hashes.
+ yield []
+
+ return next(islice(hash_lists(path), line_number - 1, None))
+
+
+def run_pip(initial_args):
+ """Delegate to pip the given args (starting with the subcommand), and raise
+ ``PipException`` if something goes wrong."""
+ status_code = pip.main(initial_args)
+
+ # Clear out the registrations in the pip "logger" singleton. Otherwise,
+ # loggers keep getting appended to it with every run. Pip assumes only one
+ # command invocation will happen per interpreter lifetime.
+ logger.consumers = []
+
+ if status_code:
+ raise PipException(status_code)
+
+
+def hash_of_file(path):
+ """Return the hash of a downloaded file."""
+ with open(path, 'rb') as archive:
+ sha = sha256()
+ while True:
+ data = archive.read(2 ** 20)
+ if not data:
+ break
+ sha.update(data)
+ return encoded_hash(sha)
+
+
+def is_git_sha(text):
+ """Return whether this is probably a git sha"""
+ # Handle both the full sha as well as the 7-character abbreviation
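+ # e.g. "bb2a842" (7 hex chars) -> True; "1.4.2" -> False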
+ if len(text) in (40, 7):
+ try:
+ int(text, 16)
+ return True
+ except ValueError:
+ pass
+ return False
+
+
+def filename_from_url(url):
+ parsed = urlparse(url)
+ path = parsed.path
+ return path.split('/')[-1]
+
+
+def requirement_args(argv, want_paths=False, want_other=False):
+ """Return an iterable of filtered arguments.
+
+ :arg argv: Arguments, starting after the subcommand
+ :arg want_paths: If True, the returned iterable includes the paths to any
+ requirements files following a ``-r`` or ``--requirement`` option.
+ :arg want_other: If True, the returned iterable includes the args that are
+ not a requirement-file path or a ``-r`` or ``--requirement`` flag.
+
+ """
+ was_r = False
+ for arg in argv:
+ # Allow for requirements files named "-r", don't freak out if there's a
+ # trailing "-r", etc.
+ if was_r:
+ if want_paths:
+ yield arg
+ was_r = False
+ elif arg in ['-r', '--requirement']:
+ was_r = True
+ else:
+ if want_other:
+ yield arg
+
+# any line that is a comment or just whitespace
+IGNORED_LINE_RE = re.compile(r'^(\s*#.*)?\s*$')
+
+HASH_COMMENT_RE = re.compile(
+ r"""
+ \s*\#\s+ # Lines that start with a '#'
+ (?P<hash_type>sha256):\s+ # Hash type is hardcoded to be sha256 for now.
+ (?P<hash>[^\s]+) # Hashes can be anything except '#' or spaces.
+ \s* # Suck up whitespace before the comment or
+ # just trailing whitespace if there is no
+ # comment. Also strip trailing newlines.
+ (?:\#(?P<comment>.*))? # Comments can be anything after a whitespace+#
+ # and are optional.
+ $""", re.X)
+
+
+def peep_hash(argv):
+ """Return the peep hash of one or more files, returning a shell status code
+ or raising a PipException.
+
+ :arg argv: The commandline args, starting after the subcommand
+
+ """
+ parser = OptionParser(
+ usage='usage: %prog hash file [file ...]',
+ description='Print a peep hash line for one or more files: for '
+ 'example, "# sha256: '
+ 'oz42dZy6Gowxw8AelDtO4gRgTW_xPdooH484k7I5EOY".')
+ _, paths = parser.parse_args(args=argv)
+ if paths:
+ for path in paths:
+ print('# sha256:', hash_of_file(path))
+ return ITS_FINE_ITS_FINE
+ else:
+ parser.print_usage()
+ return COMMAND_LINE_ERROR
+
+
+class EmptyOptions(object):
+ """Fake optparse options for compatibility with pip<1.2
+
+ pip<1.2 had a bug in parse_requirements() in which the ``options`` kwarg
+ was required. We work around that by passing it a mock object.
+
+ """
+ default_vcs = None
+ skip_requirements_regex = None
+ isolated_mode = False
+
+
+def memoize(func):
+ """Memoize a method that should return the same result every time on a
+ given instance.
+
+ """
+ @wraps(func)
+ def memoizer(self):
+ if not hasattr(self, '_cache'):
+ self._cache = {}
+ if func.__name__ not in self._cache:
+ self._cache[func.__name__] = func(self)
+ return self._cache[func.__name__]
+ return memoizer
+
+
+def package_finder(argv):
+ """Return a PackageFinder respecting command-line options.
+
+ :arg argv: Everything after the subcommand
+
+ """
+ # We instantiate an InstallCommand and then use some of its private
+ # machinery--its arg parser--for our own purposes, like a virus. This
+ # approach is portable across many pip versions, where more fine-grained
+ # ones are not. Ignoring options that don't exist on the parser (for
+ # instance, --use-wheel) gives us a straightforward method of backward
+ # compatibility.
+ try:
+ command = InstallCommand()
+ except TypeError:
+ # This is likely pip 1.3.0's "__init__() takes exactly 2 arguments (1
+ # given)" error. In that version, InstallCommand takes a top=level
+ # parser passed in from outside.
+ from pip.baseparser import create_main_parser
+ command = InstallCommand(create_main_parser())
+ # The downside is that it essentially ruins the InstallCommand class for
+ # further use. Calling out to pip.main() within the same interpreter, for
+ # example, would result in arguments parsed this time turning up there.
+ # Thus, we deepcopy the arg parser so we don't trash its singletons. Of
+ # course, deepcopy doesn't work on these objects, because they contain
+ # uncopyable regex patterns, so we pickle and unpickle instead. Fun!
+ options, _ = loads(dumps(command.parser)).parse_args(argv)
+
+ # Carry over PackageFinder kwargs that have [about] the same names as
+ # options attr names:
+ possible_options = [
+ 'find_links',
+ FORMAT_CONTROL_ARG,
+ ('allow_all_prereleases', 'pre'),
+ 'process_dependency_links'
+ ]
+ kwargs = {}
+ for option in possible_options:
+ kw, attr = option if isinstance(option, tuple) else (option, option)
+ value = getattr(options, attr, MARKER)
+ if value is not MARKER:
+ kwargs[kw] = value
+
+ # Figure out index_urls:
+ index_urls = [options.index_url] + options.extra_index_urls
+ if options.no_index:
+ index_urls = []
+ index_urls += getattr(options, 'mirrors', [])
+
+ # If pip is new enough to have a PipSession, initialize one, since
+ # PackageFinder requires it:
+ if hasattr(command, '_build_session'):
+ kwargs['session'] = command._build_session(options)
+
+ return PackageFinder(index_urls=index_urls, **kwargs)
+
+
+class DownloadedReq(object):
+ """A wrapper around InstallRequirement which offers additional information
+ based on downloading and examining a corresponding package archive
+
+ These are conceptually immutable, so we can get away with memoizing
+ expensive things.
+
+ """
+ def __init__(self, req, argv, finder):
+ """Download a requirement, compare its hashes, and return a subclass
+ of DownloadedReq depending on its state.
+
+ :arg req: The InstallRequirement I am based on
+ :arg argv: The args, starting after the subcommand
+
+ """
+ self._req = req
+ self._argv = argv
+ self._finder = finder
+
+ # We use a separate temp dir for each requirement so requirements
+ # (from different indices) that happen to have the same archive names
+ # don't overwrite each other, leading to a security hole in which the
+ # latter is a hash mismatch, the former has already passed the
+ # comparison, and the latter gets installed.
+ self._temp_path = mkdtemp(prefix='peep-')
+ # Think of DownloadedReq as a one-shot state machine. It's an abstract
+ # class that ratchets forward to being one of its own subclasses,
+ # depending on its package status. Then it doesn't move again.
+ self.__class__ = self._class()
+
+ def dispose(self):
+ """Delete temp files and dirs I've made. Render myself useless.
+
+ Do not call further methods on me after calling dispose().
+
+ """
+ rmtree(self._temp_path)
+
+ def _version(self):
+ """Deduce the version number of the downloaded package from its filename."""
+ # TODO: Can we delete this method and just print the line from the
+ # reqs file verbatim instead?
+ def version_of_archive(filename, package_name):
+ # Since we know the project_name, we can strip that off the left, strip
+ # any archive extensions off the right, and take the rest as the
+ # version.
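+ # e.g. ("cffi-1.4.2.tar.gz", "cffi") -> "1.4.2"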
+ for ext in ARCHIVE_EXTENSIONS:
+ if filename.endswith(ext):
+ filename = filename[:-len(ext)]
+ break
+ # Handle github sha tarball downloads.
+ if is_git_sha(filename):
+ filename = package_name + '-' + filename
+ if not filename.lower().replace('_', '-').startswith(package_name.lower()):
+ # TODO: Should we replace runs of [^a-zA-Z0-9.], not just _, with -?
+ give_up(filename, package_name)
+ return filename[len(package_name) + 1:] # Strip off '-' before version.
+
+ def version_of_wheel(filename, package_name):
+ # For Wheel files (http://legacy.python.org/dev/peps/pep-0427/#file-
+ # name-convention) we know the format bits are '-' separated.
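+ # e.g. "mock-1.0.1-py2.py3-none-any.whl".split('-', 2) -> ["mock", "1.0.1", ...]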
+ whl_package_name, version, _rest = filename.split('-', 2)
+ # Do the alteration to package_name from PEP 427:
+ our_package_name = re.sub(r'[^\w\d.]+', '_', package_name, flags=re.UNICODE)
+ if whl_package_name != our_package_name:
+ give_up(filename, whl_package_name)
+ return version
+
+ def give_up(filename, package_name):
+ raise RuntimeError("The archive '%s' didn't start with the package name "
+ "'%s', so I couldn't figure out the version number. "
+ "My bad; improve me." %
+ (filename, package_name))
+
+ get_version = (version_of_wheel
+ if self._downloaded_filename().endswith('.whl')
+ else version_of_archive)
+ return get_version(self._downloaded_filename(), self._project_name())
+
+ def _is_always_unsatisfied(self):
+ """Returns whether this requirement is always unsatisfied
+
+ This would happen in cases where we can't determine the version
+ from the filename.
+
+ """
+ # If this is a github sha tarball, then it is always unsatisfied
+ # because the url has a commit sha in it and not the version
+ # number.
+ url = self._url()
+ if url:
+ filename = filename_from_url(url)
+ if filename.endswith(ARCHIVE_EXTENSIONS):
+ filename, ext = splitext(filename)
+ if is_git_sha(filename):
+ return True
+ return False
+
+ @memoize # Avoid hitting the file[cache] over and over.
+ def _expected_hashes(self):
+ """Return a list of known-good hashes for this package."""
+ return hashes_above(*path_and_line(self._req))
+
+ def _download(self, link):
+ """Download a file, and return its name within my temp dir.
+
+ This does no verification of HTTPS certs, but our checking hashes
+ makes that largely unimportant. It would be nice to be able to use the
+ requests lib, which can verify certs, but it is guaranteed to be
+ available only in pip >= 1.5.
+
+ This also drops support for proxies and basic auth, though those could
+ be added back in.
+
+ """
+ # Based on pip 1.4.1's URLOpener but with cert verification removed
+ def opener(is_https):
+ if is_https:
+ opener = build_opener(HTTPSHandler())
+ # Strip out HTTPHandler to prevent MITM spoof:
+ for handler in opener.handlers:
+ if isinstance(handler, HTTPHandler):
+ opener.handlers.remove(handler)
+ else:
+ opener = build_opener()
+ return opener
+
+ # Descended from unpack_http_url() in pip 1.4.1
+ def best_filename(link, response):
+ """Return the most informative possible filename for a download,
+ ideally with a proper extension.
+
+ """
+ content_type = response.info().get('content-type', '')
+ filename = link.filename # fallback
+ # Have a look at the Content-Disposition header for a better guess:
+ content_disposition = response.info().get('content-disposition')
+ if content_disposition:
+ type, params = cgi.parse_header(content_disposition)
+ # We use ``or`` here because we don't want to use an "empty" value
+ # from the filename param:
+ filename = params.get('filename') or filename
+ ext = splitext(filename)[1]
+ if not ext:
+ ext = mimetypes.guess_extension(content_type)
+ if ext:
+ filename += ext
+ if not ext and link.url != response.geturl():
+ ext = splitext(response.geturl())[1]
+ if ext:
+ filename += ext
+ return filename
+
+ # Descended from _download_url() in pip 1.4.1
+ def pipe_to_file(response, path, size=0):
+ """Pull the data off an HTTP response, shove it in a new file, and
+ show progress.
+
+ :arg response: A file-like object to read from
+ :arg path: The path of the new file
+ :arg size: The expected size, in bytes, of the download. 0 for
+ unknown or to suppress progress indication (as for cached
+ downloads)
+
+ """
+ def response_chunks(chunk_size):
+ while True:
+ chunk = response.read(chunk_size)
+ if not chunk:
+ break
+ yield chunk
+
+ print('Downloading %s%s...' % (
+ self._req.req,
+ (' (%sK)' % (size / 1000)) if size > 1000 else ''))
+ progress_indicator = (DownloadProgressBar(max=size).iter if size
+ else DownloadProgressSpinner().iter)
+ with open(path, 'wb') as file:
+ for chunk in progress_indicator(response_chunks(4096), 4096):
+ file.write(chunk)
+
+ url = link.url.split('#', 1)[0]
+ try:
+ response = opener(urlparse(url).scheme != 'http').open(url)
+ except (HTTPError, IOError) as exc:
+ raise DownloadError(link, exc)
+ filename = best_filename(link, response)
+ try:
+ size = int(response.headers['content-length'])
+ except (ValueError, KeyError, TypeError):
+ size = 0
+ pipe_to_file(response, join(self._temp_path, filename), size=size)
+ return filename
+
+ # Based on req_set.prepare_files() in pip bb2a8428d4aebc8d313d05d590f386fa3f0bbd0f
+ @memoize # Avoid re-downloading.
+ def _downloaded_filename(self):
+ """Download the package's archive if necessary, and return its
+ filename.
+
+ --no-deps is implied, as we have reimplemented the bits that would
+ ordinarily do dependency resolution.
+
+ """
+ # Peep doesn't support requirements that don't come down as a single
+ # file, because it can't hash them. Thus, it doesn't support editable
+ # requirements, because pip itself doesn't support editable
+ # requirements except for "local projects or a VCS url". Nor does it
+ # support VCS requirements yet, because we haven't yet come up with a
+ # portable, deterministic way to hash them. In summary, all we support
+ # is == requirements and tarballs/zips/etc.
+
+ # TODO: Stop on reqs that are editable or aren't ==.
+
+ # If the requirement isn't already specified as a URL, get a URL
+ # from an index:
+ link = self._link() or self._finder.find_requirement(self._req, upgrade=False)
+
+ if link:
+ lower_scheme = link.scheme.lower() # pip lower()s it for some reason.
+ if lower_scheme == 'http' or lower_scheme == 'https':
+ file_path = self._download(link)
+ return basename(file_path)
+ elif lower_scheme == 'file':
+ # The following is inspired by pip's unpack_file_url():
+ link_path = url_to_path(link.url_without_fragment)
+ if isdir(link_path):
+ raise UnsupportedRequirementError(
+ "%s: %s is a directory. So that it can compute "
+ "a hash, peep supports only filesystem paths which "
+ "point to files" %
+ (self._req, link.url_without_fragment))
+ else:
+ copy(link_path, self._temp_path)
+ return basename(link_path)
+ else:
+ raise UnsupportedRequirementError(
+ "%s: The download link, %s, would not result in a file "
+ "that can be hashed. Peep supports only == requirements, "
+ "file:// URLs pointing to files (not folders), and "
+ "http:// and https:// URLs pointing to tarballs, zips, "
+ "etc." % (self._req, link.url))
+ else:
+ raise UnsupportedRequirementError(
+ "%s: couldn't determine where to download this requirement from."
+ % (self._req,))
+
+ def install(self):
+ """Install the package I represent, without dependencies.
+
+ Obey typical pip-install options passed in on the command line.
+
+ """
+ other_args = list(requirement_args(self._argv, want_other=True))
+ archive_path = join(self._temp_path, self._downloaded_filename())
+ # -U so it installs whether pip deems the requirement "satisfied" or
+ # not. This is necessary for GitHub-sourced zips, which change without
+ # their version numbers changing.
+ run_pip(['install'] + other_args + ['--no-deps', '-U', archive_path])
+
+ @memoize
+ def _actual_hash(self):
+ """Download the package's archive if necessary, and return its hash."""
+ return hash_of_file(join(self._temp_path, self._downloaded_filename()))
+
+ def _project_name(self):
+ """Return the inner Requirement's "unsafe name".
+
+ Raise ValueError if there is no name.
+
+ """
+ name = getattr(self._req.req, 'project_name', '')
+ if name:
+ return name
+ raise ValueError('Requirement has no project_name.')
+
+ def _name(self):
+ return self._req.name
+
+ def _link(self):
+ try:
+ return self._req.link
+ except AttributeError:
+ # The link attribute isn't available prior to pip 6.1.0, so fall
+ # back to the now deprecated 'url' attribute.
+ return Link(self._req.url) if self._req.url else None
+
+ def _url(self):
+ link = self._link()
+ return link.url if link else None
+
+ @memoize # Avoid re-running expensive check_if_exists().
+ def _is_satisfied(self):
+ self._req.check_if_exists()
+ return (self._req.satisfied_by and
+ not self._is_always_unsatisfied())
+
+ def _class(self):
+ """Return the class I should be, spanning a continuum of goodness."""
+ try:
+ self._project_name()
+ except ValueError:
+ return MalformedReq
+ if self._is_satisfied():
+ return SatisfiedReq
+ if not self._expected_hashes():
+ return MissingReq
+ if self._actual_hash() not in self._expected_hashes():
+ return MismatchedReq
+ return InstallableReq
+
+ @classmethod
+ def foot(cls):
+ """Return the text to be printed once, after all of the errors from
+ classes of my type are printed.
+
+ """
+ return ''
+
+
+class MalformedReq(DownloadedReq):
+ """A requirement whose package name could not be determined"""
+
+ @classmethod
+ def head(cls):
+ return 'The following requirements could not be processed:\n'
+
+ def error(self):
+ return '* Unable to determine package name from URL %s; add #egg=' % self._url()
+
+
+class MissingReq(DownloadedReq):
+ """A requirement for which no hashes were specified in the requirements file"""
+
+ @classmethod
+ def head(cls):
+ return ('The following packages had no hashes specified in the requirements file, which\n'
+ 'leaves them open to tampering. Vet these packages to your satisfaction, then\n'
+ 'add these "sha256" lines like so:\n\n')
+
+ def error(self):
+ if self._url():
+ # _url() always contains an #egg= part, or this would be a
+ # MalformedReq.
+ line = self._url()
+ else:
+ line = '%s==%s' % (self._name(), self._version())
+ return '# sha256: %s\n%s\n' % (self._actual_hash(), line)
+
+
+class MismatchedReq(DownloadedReq):
+ """A requirement for which the downloaded file didn't match any of my hashes."""
+ @classmethod
+ def head(cls):
+ return ("THE FOLLOWING PACKAGES DIDN'T MATCH THE HASHES SPECIFIED IN THE REQUIREMENTS\n"
+ "FILE. If you have updated the package versions, update the hashes. If not,\n"
+ "freak out, because someone has tampered with the packages.\n\n")
+
+ def error(self):
+ preamble = ' %s: expected' % self._project_name()
+ if len(self._expected_hashes()) > 1:
+ preamble += ' one of'
+ padding = '\n' + ' ' * (len(preamble) + 1)
+ return '%s %s\n%s got %s' % (preamble,
+ padding.join(self._expected_hashes()),
+ ' ' * (len(preamble) - 4),
+ self._actual_hash())
+
+ @classmethod
+ def foot(cls):
+ return '\n'
+
+
+class SatisfiedReq(DownloadedReq):
+ """A requirement which turned out to be already installed"""
+
+ @classmethod
+ def head(cls):
+ return ("These packages were already installed, so we didn't need to download or build\n"
+ "them again. If you installed them with peep in the first place, you should be\n"
+ "safe. If not, uninstall them, then re-attempt your install with peep.\n")
+
+ def error(self):
+ return ' %s' % (self._req,)
+
+
+class InstallableReq(DownloadedReq):
+ """A requirement whose hash matched and can be safely installed"""
+
+
+# DownloadedReq subclasses that indicate an error that should keep us from
+# going forward with installation, in the order in which their errors should
+# be reported:
+ERROR_CLASSES = [MismatchedReq, MissingReq, MalformedReq]
+
+
+def bucket(things, key):
+ """Return a map of key -> list of things."""
+ ret = defaultdict(list)
+ for thing in things:
+ ret[key(thing)].append(thing)
+ return ret
+
+
+def first_every_last(iterable, first, every, last):
+ """Execute something before the first item of iter, something else for each
+ item, and a third thing after the last.
+
+ If there are no items in the iterable, don't execute anything.
+
+ """
+ did_first = False
+ for item in iterable:
+ if not did_first:
+ did_first = True
+ first(item)
+ every(item)
+ if did_first:
+ last(item)
+
+
+def _parse_requirements(path, finder):
+ try:
+ # list() so the generator that is parse_requirements() actually runs
+ # far enough to report a TypeError
+ return list(parse_requirements(
+ path, options=EmptyOptions(), finder=finder))
+ except TypeError:
+ # session is a required kwarg as of pip 6.0 and will raise
+ # a TypeError if missing. It needs to be a PipSession instance,
+ # but in older versions we can't import it from pip.download
+ # (nor do we need it at all) so we only import it in this except block
+ from pip.download import PipSession
+ return list(parse_requirements(
+ path, options=EmptyOptions(), session=PipSession(), finder=finder))
+
+
+def downloaded_reqs_from_path(path, argv):
+ """Return a list of DownloadedReqs representing the requirements parsed
+ out of a given requirements file.
+
+ :arg path: The path to the requirements file
+ :arg argv: The commandline args, starting after the subcommand
+
+ """
+ finder = package_finder(argv)
+ return [DownloadedReq(req, argv, finder) for req in
+ _parse_requirements(path, finder)]
+
+
+def peep_install(argv):
+ """Perform the ``peep install`` subcommand, returning a shell status code
+ or raising a PipException.
+
+ :arg argv: The commandline args, starting after the subcommand
+
+ """
+ output = []
+ out = output.append
+ reqs = []
+ try:
+ req_paths = list(requirement_args(argv, want_paths=True))
+ if not req_paths:
+ out("You have to specify one or more requirements files with the -r option, because\n"
+ "otherwise there's nowhere for peep to look up the hashes.\n")
+ return COMMAND_LINE_ERROR
+
+ # We're a "peep install" command, and we have some requirement paths.
+ reqs = list(chain.from_iterable(
+ downloaded_reqs_from_path(path, argv)
+ for path in req_paths))
+ buckets = bucket(reqs, lambda r: r.__class__)
+
+ # Skip a line after pip's "Cleaning up..." so the important stuff
+ # stands out:
+ if any(buckets[b] for b in ERROR_CLASSES):
+ out('\n')
+
+ printers = (lambda r: out(r.head()),
+ lambda r: out(r.error() + '\n'),
+ lambda r: out(r.foot()))
+ for c in ERROR_CLASSES:
+ first_every_last(buckets[c], *printers)
+
+ if any(buckets[b] for b in ERROR_CLASSES):
+ out('-------------------------------\n'
+ 'Not proceeding to installation.\n')
+ return SOMETHING_WENT_WRONG
+ else:
+ for req in buckets[InstallableReq]:
+ req.install()
+
+ first_every_last(buckets[SatisfiedReq], *printers)
+
+ return ITS_FINE_ITS_FINE
+ except (UnsupportedRequirementError, DownloadError) as exc:
+ out(str(exc))
+ return SOMETHING_WENT_WRONG
+ finally:
+ for req in reqs:
+ req.dispose()
+ print(''.join(output))
+
+
+def peep_port(paths):
+ """Convert a peep requirements file to one compatble with pip-8 hashing.
+
+ Loses comments and tromps on URLs, so the result will need a little manual
+ massaging, but the hard part--the hash conversion--is done for you.
+
+ """
+ if not paths:
+ print('Please specify one or more requirements files so I have '
+ 'something to port.\n')
+ return COMMAND_LINE_ERROR
+ for req in chain.from_iterable(
+ _parse_requirements(path, package_finder(argv)) for path in paths):
+ hashes = [hexlify(urlsafe_b64decode((hash + '=').encode('ascii'))).decode('ascii')
+ for hash in hashes_above(*path_and_line(req))]
+ if not hashes:
+ print(req.req)
+ elif len(hashes) == 1:
+ print('%s --hash=sha256:%s' % (req.req, hashes[0]))
+ else:
+ print('%s' % req.req, end='')
+ for hash in hashes:
+ print(' \\')
+ print(' --hash=sha256:%s' % hash, end='')
+ print()
+
+
+def main():
+ """Be the top-level entrypoint. Return a shell status code."""
+ commands = {'hash': peep_hash,
+ 'install': peep_install,
+ 'port': peep_port}
+ try:
+ if len(argv) >= 2 and argv[1] in commands:
+ return commands[argv[1]](argv[2:])
+ else:
+ # Fall through to top-level pip main() for everything else:
+ return pip.main()
+ except PipException as exc:
+ return exc.error_code
+
+
+def exception_handler(exc_type, exc_value, exc_tb):
+ print('Oh no! Peep had a problem while trying to do stuff. Please write up a bug report')
+ print('with the specifics so we can fix it:')
+ print()
+ print('https://github.com/erikrose/peep/issues/new')
+ print()
+ print('Here are some particulars you can copy and paste into the bug report:')
+ print()
+ print('---')
+ print('peep:', repr(__version__))
+ print('python:', repr(sys.version))
+ print('pip:', repr(getattr(pip, '__version__', 'no __version__ attr')))
+ print('Command line: ', repr(sys.argv))
+ print(
+ ''.join(traceback.format_exception(exc_type, exc_value, exc_tb)))
+ print('---')
+
+
+if __name__ == '__main__':
+ try:
+ exit(main())
+ except Exception:
+ exception_handler(*sys.exc_info())
+ exit(SOMETHING_WENT_WRONG)
+
+UNLIKELY_EOF
+ # -------------------------------------------------------------------------
+ set +e
+ PEEP_OUT=`"$VENV_BIN/python" "$TEMP_DIR/peep.py" install -r "$TEMP_DIR/letsencrypt-auto-requirements.txt"`
+ PEEP_STATUS=$?
+ set -e
+ rm -rf "$TEMP_DIR"
+ if [ "$PEEP_STATUS" != 0 ]; then
+ # Report error. (Otherwise, be quiet.)
+ echo "Had a problem while downloading and verifying Python packages:"
+ echo "$PEEP_OUT"
+ exit 1
+ fi
+ fi
+ echo "Requesting root privileges to run letsencrypt..."
+ echo " " $SUDO "$VENV_BIN/letsencrypt" "$@"
+ $SUDO "$VENV_BIN/letsencrypt" "$@"
+else
+ # Phase 1: Upgrade letsencrypt-auto if necessary, then self-invoke.
+ #
+ # Each phase checks the version of only the thing it is responsible for
+ # upgrading. Phase 1 checks the version of the latest release of
+ # letsencrypt-auto (which is always the same as that of the letsencrypt
+ # package). Phase 2 checks the version of the locally installed letsencrypt.
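+ # e.g. an 0.4.0 copy of this script that finds a newer release on PyPI
+ # downloads and verifies it, overwrites itself ($0), and then re-runs:
+ # "$0" --no-self-upgrade <original arguments>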
+
+ if [ ! -f "$VENV_BIN/letsencrypt" ]; then
+ # If it looks like we've never bootstrapped before, bootstrap:
+ Bootstrap
+ fi
+ if [ "$OS_PACKAGES_ONLY" = 1 ]; then
+ echo "OS packages installed."
+ exit 0
+ fi
+
+ echo "Checking for new version..."
+ TEMP_DIR=$(TempDir)
+ # ---------------------------------------------------------------------------
+ cat << "UNLIKELY_EOF" > "$TEMP_DIR/fetch.py"
+"""Do downloading and JSON parsing without additional dependencies. ::
+
+ # Print latest released version of LE to stdout:
+ python fetch.py --latest-version
+
+ # Download letsencrypt-auto script from git tag v1.2.3 into the folder I'm
+ # in, and make sure its signature verifies:
+ python fetch.py --le-auto-script v1.2.3
+
+On failure, return non-zero.
+
+"""
+from distutils.version import LooseVersion
+from json import loads
+from os import devnull, environ
+from os.path import dirname, join
+import re
+from subprocess import check_call, CalledProcessError
+from sys import argv, exit
+from urllib2 import build_opener, HTTPHandler, HTTPSHandler, HTTPError
+
+PUBLIC_KEY = environ.get('LE_AUTO_PUBLIC_KEY', """-----BEGIN PUBLIC KEY-----
+MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA6MR8W/galdxnpGqBsYbq
+OzQb2eyW15YFjDDEMI0ZOzt8f504obNs920lDnpPD2/KqgsfjOgw2K7xWDJIj/18
+xUvWPk3LDkrnokNiRkA3KOx3W6fHycKL+zID7zy+xZYBuh2fLyQtWV1VGQ45iNRp
+9+Zo7rH86cdfgkdnWTlNSHyTLW9NbXvyv/E12bppPcEvgCTAQXgnDVJ0/sqmeiij
+n9tTFh03aM+R2V/21h8aTraAS24qiPCz6gkmYGC8yr6mglcnNoYbsLNYZ69zF1XH
+cXPduCPdPdfLlzVlKK1/U7hkA28eG3BIAMh6uJYBRJTpiGgaGdPd7YekUB8S6cy+
+CQIDAQAB
+-----END PUBLIC KEY-----
+""")
+
+class ExpectedError(Exception):
+ """A novice-readable exception that also carries the original exception for
+ debugging"""
+
+
+class HttpsGetter(object):
+ def __init__(self):
+ """Build an HTTPS opener."""
+ # Based on pip 1.4.1's URLOpener
+ # This verifies certs only on Python >= 2.7.9.
+ self._opener = build_opener(HTTPSHandler())
+ # Strip out HTTPHandler to prevent MITM spoof:
+ for handler in self._opener.handlers:
+ if isinstance(handler, HTTPHandler):
+ self._opener.handlers.remove(handler)
+
+ def get(self, url):
+ """Return the document contents pointed to by an HTTPS URL.
+
+ If something goes wrong (404, timeout, etc.), raise ExpectedError.
+
+ """
+ try:
+ return self._opener.open(url).read()
+ except (HTTPError, IOError) as exc:
+ raise ExpectedError("Couldn't download %s." % url, exc)
+
+
+def write(contents, dir, filename):
+ """Write something to a file in a certain directory."""
+ with open(join(dir, filename), 'w') as file:
+ file.write(contents)
+
+
+def latest_stable_version(get):
+ """Return the latest stable release of letsencrypt."""
+ metadata = loads(get(
+ environ.get('LE_AUTO_JSON_URL',
+ 'https://pypi.python.org/pypi/letsencrypt/json')))
+ # metadata['info']['version'] actually returns the latest release of any
+ # kind, contrary to https://wiki.python.org/moin/PyPIJSON.
+ # The regex below is sufficient to filter out prereleases for most
+ # packages, LE included.
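+ # e.g. from releases {"0.3.0", "0.4.0", "0.5.0.dev1"} this returns "0.4.0"
+ # ("0.5.0.dev1" fails the regex and is skipped)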
+ return str(max(LooseVersion(r) for r
+ in metadata['releases'].iterkeys()
+ if re.match('^[0-9.]+$', r)))
+
+
+def verified_new_le_auto(get, tag, temp_dir):
+ """Return the path to a verified, up-to-date letsencrypt-auto script.
+
+ If the download's signature does not verify or something else goes wrong
+ with the verification process, raise ExpectedError.
+
+ """
+ le_auto_dir = environ.get(
+ 'LE_AUTO_DIR_TEMPLATE',
+ 'https://raw.githubusercontent.com/letsencrypt/letsencrypt/%s/'
+ 'letsencrypt-auto-source/') % tag
+ write(get(le_auto_dir + 'letsencrypt-auto'), temp_dir, 'letsencrypt-auto')
+ write(get(le_auto_dir + 'letsencrypt-auto.sig'), temp_dir, 'letsencrypt-auto.sig')
+ write(PUBLIC_KEY, temp_dir, 'public_key.pem')
+ try:
+ with open(devnull, 'w') as dev_null:
+ check_call(['openssl', 'dgst', '-sha256', '-verify',
+ join(temp_dir, 'public_key.pem'),
+ '-signature',
+ join(temp_dir, 'letsencrypt-auto.sig'),
+ join(temp_dir, 'letsencrypt-auto')],
+ stdout=dev_null,
+ stderr=dev_null)
+ except CalledProcessError as exc:
+ raise ExpectedError("Couldn't verify signature of downloaded "
+ "letsencrypt-auto.", exc)
+
+
+def main():
+ get = HttpsGetter().get
+ flag = argv[1]
+ try:
+ if flag == '--latest-version':
+ print latest_stable_version(get)
+ elif flag == '--le-auto-script':
+ tag = argv[2]
+ verified_new_le_auto(get, tag, dirname(argv[0]))
+ except ExpectedError as exc:
+ print exc.args[0], exc.args[1]
+ return 1
+ else:
+ return 0
+
+
+if __name__ == '__main__':
+ exit(main())
+
+UNLIKELY_EOF
+ # ---------------------------------------------------------------------------
+ DeterminePythonVersion
+ REMOTE_VERSION=`"$LE_PYTHON" "$TEMP_DIR/fetch.py" --latest-version`
+ if [ "$LE_AUTO_VERSION" != "$REMOTE_VERSION" ]; then
+ echo "Upgrading letsencrypt-auto $LE_AUTO_VERSION to $REMOTE_VERSION..."
+
+ # Now we drop into Python so we don't have to install even more
+ # dependencies (curl, etc.), for better flow control, and for the option of
+ # future Windows compatibility.
+ "$LE_PYTHON" "$TEMP_DIR/fetch.py" --le-auto-script "v$REMOTE_VERSION"
+
+ # Install new copy of letsencrypt-auto. This preserves permissions and
+ # ownership from the old copy.
+ # TODO: Deal with quotes in pathnames.
+ echo "Replacing letsencrypt-auto..."
+ echo " " $SUDO cp "$TEMP_DIR/letsencrypt-auto" "$0"
+ $SUDO cp "$TEMP_DIR/letsencrypt-auto" "$0"
+ # TODO: Clean up temp dir safely, even if it has quotes in its path.
+ rm -rf "$TEMP_DIR"
+ fi # should upgrade
+ "$0" --no-self-upgrade "$@"
+fi