summary refs log tree commit diff
path: root/script
diff options
context:
space:
mode:
author	Felipe Sateler <fsateler@debian.org>	2019-11-22 21:15:41 -0300
committer	Felipe Sateler <fsateler@debian.org>	2019-11-22 21:15:41 -0300
commit	97b16e5404375cc6cca4469045984cac0eabd335 (patch)
tree	b9cfdfec00f4a6afceed718cbb155651d23a51fc /script
parent	813ff34b5328e530d94c95cd8235431cde391e4c (diff)
parent	d66f980dd002ce94c3196b1a74dc8c1a0788be06 (diff)
Update upstream source from tag 'upstream/1.25.0'
Update to upstream version '1.25.0' with Debian dir 01225dadf264cb86293071829641cb341942031d
Diffstat (limited to 'script')
-rw-r--r--script/Jenkinsfile.fossa20
-rwxr-xr-xscript/build/image11
-rwxr-xr-xscript/build/linux19
-rwxr-xr-xscript/build/linux-entrypoint43
-rwxr-xr-xscript/build/osx7
-rwxr-xr-xscript/build/test-image15
-rw-r--r--script/build/windows.ps116
-rwxr-xr-xscript/build/write-git-sha11
-rwxr-xr-xscript/circle/bintray-deploy.sh2
-rw-r--r--script/fossa.mk16
-rw-r--r--script/release/README.md201
-rwxr-xr-xscript/release/build-binaries40
-rwxr-xr-xscript/release/contributors30
-rwxr-xr-xscript/release/download-binaries39
-rwxr-xr-xscript/release/make-branch86
-rwxr-xr-xscript/release/push-release8
-rw-r--r--script/release/release.md.tmpl34
-rwxr-xr-xscript/release/release.py387
-rwxr-xr-xscript/release/release.sh13
-rw-r--r--script/release/release/__init__.py0
-rw-r--r--script/release/release/bintray.py50
-rw-r--r--script/release/release/const.py10
-rw-r--r--script/release/release/downloader.py72
-rw-r--r--script/release/release/images.py157
-rw-r--r--script/release/release/pypi.py44
-rw-r--r--script/release/release/repository.py246
-rw-r--r--script/release/release/utils.py85
-rwxr-xr-xscript/release/setup-venv.sh47
-rwxr-xr-xscript/run/run.sh16
-rwxr-xr-xscript/setup/osx123
-rw-r--r--script/setup/osx_helpers.sh41
-rwxr-xr-xscript/test/all5
-rwxr-xr-xscript/test/ci3
-rwxr-xr-xscript/test/default9
-rwxr-xr-xscript/test/versions.py48
35 files changed, 1643 insertions, 311 deletions
diff --git a/script/Jenkinsfile.fossa b/script/Jenkinsfile.fossa
new file mode 100644
index 00000000..480e98ef
--- /dev/null
+++ b/script/Jenkinsfile.fossa
@@ -0,0 +1,20 @@
+pipeline {
+ agent any
+ stages {
+ stage("License Scan") {
+ agent {
+ label 'ubuntu-1604-aufs-edge'
+ }
+
+ steps {
+ withCredentials([
+ string(credentialsId: 'fossa-api-key', variable: 'FOSSA_API_KEY')
+ ]) {
+ checkout scm
+ sh "FOSSA_API_KEY='${FOSSA_API_KEY}' BRANCH_NAME='${env.BRANCH_NAME}' make -f script/fossa.mk fossa-analyze"
+ sh "FOSSA_API_KEY='${FOSSA_API_KEY}' make -f script/fossa.mk fossa-test"
+ }
+ }
+ }
+ }
+}
diff --git a/script/build/image b/script/build/image
index a3198c99..fb3f856e 100755
--- a/script/build/image
+++ b/script/build/image
@@ -7,11 +7,14 @@ if [ -z "$1" ]; then
exit 1
fi
-TAG=$1
+TAG="$1"
VERSION="$(python setup.py --version)"
-./script/build/write-git-sha
+DOCKER_COMPOSE_GITSHA="$(script/build/write-git-sha)"
+echo "${DOCKER_COMPOSE_GITSHA}" > compose/GITSHA
python setup.py sdist bdist_wheel
-./script/build/linux
-docker build -t docker/compose:$TAG -f Dockerfile.run .
+
+docker build \
+ --build-arg GIT_COMMIT="${DOCKER_COMPOSE_GITSHA}" \
+ -t "${TAG}" .
diff --git a/script/build/linux b/script/build/linux
index 1a4cd4d9..ca5620b8 100755
--- a/script/build/linux
+++ b/script/build/linux
@@ -4,10 +4,15 @@ set -ex
./script/clean
-TAG="docker-compose"
-docker build -t "$TAG" . | tail -n 200
-docker run \
- --rm --entrypoint="script/build/linux-entrypoint" \
- -v $(pwd)/dist:/code/dist \
- -v $(pwd)/.git:/code/.git \
- "$TAG"
+DOCKER_COMPOSE_GITSHA="$(script/build/write-git-sha)"
+TAG="docker/compose:tmp-glibc-linux-binary-${DOCKER_COMPOSE_GITSHA}"
+
+docker build -t "${TAG}" . \
+ --build-arg BUILD_PLATFORM=debian \
+ --build-arg GIT_COMMIT="${DOCKER_COMPOSE_GITSHA}"
+TMP_CONTAINER=$(docker create "${TAG}")
+mkdir -p dist
+ARCH=$(uname -m)
+docker cp "${TMP_CONTAINER}":/usr/local/bin/docker-compose "dist/docker-compose-Linux-${ARCH}"
+docker container rm -f "${TMP_CONTAINER}"
+docker image rm -f "${TAG}"
diff --git a/script/build/linux-entrypoint b/script/build/linux-entrypoint
index 0e3c7ec1..d607dd5c 100755
--- a/script/build/linux-entrypoint
+++ b/script/build/linux-entrypoint
@@ -2,14 +2,39 @@
set -ex
-TARGET=dist/docker-compose-$(uname -s)-$(uname -m)
-VENV=/code/.tox/py36
+CODE_PATH=/code
+VENV="${CODE_PATH}"/.tox/py37
-mkdir -p `pwd`/dist
-chmod 777 `pwd`/dist
+cd "${CODE_PATH}"
+mkdir -p dist
+chmod 777 dist
-$VENV/bin/pip install -q -r requirements-build.txt
-./script/build/write-git-sha
-su -c "$VENV/bin/pyinstaller docker-compose.spec" user
-mv dist/docker-compose $TARGET
-$TARGET version
+"${VENV}"/bin/pip3 install -q -r requirements-build.txt
+
+# TODO(ulyssessouza) To check if really needed
+if [ -z "${DOCKER_COMPOSE_GITSHA}" ]; then
+ DOCKER_COMPOSE_GITSHA="$(script/build/write-git-sha)"
+fi
+echo "${DOCKER_COMPOSE_GITSHA}" > compose/GITSHA
+
+export PATH="${CODE_PATH}/pyinstaller:${PATH}"
+
+if [ ! -z "${BUILD_BOOTLOADER}" ]; then
+ # Build bootloader for alpine; develop is the main branch
+ git clone --single-branch --branch develop https://github.com/pyinstaller/pyinstaller.git /tmp/pyinstaller
+ cd /tmp/pyinstaller/bootloader
+ # Checkout commit corresponding to version in requirements-build
+ git checkout v3.5
+ "${VENV}"/bin/python3 ./waf configure --no-lsb all
+ "${VENV}"/bin/pip3 install ..
+ cd "${CODE_PATH}"
+ rm -Rf /tmp/pyinstaller
+else
+ echo "NOT compiling bootloader!!!"
+fi
+
+"${VENV}"/bin/pyinstaller --exclude-module pycrypto --exclude-module PyInstaller docker-compose.spec
+ls -la dist/
+ldd dist/docker-compose
+mv dist/docker-compose /usr/local/bin
+docker-compose version
diff --git a/script/build/osx b/script/build/osx
index 0c4b062b..52991458 100755
--- a/script/build/osx
+++ b/script/build/osx
@@ -1,15 +1,16 @@
#!/bin/bash
set -ex
-PATH="/usr/local/bin:$PATH"
+TOOLCHAIN_PATH="$(realpath $(dirname $0)/../../build/toolchain)"
rm -rf venv
-virtualenv -p /usr/local/bin/python3 venv
+virtualenv -p "${TOOLCHAIN_PATH}"/bin/python3 venv
venv/bin/pip install -r requirements.txt
venv/bin/pip install -r requirements-build.txt
venv/bin/pip install --no-deps .
-./script/build/write-git-sha
+DOCKER_COMPOSE_GITSHA="$(script/build/write-git-sha)"
+echo "${DOCKER_COMPOSE_GITSHA}" > compose/GITSHA
venv/bin/pyinstaller docker-compose.spec
mv dist/docker-compose dist/docker-compose-Darwin-x86_64
dist/docker-compose-Darwin-x86_64 version
diff --git a/script/build/test-image b/script/build/test-image
index a2eb62cd..4964a5f9 100755
--- a/script/build/test-image
+++ b/script/build/test-image
@@ -7,11 +7,12 @@ if [ -z "$1" ]; then
exit 1
fi
-TAG=$1
+TAG="$1"
+IMAGE="docker/compose-tests"
-docker build -t docker-compose-tests:tmp .
-ctnr_id=$(docker create --entrypoint=tox docker-compose-tests:tmp)
-docker commit $ctnr_id docker/compose-tests:latest
-docker tag docker/compose-tests:latest docker/compose-tests:$TAG
-docker rm -f $ctnr_id
-docker rmi -f docker-compose-tests:tmp
+DOCKER_COMPOSE_GITSHA="$(script/build/write-git-sha)"
+docker build -t "${IMAGE}:${TAG}" . \
+ --target build \
+ --build-arg BUILD_PLATFORM="debian" \
+ --build-arg GIT_COMMIT="${DOCKER_COMPOSE_GITSHA}"
+docker tag "${IMAGE}":"${TAG}" "${IMAGE}":latest
diff --git a/script/build/windows.ps1 b/script/build/windows.ps1
index 98a74815..4c7a8bed 100644
--- a/script/build/windows.ps1
+++ b/script/build/windows.ps1
@@ -6,17 +6,17 @@
#
# http://git-scm.com/download/win
#
-# 2. Install Python 3.6.4:
+# 2. Install Python 3.7.2:
#
# https://www.python.org/downloads/
#
-# 3. Append ";C:\Python36;C:\Python36\Scripts" to the "Path" environment variable:
+# 3. Append ";C:\Python37;C:\Python37\Scripts" to the "Path" environment variable:
#
# https://www.microsoft.com/resources/documentation/windows/xp/all/proddocs/en-us/sysdm_advancd_environmnt_addchange_variable.mspx?mfr=true
#
# 4. In Powershell, run the following commands:
#
-# $ pip install 'virtualenv>=15.1.0'
+# $ pip install 'virtualenv==16.2.0'
# $ Set-ExecutionPolicy -Scope CurrentUser RemoteSigned
#
# 5. Clone the repository:
@@ -44,16 +44,10 @@ virtualenv .\venv
# pip and pyinstaller generate lots of warnings, so we need to ignore them
$ErrorActionPreference = "Continue"
-# Install dependencies
-# Fix for https://github.com/pypa/pip/issues/3964
-# Remove-Item -Recurse -Force .\venv\Lib\site-packages\pip
-# .\venv\Scripts\easy_install pip==9.0.1
-# .\venv\Scripts\pip install --upgrade pip setuptools
-# End fix
-.\venv\Scripts\pip install pypiwin32==220
+.\venv\Scripts\pip install pypiwin32==223
.\venv\Scripts\pip install -r requirements.txt
.\venv\Scripts\pip install --no-deps .
-.\venv\Scripts\pip install --allow-external pyinstaller -r requirements-build.txt
+.\venv\Scripts\pip install -r requirements-build.txt
git rev-parse --short HEAD | out-file -encoding ASCII compose\GITSHA
diff --git a/script/build/write-git-sha b/script/build/write-git-sha
index d16743c6..cac4b6fd 100755
--- a/script/build/write-git-sha
+++ b/script/build/write-git-sha
@@ -2,6 +2,11 @@
#
# Write the current commit sha to the file GITSHA. This file is included in
# packaging so that `docker-compose version` can include the git sha.
-#
-set -e
-git rev-parse --short HEAD > compose/GITSHA
+# sets to 'unknown' and echoes a message if the command is not successful
+
+DOCKER_COMPOSE_GITSHA="$(git rev-parse --short HEAD)"
+if [[ "${?}" != "0" ]]; then
+ echo "Couldn't get revision of the git repository. Setting to 'unknown' instead"
+ DOCKER_COMPOSE_GITSHA="unknown"
+fi
+echo "${DOCKER_COMPOSE_GITSHA}"
diff --git a/script/circle/bintray-deploy.sh b/script/circle/bintray-deploy.sh
index 8c8871aa..d508da36 100755
--- a/script/circle/bintray-deploy.sh
+++ b/script/circle/bintray-deploy.sh
@@ -1,7 +1,5 @@
#!/bin/bash
-set -x
-
curl -f -u$BINTRAY_USERNAME:$BINTRAY_API_KEY -X GET \
https://api.bintray.com/repos/docker-compose/${CIRCLE_BRANCH}
diff --git a/script/fossa.mk b/script/fossa.mk
new file mode 100644
index 00000000..8d7af49d
--- /dev/null
+++ b/script/fossa.mk
@@ -0,0 +1,16 @@
+# Variables for Fossa
+BUILD_ANALYZER?=docker/fossa-analyzer
+FOSSA_OPTS?=--option all-tags:true --option allow-unresolved:true
+
+fossa-analyze:
+ docker run --rm -e FOSSA_API_KEY=$(FOSSA_API_KEY) \
+ -v $(CURDIR)/$*:/go/src/github.com/docker/compose \
+ -w /go/src/github.com/docker/compose \
+ $(BUILD_ANALYZER) analyze ${FOSSA_OPTS} --branch ${BRANCH_NAME}
+
+ # This command is used to run the fossa test command
+fossa-test:
+ docker run -i -e FOSSA_API_KEY=$(FOSSA_API_KEY) \
+ -v $(CURDIR)/$*:/go/src/github.com/docker/compose \
+ -w /go/src/github.com/docker/compose \
+ $(BUILD_ANALYZER) test
diff --git a/script/release/README.md b/script/release/README.md
new file mode 100644
index 00000000..97168d37
--- /dev/null
+++ b/script/release/README.md
@@ -0,0 +1,201 @@
+# Release HOWTO
+
+This file describes the process of making a public release of `docker-compose`.
+Please read it carefully before proceeding!
+
+## Prerequisites
+
+The following things are required to bring a release to a successful conclusion
+
+### Local Docker engine (Linux Containers)
+
+The release script builds images that will be part of the release.
+
+### Docker Hub account
+
+You should be logged into a Docker Hub account that allows pushing to the
+following repositories:
+
+- docker/compose
+- docker/compose-tests
+
+### Python
+
+The release script is written in Python and requires Python 3.3 at minimum.
+
+### A Github account and Github API token
+
+Your Github account needs to have write access on the `docker/compose` repo.
+To generate a Github token, head over to the
+[Personal access tokens](https://github.com/settings/tokens) page in your
+Github settings and select "Generate new token". Your token should include
+(at minimum) the following scopes:
+
+- `repo:status`
+- `public_repo`
+
+This API token should be exposed to the release script through the
+`GITHUB_TOKEN` environment variable.
+
+### A Bintray account and Bintray API key
+
+Your Bintray account will need to be an admin member of the
+[docker-compose organization](https://bintray.com/docker-compose).
+Additionally, you should generate a personal API key. To do so, click your
+username in the top-right hand corner and select "Edit profile" ; on the new
+page, select "API key" in the left-side menu.
+
+This API key should be exposed to the release script through the
+`BINTRAY_TOKEN` environment variable.
+
+### A PyPi account
+
+Said account needs to be a member of the maintainers group for the
+[`docker-compose` project](https://pypi.org/project/docker-compose/).
+
+Moreover, the `~/.pypirc` file should exist on your host and contain the
+relevant pypi credentials.
+
+The following is a sample `.pypirc` provided as a guideline:
+
+```
+[distutils]
+index-servers =
+ pypi
+
+[pypi]
+username = user
+password = pass
+```
+
+## Start a feature release
+
+A feature release is a release that includes all changes present in the
+`master` branch when initiated. It's typically versioned `X.Y.0-rc1`, where
+Y is the minor version of the previous release incremented by one. A series
+of one or more Release Candidates (RCs) should be made available to the public
+to find and squash potential bugs.
+
+From the root of the Compose repository, run the following command:
+```
+./script/release/release.sh -b <BINTRAY_USERNAME> start X.Y.0-rc1
+```
+
+After a short initialization period, the script will invite you to edit the
+`CHANGELOG.md` file. Do so by being careful to respect the same format as
+previous releases. Once done, the script will display a `diff` of the staged
+changes for the bump commit. Once you validate these, a bump commit will be
+created on the newly created release branch and pushed remotely.
+
+The release tool then waits for the CI to conclude before proceeding.
+If failures are reported, the release will be aborted until these are fixed.
+Please refer to the "Resume a draft release" section below for more details.
+
+Once all resources have been prepared, the release script will exit with a
+message resembling this one:
+
+```
+You're almost done! Please verify that everything is in order and you are ready
+to make the release public, then run the following command:
+./script/release/release.sh -b user finalize X.Y.0-rc1
+```
+
+Once you are ready to finalize the release (making binaries and other versioned
+assets public), proceed to the "Finalize a release" section of this guide.
+
+## Start a patch release
+
+A patch release is a release that builds off a previous release with discrete
+additions. This can be an RC release after RC1 (`X.Y.0-rcZ`, `Z > 1`), a GA release
+based off the final RC (`X.Y.0`), or a bugfix release based off a previous
+GA release (`X.Y.Z`, `Z > 0`).
+
+From the root of the Compose repository, run the following command:
+```
+./script/release/release.sh -b <BINTRAY_USERNAME> start --patch=BASE_VERSION RELEASE_VERSION
+```
+
+The process of starting a patch release is identical to starting a feature
+release except for one difference ; at the beginning, the script will ask for
+PR numbers you wish to cherry-pick into the release. These numbers should
+correspond to existing PRs on the docker/compose repository. Multiple numbers
+should be separated by whitespace.
+
+Once you are ready to finalize the release (making binaries and other versioned
+assets public), proceed to the "Finalize a release" section of this guide.
+
+## Finalize a release
+
+Once you're ready to make your release public, you may execute the following
+command from the root of the Compose repository:
+```
+./script/release/release.sh -b <BINTRAY_USERNAME> finalize RELEASE_VERSION
+```
+
+Note that this command will create and publish versioned assets to the public.
+As a result, it can not be reverted. The command will perform some basic
+sanity checks before doing so, but it is your responsibility to ensure
+everything is in order before pushing the button.
+
+After the command exits, you should make sure:
+
+- The `docker/compose:VERSION` image is available on Docker Hub and functional
+- The `pip install -U docker-compose==VERSION` command correctly installs the
+ specified version
+- The install command on the Github release page installs the new release
+
+## Resume a draft release
+
+"Resuming" a release lets you address the following situations occurring before
+a release is made final:
+
+- Cherry-pick additional PRs to include in the release
+- Resume a release that was aborted because of CI failures after they've been
+ addressed
+- Rebuild / redownload assets after manual changes have been made to the
+ release branch
+- etc.
+
+From the root of the Compose repository, run the following command:
+```
+./script/release/release.sh -b <BINTRAY_USERNAME> resume RELEASE_VERSION
+```
+
+The release tool will attempt to determine what steps it's already been through
+for the specified release and pick up where it left off. Some steps are
+executed again no matter what as it's assumed they'll produce different
+results, like building images or downloading binaries.
+
+## Cancel a draft release
+
+If issues snuck into your release branch, it is sometimes easier to start from
+scratch. Before a release has been finalized, it is possible to cancel it using
+the following command:
+```
+./script/release/release.sh -b <BINTRAY_USERNAME> cancel RELEASE_VERSION
+```
+
+This will remove the release branch with this release (locally and remotely),
+close the associated PR, remove the release page draft on Github and delete
+the Bintray repository for it, allowing you to start fresh.
+
+## Manual operations
+
+Some common, release-related operations are not covered by this tool and should
+be handled manually by the operator:
+
+- After any release:
+ - Announce new release on Slack
+- After a GA release:
+ - Close the release milestone
+ - Merge back `CHANGELOG.md` changes from the `release` branch into `master`
+ - Bump the version in `compose/__init__.py` to the *next* minor version
+ number with `dev` appended. For example, if you just released `1.4.0`,
+ update it to `1.5.0dev`
+ - Update compose_version in [github.com/docker/docker.github.io/blob/master/_config.yml](https://github.com/docker/docker.github.io/blob/master/_config.yml) and [github.com/docker/docker.github.io/blob/master/_config_authoring.yml](https://github.com/docker/docker.github.io/blob/master/_config_authoring.yml)
+ - Update the release note in [github.com/docker/docker.github.io](https://github.com/docker/docker.github.io/blob/master/release-notes/docker-compose.md)
+
+## Advanced options
+
+You can consult the full list of options for the release tool by executing
+`./script/release/release.sh --help`.
diff --git a/script/release/build-binaries b/script/release/build-binaries
deleted file mode 100755
index a39b186d..00000000
--- a/script/release/build-binaries
+++ /dev/null
@@ -1,40 +0,0 @@
-#!/bin/bash
-#
-# Build the release binaries
-#
-
-. "$(dirname "${BASH_SOURCE[0]}")/utils.sh"
-
-function usage() {
- >&2 cat << EOM
-Build binaries for the release.
-
-This script requires that 'git config branch.${BRANCH}.release' is set to the
-release version for the release branch.
-
-EOM
- exit 1
-}
-
-BRANCH="$(git rev-parse --abbrev-ref HEAD)"
-VERSION="$(git config "branch.${BRANCH}.release")" || usage
-REPO=docker/compose
-
-# Build the binaries
-script/clean
-script/build/linux
-
-echo "Building the container distribution"
-script/build/image $VERSION
-
-echo "Building the compose-tests image"
-script/build/test-image $VERSION
-
-echo "Create a github release"
-# TODO: script more of this https://developer.github.com/v3/repos/releases/
-browser https://github.com/$REPO/releases/new
-
-echo "Don't forget to download the osx and windows binaries from appveyor/bintray\!"
-echo "https://dl.bintray.com/docker-compose/$BRANCH/"
-echo "https://ci.appveyor.com/project/docker/compose"
-echo
diff --git a/script/release/contributors b/script/release/contributors
deleted file mode 100755
index 4657dd80..00000000
--- a/script/release/contributors
+++ /dev/null
@@ -1,30 +0,0 @@
-#!/bin/bash
-set -e
-
-
-function usage() {
- >&2 cat << EOM
-Print the list of github contributors for the release
-
-Usage:
-
- $0 <previous release tag>
-EOM
- exit 1
-}
-
-[[ -n "$1" ]] || usage
-PREV_RELEASE=$1
-BRANCH="$(git rev-parse --abbrev-ref HEAD)"
-URL="https://api.github.com/repos/docker/compose/compare"
-
-contribs=$(curl -sf "$URL/$PREV_RELEASE...$BRANCH" | \
- jq -r '.commits[].author.login' | \
- sort | \
- uniq -c | \
- sort -nr)
-
-echo "Contributions by user: "
-echo "$contribs"
-echo
-echo "$contribs" | awk '{print "@"$2","}' | xargs
diff --git a/script/release/download-binaries b/script/release/download-binaries
deleted file mode 100755
index 0b187f6c..00000000
--- a/script/release/download-binaries
+++ /dev/null
@@ -1,39 +0,0 @@
-#!/bin/bash
-
-function usage() {
- >&2 cat << EOM
-Download Linux, Mac OS and Windows binaries from remote endpoints
-
-Usage:
-
- $0 <version>
-
-Options:
-
- version version string for the release (ex: 1.6.0)
-
-EOM
- exit 1
-}
-
-
-[ -n "$1" ] || usage
-VERSION=$1
-BASE_BINTRAY_URL=https://dl.bintray.com/docker-compose/bump-$VERSION/
-DESTINATION=binaries-$VERSION
-APPVEYOR_URL=https://ci.appveyor.com/api/projects/docker/compose/\
-artifacts/dist%2Fdocker-compose-Windows-x86_64.exe?branch=bump-$VERSION
-
-mkdir $DESTINATION
-
-
-wget -O $DESTINATION/docker-compose-Darwin-x86_64 $BASE_BINTRAY_URL/docker-compose-Darwin-x86_64
-wget -O $DESTINATION/docker-compose-Linux-x86_64 $BASE_BINTRAY_URL/docker-compose-Linux-x86_64
-wget -O $DESTINATION/docker-compose-Windows-x86_64.exe $APPVEYOR_URL
-
-echo -e "\n\nCopy the following lines into the integrity check table in the release notes:\n\n"
-cd $DESTINATION
-rm -rf *.sha256
-ls | xargs sha256sum | sed 's/ / | /g' | sed -r 's/([^ |]+)/`\1`/g'
-ls | xargs -I@ bash -c "sha256sum @ | cut -d' ' -f1 > @.sha256"
-cd -
diff --git a/script/release/make-branch b/script/release/make-branch
deleted file mode 100755
index b8a0cd31..00000000
--- a/script/release/make-branch
+++ /dev/null
@@ -1,86 +0,0 @@
-#!/bin/bash
-#
-# Prepare a new release branch
-#
-
-. "$(dirname "${BASH_SOURCE[0]}")/utils.sh"
-
-function usage() {
- >&2 cat << EOM
-Create a new release branch 'release-<version>'
-
-Usage:
-
- $0 <version> [<base_version>]
-
-Options:
-
- version version string for the release (ex: 1.6.0)
- base_version branch or tag to start from. Defaults to master. For
- bug-fix releases use the previous stage release tag.
-
-EOM
- exit 1
-}
-
-
-[ -n "$1" ] || usage
-VERSION=$1
-BRANCH=bump-$VERSION
-REPO=docker/compose
-GITHUB_REPO=git@github.com:$REPO
-
-if [ -z "$2" ]; then
- BASE_VERSION="master"
-else
- BASE_VERSION=$2
-fi
-
-
-DEFAULT_REMOTE=release
-REMOTE="$(find_remote "$GITHUB_REPO")"
-# If we don't have a docker remote add one
-if [ -z "$REMOTE" ]; then
- echo "Creating $DEFAULT_REMOTE remote"
- git remote add ${DEFAULT_REMOTE} ${GITHUB_REPO}
-fi
-
-# handle the difference between a branch and a tag
-if [ -z "$(git name-rev --tags $BASE_VERSION | grep tags)" ]; then
- BASE_VERSION=$REMOTE/$BASE_VERSION
-fi
-
-echo "Creating a release branch $VERSION from $BASE_VERSION"
-read -n1 -r -p "Continue? (ctrl+c to cancel)"
-git fetch $REMOTE -p
-git checkout -b $BRANCH $BASE_VERSION
-
-echo "Merging remote release branch into new release branch"
-git merge --strategy=ours --no-edit $REMOTE/release
-
-# Store the release version for this branch in git, so that other release
-# scripts can use it
-git config "branch.${BRANCH}.release" $VERSION
-
-
-editor=${EDITOR:-vim}
-
-echo "Update versions in compose/__init__.py, script/run/run.sh"
-$editor compose/__init__.py
-$editor script/run/run.sh
-
-
-echo "Write release notes in CHANGELOG.md"
-browser "https://github.com/docker/compose/issues?q=milestone%3A$VERSION+is%3Aclosed"
-$editor CHANGELOG.md
-
-
-git diff
-echo "Verify changes before commit. Exit the shell to commit changes"
-$SHELL || true
-git commit -a -m "Bump $VERSION" --signoff --no-verify
-
-
-echo "Push branch to docker remote"
-git push $REMOTE
-browser https://github.com/$REPO/compare/docker:release...$BRANCH?expand=1
diff --git a/script/release/push-release b/script/release/push-release
index 0578aaff..f28c1d4f 100755
--- a/script/release/push-release
+++ b/script/release/push-release
@@ -26,12 +26,6 @@ if [ -z "$(command -v jq 2> /dev/null)" ]; then
fi
-if [ -z "$(command -v pandoc 2> /dev/null)" ]; then
- >&2 echo "$0 requires http://pandoc.org/"
- >&2 echo "Please install it and make sure it is available on your \$PATH."
- exit 2
-fi
-
API=https://api.github.com/repos
REPO=docker/compose
GITHUB_REPO=git@github.com:$REPO
@@ -59,8 +53,6 @@ docker push docker/compose-tests:latest
docker push docker/compose-tests:$VERSION
echo "Uploading package to PyPI"
-pandoc -f markdown -t rst README.md -o README.rst
-sed -i -e 's/logo.png?raw=true/https:\/\/github.com\/docker\/compose\/raw\/master\/logo.png?raw=true/' README.rst
./script/build/write-git-sha
python setup.py sdist bdist_wheel
if [ "$(command -v twine 2> /dev/null)" ]; then
diff --git a/script/release/release.md.tmpl b/script/release/release.md.tmpl
new file mode 100644
index 00000000..4d0ebe92
--- /dev/null
+++ b/script/release/release.md.tmpl
@@ -0,0 +1,34 @@
+If you're a Mac or Windows user, the best way to install Compose and keep it up-to-date is **[Docker Desktop for Mac and Windows](https://www.docker.com/products/docker-desktop)**.
+
+Docker Desktop will automatically install the latest version of Docker Engine for you.
+
+Alternatively, you can use the usual commands to install or upgrade Compose:
+
+```
+curl -L https://github.com/docker/compose/releases/download/{{version}}/docker-compose-`uname -s`-`uname -m` -o /usr/local/bin/docker-compose
+chmod +x /usr/local/bin/docker-compose
+```
+
+See the [install docs](https://docs.docker.com/compose/install/) for more install options and instructions.
+
+## Compose file format compatibility matrix
+
+| Compose file format | Docker Engine |
+| --- | --- |
+{% for engine, formats in compat_matrix.items() -%}
+| {% for format in formats %}{{format}}{% if not loop.last %}, {% endif %}{% endfor %} | {{engine}}+ |
+{% endfor -%}
+
+## Changes
+
+{{changelog}}
+
+Thanks to {% for name in contributors %}@{{name}}{% if not loop.last %}, {% endif %}{% endfor %} for contributing to this release!
+
+## Integrity check
+
+Binary name | SHA-256 sum
+| --- | --- |
+{% for filename, sha in integrity.items() -%}
+| `{{filename}}` | `{{sha[1]}}` |
+{% endfor -%}
diff --git a/script/release/release.py b/script/release/release.py
new file mode 100755
index 00000000..82bc9a0a
--- /dev/null
+++ b/script/release/release.py
@@ -0,0 +1,387 @@
+from __future__ import absolute_import
+from __future__ import print_function
+from __future__ import unicode_literals
+
+import argparse
+import os
+import shutil
+import sys
+import time
+
+from jinja2 import Template
+from release.bintray import BintrayAPI
+from release.const import BINTRAY_ORG
+from release.const import NAME
+from release.const import REPO_ROOT
+from release.downloader import BinaryDownloader
+from release.images import ImageManager
+from release.images import is_tag_latest
+from release.pypi import check_pypirc
+from release.pypi import pypi_upload
+from release.repository import delete_assets
+from release.repository import get_contributors
+from release.repository import Repository
+from release.repository import upload_assets
+from release.utils import branch_name
+from release.utils import compatibility_matrix
+from release.utils import read_release_notes_from_changelog
+from release.utils import ScriptError
+from release.utils import update_init_py_version
+from release.utils import update_run_sh_version
+from release.utils import yesno
+
+
def create_initial_branch(repository, args):
    """Create the bump branch for ``args.release`` and run the bump flow.

    For patch releases (``args.base`` set) the operator may cherry-pick PRs
    onto the new branch first. Finishes via create_bump_commit (version
    files, CHANGELOG, push, Bintray repository).
    """
    branch = repository.create_release_branch(args.release, args.base)

    if args.base and args.cherries:
        print('Detected patch version.')
        picks = input('Indicate (space-separated) PR numbers to cherry-pick then press Enter:\n')
        repository.cherry_pick_prs(branch, picks.split())

    return create_bump_commit(repository, branch, args.bintray_user, args.bintray_org)
+
+
def create_bump_commit(repository, release_branch, bintray_user, bintray_org):
    """Update version files on the release branch, commit, push, and ensure
    the Bintray data repository for this branch exists.

    The release version is read back from the branch's git config (written
    by Repository.create_release_branch). The operator must complete
    CHANGELOG.md and approve the diff before anything is committed.
    """
    with release_branch.config_reader() as cfg:
        release = cfg.get('release')
    print('Updating version info in __init__.py and run.sh')
    update_run_sh_version(release)
    update_init_py_version(release)

    input('Please add the release notes to the CHANGELOG.md file, then press Enter to continue.')
    proceed = None
    while not proceed:
        # Re-show the diff and re-prompt until the operator answers "y"/"yes";
        # yesno() returns False on "n" and None on unrecognized input.
        print(repository.diff())
        proceed = yesno('Are these changes ok? y/N ', default=False)

    if repository.diff():
        # Only create a commit when the working tree actually changed.
        repository.create_bump_commit(release_branch, release)
    repository.push_branch_to_remote(release_branch)

    bintray_api = BintrayAPI(os.environ['BINTRAY_TOKEN'], bintray_user)
    if not bintray_api.repository_exists(bintray_org, release_branch.name):
        print('Creating data repository {} on bintray'.format(release_branch.name))
        # 'generic' type: this repo only stores the release binaries.
        bintray_api.create_repository(bintray_org, release_branch.name, 'generic')
    else:
        print('Bintray repository {} already exists. Skipping'.format(release_branch.name))
+
+
def monitor_pr_status(pr_data):
    """Poll the PR's combined commit status until CI completes.

    Returns True once all non-DCO checks have succeeded; raises ScriptError
    as soon as any check reports failure or error. Polls every 30 seconds.
    """
    print('Waiting for CI to complete...')
    # Only the latest commit on the PR is relevant for CI status.
    last_commit = pr_data.get_commits().reversed[0]
    while True:
        status = last_commit.get_combined_status()
        if status.state == 'pending' or status.state == 'failure':
            summary = {
                'pending': 0,
                'success': 0,
                'failure': 0,
                'error': 0,
            }
            for detail in status.statuses:
                if detail.context == 'dco-signed':
                    # dco-signed check breaks on merge remote-tracking ; ignore it
                    continue
                if detail.state in summary:
                    summary[detail.state] += 1
            print(
                '{pending} pending, {success} successes, {failure} failures, '
                '{error} errors'.format(**summary)
            )
            if summary['failure'] > 0 or summary['error'] > 0:
                raise ScriptError('CI failures detected!')
            elif summary['pending'] == 0 and summary['success'] > 0:
                # This check assumes at least 1 non-DCO CI check to avoid race conditions.
                # If testing on a repo without CI, use --skip-ci-check to avoid looping eternally
                return True
            # Poll interval between combined-status queries.
            time.sleep(30)
        elif status.state == 'success':
            print('{} successes: all clear!'.format(status.total_count))
            return True
+
+
def check_pr_mergeable(pr_data):
    """Return True only when GitHub reports the PR as mergeable.

    GitHub may also report ``None`` while mergeability is still being
    computed; only an explicit ``False`` prints the warning, but anything
    other than ``True`` makes this function return False.
    """
    mergeable = pr_data.mergeable
    if mergeable is False:
        # mergeable can also be null, in which case the warning would be a false positive.
        print(
            'WARNING!! PR #{} can not currently be merged. You will need to '
            'resolve the conflicts manually before finalizing the release.'.format(pr_data.number)
        )

    return mergeable is True
+
+
def create_release_draft(repository, version, pr_data, files):
    """Render release.md.tmpl and create a draft GitHub release for *version*.

    *files* maps binary filename -> (local path, sha256 hex digest), as
    produced by BinaryDownloader.download_all(); it feeds the integrity
    table. Contributors are collected from the release PR's commits.
    """
    print('Creating Github release draft')
    with open(os.path.join(os.path.dirname(__file__), 'release.md.tmpl'), 'r') as f:
        template = Template(f.read())
    print('Rendering release notes based on template')
    release_notes = template.render(
        version=version,
        compat_matrix=compatibility_matrix(),
        integrity=files,
        contributors=get_contributors(pr_data),
        changelog=read_release_notes_from_changelog(),
    )
    gh_release = repository.create_release(
        # '-rc' in the version marks a prerelease; the tag is created on the
        # 'release' branch when the draft is published.
        version, release_notes, draft=True, prerelease='-rc' in version,
        target_commitish='release'
    )
    print('Release draft initialized')
    return gh_release
+
+
def print_final_instructions(args):
    """Tell the operator how to finalize the release once it is verified."""
    command = '{exe} -b {user} finalize {version}'.format(
        exe='./script/release/release.sh', user=args.bintray_user, version=args.release
    )
    print(
        "You're almost done! Please verify that everything is in order and "
        "you are ready to make the release public, then run the following "
        "command:\n" + command
    )
+
+
def distclean():
    """Remove build artifacts, bytecode caches and coverage files from the tree."""
    print('Running distclean...')
    folders = [
        os.path.join(REPO_ROOT, 'build'), os.path.join(REPO_ROOT, 'dist'),
        os.path.join(REPO_ROOT, 'docker-compose.egg-info')
    ]
    files = []
    for base, dirnames, fnames in os.walk(REPO_ROOT):
        files.extend(
            os.path.normpath(os.path.join(base, fname))
            for fname in fnames
            if fname.endswith('.pyc') or fname.startswith('.coverage.')
        )
        folders.extend(
            os.path.normpath(os.path.join(base, dirname))
            for dirname in dirnames
            if dirname in ('__pycache__', '.coverage-binfiles')
        )

    for path in files:
        os.unlink(path)

    # ignore_errors: some of the fixed folders may not exist.
    for folder in folders:
        shutil.rmtree(folder, ignore_errors=True)
+
+
def resume(args):
    """Resume a previously started release.

    Re-runs the bump/PR/CI/binaries/draft pipeline against the existing bump
    branch, reusing the release PR and GitHub draft when they already exist.
    Returns 0 on success, 1 on ScriptError.
    """
    try:
        distclean()
        repository = Repository(REPO_ROOT, args.repo)
        br_name = branch_name(args.release)
        if not repository.branch_exists(br_name):
            raise ScriptError('No local branch exists for this release.')
        gh_release = repository.find_release(args.release)
        if gh_release and not gh_release.draft:
            print('WARNING!! Found non-draft (public) release for this version!')
            proceed = yesno(
                'Are you sure you wish to proceed? Modifying an already '
                'released version is dangerous! y/N ', default=False
            )
            # FIX: yesno() returns True/False/None, not a string; the previous
            # `proceed.lower() is not True` raised AttributeError on any answer.
            if proceed is not True:
                raise ScriptError('Aborting release')

        release_branch = repository.checkout_branch(br_name)
        if args.cherries:
            cherries = input('Indicate (space-separated) PR numbers to cherry-pick then press Enter:\n')
            repository.cherry_pick_prs(release_branch, cherries.split())

        create_bump_commit(repository, release_branch, args.bintray_user, args.bintray_org)
        pr_data = repository.find_release_pr(args.release)
        if not pr_data:
            pr_data = repository.create_release_pull_request(args.release)
        check_pr_mergeable(pr_data)
        if not args.skip_ci:
            monitor_pr_status(pr_data)
        downloader = BinaryDownloader(args.destination)
        files = downloader.download_all(args.release)
        if not gh_release:
            gh_release = create_release_draft(repository, args.release, pr_data, files)
        # Drop assets from any previous attempt before re-uploading.
        delete_assets(gh_release)
        upload_assets(gh_release, files)
        tag_as_latest = is_tag_latest(args.release)
        img_manager = ImageManager(args.release, tag_as_latest)
        img_manager.build_images(repository)
    except ScriptError as e:
        print(e)
        return 1

    print_final_instructions(args)
    return 0
+
+
def cancel(args):
    """Roll back an in-progress release.

    Closes the release PR, deletes the GitHub release draft, removes the
    local and remote bump branches, deletes the Bintray data repository and
    cleans build artifacts. Returns 0 on success, 1 on ScriptError.
    """
    try:
        repository = Repository(REPO_ROOT, args.repo)
        repository.close_release_pr(args.release)
        repository.remove_release(args.release)
        repository.remove_bump_branch(args.release)
        bintray_api = BintrayAPI(os.environ['BINTRAY_TOKEN'], args.bintray_user)
        print('Removing Bintray data repository for {}'.format(args.release))
        bintray_api.delete_repository(args.bintray_org, branch_name(args.release))
        distclean()
    except ScriptError as e:
        print(e)
        return 1
    print('Release cancellation complete.')
    return 0
+
+
def start(args):
    """Begin a new release: branch, bump PR, CI wait, binaries, draft, images.

    Returns 0 on success, 1 on ScriptError.
    """
    distclean()
    try:
        repository = Repository(REPO_ROOT, args.repo)
        create_initial_branch(repository, args)
        pr_data = repository.create_release_pull_request(args.release)
        check_pr_mergeable(pr_data)
        if not args.skip_ci:
            # Block until CI passes on the release PR.
            monitor_pr_status(pr_data)
        downloader = BinaryDownloader(args.destination)
        files = downloader.download_all(args.release)
        gh_release = create_release_draft(repository, args.release, pr_data, files)
        upload_assets(gh_release, files)
        tag_as_latest = is_tag_latest(args.release)
        img_manager = ImageManager(args.release, tag_as_latest)
        img_manager.build_images(repository)
    except ScriptError as e:
        print(e)
        return 1

    print_final_instructions(args)
    return 0
+
+
def finalize(args):
    """Publish a prepared release: merge the PR, upload to PyPI, push images.

    All safety checks (pypirc credentials, PR mergeability, images present
    locally, bump branch and draft release exist) run before any
    irreversible step. Returns 0 on success, 1 on ScriptError.
    """
    distclean()
    try:
        check_pypirc()
        repository = Repository(REPO_ROOT, args.repo)
        tag_as_latest = is_tag_latest(args.release)
        img_manager = ImageManager(args.release, tag_as_latest)
        pr_data = repository.find_release_pr(args.release)
        if not pr_data:
            raise ScriptError('No PR found for {}'.format(args.release))
        if not check_pr_mergeable(pr_data):
            raise ScriptError('Can not finalize release with an unmergeable PR')
        if not img_manager.check_images():
            raise ScriptError('Missing release image')
        br_name = branch_name(args.release)
        if not repository.branch_exists(br_name):
            raise ScriptError('No local branch exists for this release.')
        gh_release = repository.find_release(args.release)
        if not gh_release:
            raise ScriptError('No Github release draft for this version')

        repository.checkout_branch(br_name)

        # Build the sdist/wheel from the release branch before merging.
        os.system('python {setup_script} sdist bdist_wheel'.format(
            setup_script=os.path.join(REPO_ROOT, 'setup.py')))

        merge_status = pr_data.merge()
        if not merge_status.merged and not args.finalize_resume:
            # With --finalize-resume, a PR that is already merged is not fatal.
            raise ScriptError(
                'Unable to merge PR #{}: {}'.format(pr_data.number, merge_status.message)
            )

        pypi_upload(args)

        img_manager.push_images()
        repository.publish_release(gh_release)
    except ScriptError as e:
        print(e)
        return 1

    return 0
+
+
# Valid sub-commands for the 'action' positional argument.
ACTIONS = [
    'start',
    'cancel',
    'resume',
    'finalize',
]

# Shown verbatim at the end of --help output (RawTextHelpFormatter).
EPILOG = '''Example uses:
    * Start a new feature release (includes all changes currently in master)
        release.sh -b user start 1.23.0
    * Start a new patch release
        release.sh -b user --patch 1.21.0 start 1.21.1
    * Cancel / rollback an existing release draft
        release.sh -b user cancel 1.23.0
    * Restart a previously aborted patch release
        release.sh -b user -p 1.21.0 resume 1.21.1
'''
+
+
def main():
    """Parse CLI arguments and dispatch to the requested release action.

    Requires GITHUB_TOKEN and BINTRAY_TOKEN in the environment; returns a
    process exit code (0 on success, 1 on failure).
    """
    # Both API tokens are mandatory for every action.
    for token_var in ('GITHUB_TOKEN', 'BINTRAY_TOKEN'):
        if token_var not in os.environ:
            print('{} environment variable must be set'.format(token_var))
            return 1

    parser = argparse.ArgumentParser(
        description='Orchestrate a new release of docker/compose. This tool assumes that you have '
                    'obtained a Github API token and Bintray API key and set the GITHUB_TOKEN and '
                    'BINTRAY_TOKEN environment variables accordingly.',
        epilog=EPILOG, formatter_class=argparse.RawTextHelpFormatter)
    parser.add_argument(
        'action', choices=ACTIONS, help='The action to be performed for this release'
    )
    parser.add_argument('release', help='Release number, e.g. 1.9.0-rc1, 2.1.1')
    parser.add_argument(
        '--patch', '-p', dest='base',
        help='Which version is being patched by this release'
    )
    parser.add_argument(
        '--repo', '-r', dest='repo', default=NAME,
        help='Start a release for the given repo (default: {})'.format(NAME)
    )
    parser.add_argument(
        '-b', dest='bintray_user', required=True, metavar='USER',
        help='Username associated with the Bintray API key'
    )
    parser.add_argument(
        '--bintray-org', dest='bintray_org', metavar='ORG', default=BINTRAY_ORG,
        help='Organization name on bintray where the data repository will be created.'
    )
    parser.add_argument(
        '--destination', '-o', metavar='DIR', default='binaries',
        help='Directory where release binaries will be downloaded relative to the project root'
    )
    parser.add_argument(
        '--no-cherries', '-C', dest='cherries', action='store_false',
        help='If set, the program will not prompt the user for PR numbers to cherry-pick'
    )
    parser.add_argument(
        '--skip-ci-checks', dest='skip_ci', action='store_true',
        help='If set, the program will not wait for CI jobs to complete'
    )
    parser.add_argument(
        '--finalize-resume', dest='finalize_resume', action='store_true',
        help='If set, finalize will continue through steps that have already been completed.'
    )
    args = parser.parse_args()

    # Dispatch table instead of an if/elif ladder.
    handlers = {
        'start': start,
        'resume': resume,
        'cancel': cancel,
        'finalize': finalize,
    }
    handler = handlers.get(args.action)
    if handler is not None:
        return handler(args)

    print('Unexpected action "{}"'.format(args.action), file=sys.stderr)
    return 1


if __name__ == '__main__':
    sys.exit(main())
diff --git a/script/release/release.sh b/script/release/release.sh
new file mode 100755
index 00000000..5f853808
--- /dev/null
+++ b/script/release/release.sh
@@ -0,0 +1,13 @@
#!/bin/sh
# Wrapper that runs release.py inside its dedicated virtualenv (override the
# location with the VENV_DIR environment variable).

# Create the venv on first use.
if test -d ${VENV_DIR:-./.release-venv}; then
    true
else
    ./script/release/setup-venv.sh
fi

# Default to --help when invoked with no arguments.
# FIX: the previous version assigned an `args` variable that was never used,
# so invoking the script with no arguments silently passed nothing through.
if test -z "$*"; then
    set -- --help
fi

${VENV_DIR:-./.release-venv}/bin/python ./script/release/release.py "$@"
diff --git a/script/release/release/__init__.py b/script/release/release/__init__.py
new file mode 100644
index 00000000..e69de29b
--- /dev/null
+++ b/script/release/release/__init__.py
diff --git a/script/release/release/bintray.py b/script/release/release/bintray.py
new file mode 100644
index 00000000..fb4008ad
--- /dev/null
+++ b/script/release/release/bintray.py
@@ -0,0 +1,50 @@
+from __future__ import absolute_import
+from __future__ import unicode_literals
+
+import json
+
+import requests
+
+from .const import NAME
+
+
class BintrayAPI(requests.Session):
    """Thin authenticated wrapper over the Bintray REST API.

    Credentials are passed as (user, api_key) HTTP basic auth on every request.
    """
    def __init__(self, api_key, user, *args, **kwargs):
        super(BintrayAPI, self).__init__(*args, **kwargs)
        self.auth = (user, api_key)
        self.base_url = 'https://api.bintray.com/'

    def create_repository(self, subject, repo_name, repo_type='generic'):
        """Create a public repository under *subject*; return the HTTP response."""
        url = '{base}repos/{subject}/{repo_name}'.format(
            base=self.base_url, subject=subject, repo_name=repo_name,
        )
        data = {
            'name': repo_name,
            'type': repo_type,
            'private': False,
            'desc': 'Automated release for {}: {}'.format(NAME, repo_name),
            'labels': ['docker-compose', 'docker', 'release-bot'],
        }
        return self.post_json(url, data)

    def repository_exists(self, subject, repo_name):
        """Return True if the repository exists; raise for unexpected HTTP errors."""
        # FIX: base_url already ends with '/'; the extra separator previously
        # produced '...com//repos/...', inconsistent with the other methods.
        url = '{base}repos/{subject}/{repo_name}'.format(
            base=self.base_url, subject=subject, repo_name=repo_name,
        )
        result = self.get(url)
        if result.status_code == 404:
            return False
        result.raise_for_status()
        return True

    def delete_repository(self, subject, repo_name):
        """Delete the repository; return the HTTP response."""
        url = '{base}repos/{subject}/{repo_name}'.format(
            base=self.base_url, subject=subject, repo_name=repo_name,
        )
        return self.delete(url)

    def post_json(self, url, data, **kwargs):
        """POST *data* serialized as JSON with the proper Content-Type header."""
        if 'headers' not in kwargs:
            kwargs['headers'] = {}
        kwargs['headers']['Content-Type'] = 'application/json'
        return self.post(url, data=json.dumps(data), **kwargs)
diff --git a/script/release/release/const.py b/script/release/release/const.py
new file mode 100644
index 00000000..52458ea1
--- /dev/null
+++ b/script/release/release/const.py
@@ -0,0 +1,10 @@
+from __future__ import absolute_import
+from __future__ import unicode_literals
+
+import os
+
+
# Path to the repository checkout, three levels up from this file.
REPO_ROOT = os.path.join(os.path.dirname(__file__), '..', '..', '..')
# GitHub repository and Docker Hub image name.
NAME = 'docker/compose'
# Image used for the UCP e2e test build ('docker/compose-tests').
COMPOSE_TESTS_IMAGE_BASE_NAME = NAME + '-tests'
# Bintray organization hosting the per-branch binary repositories.
BINTRAY_ORG = 'docker-compose'
diff --git a/script/release/release/downloader.py b/script/release/release/downloader.py
new file mode 100644
index 00000000..d92ae78b
--- /dev/null
+++ b/script/release/release/downloader.py
@@ -0,0 +1,72 @@
+from __future__ import absolute_import
+from __future__ import print_function
+from __future__ import unicode_literals
+
+import hashlib
+import os
+
+import requests
+
+from .const import BINTRAY_ORG
+from .const import NAME
+from .const import REPO_ROOT
+from .utils import branch_name
+
+
class BinaryDownloader(requests.Session):
    """Fetches release binaries and records their SHA-256 digests.

    Linux/macOS binaries come from the per-branch Bintray repository;
    Windows binaries come from the AppVeyor build artifacts.
    """
    base_bintray_url = 'https://dl.bintray.com/{}'.format(BINTRAY_ORG)
    base_appveyor_url = 'https://ci.appveyor.com/api/projects/{}/artifacts/'.format(NAME)

    def __init__(self, destination, *args, **kwargs):
        super(BinaryDownloader, self).__init__(*args, **kwargs)
        self.destination = destination
        os.makedirs(self.destination, exist_ok=True)

    def download_from_bintray(self, repo_name, filename):
        """Download *filename* from the branch's Bintray repo; return (path, sha256)."""
        print('Downloading {} from bintray'.format(filename))
        # FIX: the format string previously contained a garbled placeholder
        # where {filename} belongs, producing an invalid download URL.
        url = '{base}/{repo_name}/{filename}'.format(
            base=self.base_bintray_url, repo_name=repo_name, filename=filename
        )
        full_dest = os.path.join(REPO_ROOT, self.destination, filename)
        return self._download(url, full_dest)

    def download_from_appveyor(self, branch_name, filename):
        """Download *filename* from AppVeyor's dist/ artifacts; return (path, sha256)."""
        print('Downloading {} from appveyor'.format(filename))
        # 'dist%2F' is the URL-encoded 'dist/' artifact path prefix.
        # FIX: same garbled {filename} placeholder as above.
        url = '{base}/dist%2F{filename}?branch={branch_name}'.format(
            base=self.base_appveyor_url, filename=filename, branch_name=branch_name
        )
        full_dest = os.path.join(REPO_ROOT, self.destination, filename)
        return self._download(url, full_dest)

    def _download(self, url, full_dest):
        """Stream *url* to *full_dest* and write a '.sha256' sidecar file.

        Returns (full_dest, hex_digest).
        """
        m = hashlib.sha256()
        with open(full_dest, 'wb') as f:
            r = self.get(url, stream=True)
            for chunk in r.iter_content(chunk_size=1024 * 600, decode_unicode=False):
                print('.', end='', flush=True)
                m.update(chunk)
                f.write(chunk)

        print(' download complete')
        hex_digest = m.hexdigest()
        with open(full_dest + '.sha256', 'w') as f:
            f.write('{} {}\n'.format(hex_digest, os.path.basename(full_dest)))
        return full_dest, hex_digest

    def download_all(self, version):
        """Download all three platform binaries for *version*.

        Returns {filename: (local_path, sha256_hex)}.
        """
        files = {
            'docker-compose-Darwin-x86_64': None,
            'docker-compose-Linux-x86_64': None,
            'docker-compose-Windows-x86_64.exe': None,
        }

        for filename in files.keys():
            if 'Windows' in filename:
                files[filename] = self.download_from_appveyor(
                    branch_name(version), filename
                )
            else:
                files[filename] = self.download_from_bintray(
                    branch_name(version), filename
                )
        return files
diff --git a/script/release/release/images.py b/script/release/release/images.py
new file mode 100644
index 00000000..17d572df
--- /dev/null
+++ b/script/release/release/images.py
@@ -0,0 +1,157 @@
+from __future__ import absolute_import
+from __future__ import print_function
+from __future__ import unicode_literals
+
+import base64
+import json
+import os
+
+import docker
+from enum import Enum
+
+from .const import NAME
+from .const import REPO_ROOT
+from .utils import ScriptError
+from .utils import yesno
+from script.release.release.const import COMPOSE_TESTS_IMAGE_BASE_NAME
+
+
class Platform(Enum):
    """Base-image flavors the runtime image can be built from."""
    ALPINE = 'alpine'
    DEBIAN = 'debian'

    def __str__(self):
        # Used directly in image tags, e.g. '1.25.0-alpine'.
        return self.value
+
+
def is_tag_latest(version):
    """Return True when *version* is a GA release ('x.y.z', not an RC) and
    the operator confirms the images should also be tagged 'latest'."""
    parts = version.split('.')
    ga_version = len(parts) == 3 and all(part.isdigit() for part in parts)
    # Short-circuit: non-GA versions never prompt the operator.
    return ga_version and yesno('Should this release be tagged as \"latest\"? [Y/n]: ', default=True)
+
+
class ImageManager(object):
    """Builds, verifies and pushes the Docker images for a release.

    When *latest* is True, images additionally receive the bare-platform
    and 'latest' tags. Logs in to Docker Hub when HUB_CREDENTIALS (base64
    JSON with Username/Password keys) is present in the environment.
    """
    def __init__(self, version, latest=False):
        self.docker_client = docker.APIClient(**docker.utils.kwargs_from_env())
        self.version = version
        self.latest = latest
        if 'HUB_CREDENTIALS' in os.environ:
            print('HUB_CREDENTIALS found in environment, issuing login')
            credentials = json.loads(base64.urlsafe_b64decode(os.environ['HUB_CREDENTIALS']))
            self.docker_client.login(
                username=credentials['Username'], password=credentials['Password']
            )

    def _tag(self, image, existing_tag, new_tag):
        # Apply an additional tag to an already-built local image.
        existing_repo_tag = '{image}:{tag}'.format(image=image, tag=existing_tag)
        new_repo_tag = '{image}:{tag}'.format(image=image, tag=new_tag)
        self.docker_client.tag(existing_repo_tag, new_repo_tag)

    def get_full_version(self, platform=None):
        # Precedence note: this is ('<version>-<platform>') if platform else
        # '<version>' — the conditional wraps the whole concatenation.
        return self.version + '-' + platform.__str__() if platform else self.version

    def get_runtime_image_tag(self, tag):
        """Return the runtime image reference, e.g. 'docker/compose:1.25.0-alpine'."""
        return '{image_base_image}:{tag}'.format(
            image_base_image=NAME,
            tag=self.get_full_version(tag)
        )

    def build_runtime_image(self, repository, platform):
        """Build the runtime image for *platform* and apply the release tags."""
        git_sha = repository.write_git_sha()
        compose_image_base_name = NAME
        print('Building {image} image ({platform} based)'.format(
            image=compose_image_base_name,
            platform=platform
        ))
        full_version = self.get_full_version(platform)
        build_tag = self.get_runtime_image_tag(platform)
        logstream = self.docker_client.build(
            REPO_ROOT,
            tag=build_tag,
            buildargs={
                'BUILD_PLATFORM': platform.value,
                'GIT_COMMIT': git_sha,
            },
            decode=True
        )
        for chunk in logstream:
            if 'error' in chunk:
                raise ScriptError('Build error: {}'.format(chunk['error']))
            if 'stream' in chunk:
                print(chunk['stream'], end='')

        # The alpine flavor is canonical: it also gets the bare version tag,
        # and 'latest' when this release is tagged as latest.
        if platform == Platform.ALPINE:
            self._tag(compose_image_base_name, full_version, self.version)
        if self.latest:
            self._tag(compose_image_base_name, full_version, platform)
            if platform == Platform.ALPINE:
                self._tag(compose_image_base_name, full_version, 'latest')

    def get_ucp_test_image_tag(self, tag=None):
        """Return the test image reference, defaulting the tag to the release version."""
        return '{image}:{tag}'.format(
            image=COMPOSE_TESTS_IMAGE_BASE_NAME,
            tag=tag or self.version
        )

    # Used for producing a test image for UCP
    def build_ucp_test_image(self, repository):
        """Build the debian-based test image ('build' stage) and tag it 'latest'."""
        print('Building test image (debian based for UCP e2e)')
        git_sha = repository.write_git_sha()
        ucp_test_image_tag = self.get_ucp_test_image_tag()
        logstream = self.docker_client.build(
            REPO_ROOT,
            tag=ucp_test_image_tag,
            target='build',
            buildargs={
                'BUILD_PLATFORM': Platform.DEBIAN.value,
                'GIT_COMMIT': git_sha,
            },
            decode=True
        )
        for chunk in logstream:
            if 'error' in chunk:
                raise ScriptError('Build error: {}'.format(chunk['error']))
            if 'stream' in chunk:
                print(chunk['stream'], end='')

        self._tag(COMPOSE_TESTS_IMAGE_BASE_NAME, self.version, 'latest')

    def build_images(self, repository):
        """Build every image needed for the release."""
        self.build_runtime_image(repository, Platform.ALPINE)
        self.build_runtime_image(repository, Platform.DEBIAN)
        self.build_ucp_test_image(repository)

    def check_images(self):
        """Return True if every image that will be pushed exists locally."""
        for name in self.get_images_to_push():
            try:
                self.docker_client.inspect_image(name)
            except docker.errors.ImageNotFound:
                print('Expected image {} was not found'.format(name))
                return False
        return True

    def get_images_to_push(self):
        """Return the set of image references to push to Docker Hub.

        NOTE(review): is_tag_latest() prompts the operator, so calling this
        may re-ask the 'tag as latest?' question — confirm that's intended.
        """
        tags_to_push = {
            "{}:{}".format(NAME, self.version),
            self.get_runtime_image_tag(Platform.ALPINE),
            self.get_runtime_image_tag(Platform.DEBIAN),
            self.get_ucp_test_image_tag(),
            self.get_ucp_test_image_tag('latest'),
        }
        if is_tag_latest(self.version):
            tags_to_push.add("{}:latest".format(NAME))
        return tags_to_push

    def push_images(self):
        """Push all release images, raising ScriptError on any push error."""
        tags_to_push = self.get_images_to_push()
        print('Build tags to push {}'.format(tags_to_push))
        for name in tags_to_push:
            print('Pushing {} to Docker Hub'.format(name))
            logstream = self.docker_client.push(name, stream=True, decode=True)
            for chunk in logstream:
                if 'status' in chunk:
                    print(chunk['status'])
                if 'error' in chunk:
                    raise ScriptError(
                        'Error pushing {name}: {err}'.format(name=name, err=chunk['error'])
                    )
diff --git a/script/release/release/pypi.py b/script/release/release/pypi.py
new file mode 100644
index 00000000..dc0b0cb9
--- /dev/null
+++ b/script/release/release/pypi.py
@@ -0,0 +1,44 @@
+from __future__ import absolute_import
+from __future__ import unicode_literals
+
+from configparser import Error
+from requests.exceptions import HTTPError
+from twine.commands.upload import main as twine_upload
+from twine.utils import get_config
+
+from .utils import ScriptError
+
+
def pypi_upload(args):
    """Upload the built sdist and wheel to PyPI via twine.

    An HTTP 400 'File already exists' error is fatal unless
    --finalize-resume was given, in which case the upload is skipped.
    """
    print('Uploading to PyPi')
    try:
        # PyPI version strings use 'rc' without the dash (e.g. 1.25.0rc1).
        rel = args.release.replace('-rc', 'rc')
        twine_upload([
            'dist/docker_compose-{}*.whl'.format(rel),
            'dist/docker-compose-{}*.tar.gz'.format(rel)
        ])
    except HTTPError as e:
        if e.response.status_code == 400 and 'File already exists' in str(e):
            if not args.finalize_resume:
                raise ScriptError(
                    'Package already uploaded on PyPi.'
                )
            print('Skipping PyPi upload - package already uploaded')
        else:
            raise ScriptError('Unexpected HTTP error uploading package to PyPi: {}'.format(e))
+
+
def check_pypirc():
    """Raise ScriptError unless ~/.pypirc parses and holds pypi credentials."""
    try:
        config = get_config()
    except Error as e:
        raise ScriptError('Failed to parse .pypirc file: {}'.format(e))

    if config is None:
        raise ScriptError('Failed to parse .pypirc file')

    if 'pypi' not in config:
        raise ScriptError('Missing [pypi] section in .pypirc file')

    pypi_section = config['pypi']
    has_credentials = pypi_section.get('username') and pypi_section.get('password')
    if not has_credentials:
        raise ScriptError('Missing login/password pair for pypi repo')
diff --git a/script/release/release/repository.py b/script/release/release/repository.py
new file mode 100644
index 00000000..a0281eaa
--- /dev/null
+++ b/script/release/release/repository.py
@@ -0,0 +1,246 @@
+from __future__ import absolute_import
+from __future__ import unicode_literals
+
+import os
+import tempfile
+
+import requests
+from git import GitCommandError
+from git import Repo
+from github import Github
+
+from .const import NAME
+from .const import REPO_ROOT
+from .utils import branch_name
+from .utils import read_release_notes_from_changelog
+from .utils import ScriptError
+
+
class Repository(object):
    """Pairs the local git checkout with its GitHub counterpart for release work."""

    def __init__(self, root=None, gh_name=None):
        # Defaults: local checkout at REPO_ROOT, GitHub repo docker/compose.
        if root is None:
            root = REPO_ROOT
        if gh_name is None:
            gh_name = NAME
        self.git_repo = Repo(root)
        self.gh_client = Github(os.environ['GITHUB_TOKEN'])
        self.gh_repo = self.gh_client.get_repo(gh_name)

    def create_release_branch(self, version, base=None):
        """Create and check out the 'bump-<version>' branch.

        Branches from the *base* tag for patch releases, otherwise from the
        remote master; merges the remote 'release' branch with '-s ours' and
        records the version in the branch's git config.
        """
        print('Creating release branch {} based on {}...'.format(version, base or 'master'))
        remote = self.find_remote(self.gh_repo.full_name)
        br_name = branch_name(version)
        remote.fetch()
        if self.branch_exists(br_name):
            raise ScriptError(
                "Branch {} already exists locally. Please remove it before "
                "running the release script, or use `resume` instead.".format(
                    br_name
                )
            )
        if base is not None:
            base = self.git_repo.tag('refs/tags/{}'.format(base))
        else:
            base = 'refs/remotes/{}/master'.format(remote.name)
        release_branch = self.git_repo.create_head(br_name, commit=base)
        release_branch.checkout()
        self.git_repo.git.merge('--strategy=ours', '--no-edit', '{}/release'.format(remote.name))
        with release_branch.config_writer() as cfg:
            cfg.set_value('release', version)
        return release_branch

    def find_remote(self, remote_name=None):
        """Return the first git remote whose URL contains *remote_name*, or None."""
        if not remote_name:
            remote_name = self.gh_repo.full_name
        for remote in self.git_repo.remotes:
            for url in remote.urls:
                if remote_name in url:
                    return remote
        return None

    def create_bump_commit(self, bump_branch, version):
        """Commit all pending changes on *bump_branch* as the signed bump commit."""
        print('Creating bump commit...')
        bump_branch.checkout()
        self.git_repo.git.commit('-a', '-s', '-m "Bump {}"'.format(version), '--no-verify')

    def diff(self):
        """Return the working-tree diff as text."""
        return self.git_repo.git.diff()

    def checkout_branch(self, name):
        """Check out and return the local branch *name*."""
        return self.git_repo.branches[name].checkout()

    def push_branch_to_remote(self, branch, remote_name=None):
        """Force-push *branch* to the matching remote."""
        print('Pushing branch {} to remote...'.format(branch.name))
        remote = self.find_remote(remote_name)
        remote.push(refspec=branch, force=True)

    def branch_exists(self, name):
        """Return True if a local branch called *name* exists."""
        return name in [h.name for h in self.git_repo.heads]

    def create_release_pull_request(self, version):
        """Open the bump PR from 'bump-<version>' into the 'release' branch."""
        return self.gh_repo.create_pull(
            title='Bump {}'.format(version),
            body='Automated release for docker-compose {}\n\n{}'.format(
                version, read_release_notes_from_changelog()
            ),
            base='release',
            head=branch_name(version),
        )

    def create_release(self, version, release_notes, **kwargs):
        """Create a GitHub release (and tag) named *version*."""
        return self.gh_repo.create_git_release(
            tag=version, name=version, message=release_notes, **kwargs
        )

    def find_release(self, version):
        """Return the GitHub release whose tag and title equal *version*, or None."""
        print('Retrieving release draft for {}'.format(version))
        releases = self.gh_repo.get_releases()
        for release in releases:
            if release.tag_name == version and release.title == version:
                return release
        return None

    def publish_release(self, release):
        """Flip a draft release to public, preserving its other attributes."""
        release.update_release(
            name=release.title,
            message=release.body,
            draft=False,
            prerelease=release.prerelease
        )

    def remove_release(self, version):
        """Delete the draft release(s) for *version*; refuses published ones."""
        print('Removing release draft for {}'.format(version))
        releases = self.gh_repo.get_releases()
        for release in releases:
            if release.tag_name == version and release.title == version:
                if not release.draft:
                    print(
                        'The release at {} is no longer a draft. If you TRULY intend '
                        'to remove it, please do so manually.'.format(release.url)
                    )
                    continue
                release.delete_release()

    def remove_bump_branch(self, version, remote_name=None):
        """Delete the local and remote bump branch for *version*.

        Returns True when the remote branch was deleted, False when there
        was nothing to delete.
        """
        name = branch_name(version)
        if not self.branch_exists(name):
            return False
        print('Removing local branch "{}"'.format(name))
        if self.git_repo.active_branch.name == name:
            # Can't delete the branch we are standing on.
            print('Active branch is about to be deleted. Checking out to master...')
            try:
                self.checkout_branch('master')
            except GitCommandError:
                raise ScriptError(
                    'Unable to checkout master. Try stashing local changes before proceeding.'
                )
        self.git_repo.branches[name].delete(self.git_repo, name, force=True)
        print('Removing remote branch "{}"'.format(name))
        remote = self.find_remote(remote_name)
        try:
            remote.push(name, delete=True)
        except GitCommandError as e:
            if 'remote ref does not exist' in str(e):
                return False
            raise ScriptError(
                'Error trying to remove remote branch: {}'.format(e)
            )
        return True

    def find_release_pr(self, version):
        """Return the open PR whose head is the bump branch for *version*, or None."""
        print('Retrieving release PR for {}'.format(version))
        name = branch_name(version)
        open_prs = self.gh_repo.get_pulls(state='open')
        for pr in open_prs:
            if pr.head.ref == name:
                print('Found matching PR #{}'.format(pr.number))
                return pr
        print('No open PR for this release branch.')
        return None

    def close_release_pr(self, version):
        """Close every open PR headed by the bump branch; return how many closed."""
        print('Retrieving and closing release PR for {}'.format(version))
        name = branch_name(version)
        open_prs = self.gh_repo.get_pulls(state='open')
        count = 0
        for pr in open_prs:
            if pr.head.ref == name:
                print('Found matching PR #{}'.format(pr.number))
                pr.edit(state='closed')
                count += 1
        if count == 0:
            print('No open PR for this release branch.')
        return count

    def write_git_sha(self):
        """Write the short HEAD sha to compose/GITSHA and return it."""
        with open(os.path.join(REPO_ROOT, 'compose', 'GITSHA'), 'w') as f:
            f.write(self.git_repo.head.commit.hexsha[:7])
        return self.git_repo.head.commit.hexsha[:7]

    def cherry_pick_prs(self, release_branch, ids):
        """Apply each PR's patch (by number) onto *release_branch* via git am."""
        if not ids:
            return
        release_branch.checkout()
        for i in ids:
            try:
                i = int(i)
            except ValueError as e:
                raise ScriptError('Invalid PR id: {}'.format(e))
            print('Retrieving PR#{}'.format(i))
            pr = self.gh_repo.get_pull(i)
            patch_data = requests.get(pr.patch_url).text
            self.apply_patch(patch_data)

    def apply_patch(self, patch_data):
        """git-am the given patch text with a 3-way merge fallback."""
        with tempfile.NamedTemporaryFile(mode='w', prefix='_compose_cherry', encoding='utf-8') as f:
            f.write(patch_data)
            f.flush()
            self.git_repo.git.am('--3way', f.name)

    def get_prs_in_milestone(self, version):
        """Return sorted PR numbers attached to the open milestone titled *version*.

        Returns None when no matching open milestone exists.
        """
        milestones = self.gh_repo.get_milestones(state='open')
        milestone = None
        for ms in milestones:
            if ms.title == version:
                milestone = ms
                break
        if not milestone:
            print('Didn\'t find a milestone matching "{}"'.format(version))
            return None

        issues = self.gh_repo.get_issues(milestone=milestone, state='all')
        prs = []
        for issue in issues:
            # Issues with a pull_request attribute are PRs.
            if issue.pull_request is not None:
                prs.append(issue.number)
        return sorted(prs)
+
+
def get_contributors(pr_data):
    """Return the PR's commit authors' logins, ordered by ascending commit count.

    Commits without a resolvable author login are skipped.
    """
    counts = {}
    for commit in pr_data.get_commits():
        if not commit or not commit.author or not commit.author.login:
            continue
        login = commit.author.login
        counts[login] = counts.get(login, 0) + 1
    return [login for login, _ in sorted(counts.items(), key=lambda item: item[1])]
+
+
def upload_assets(gh_release, files):
    """Attach binaries, their .sha256 sidecars and run.sh to the release draft.

    *files* maps filename -> (local_path, sha256), as produced by
    BinaryDownloader.download_all(); the sidecar file is expected to exist
    next to each binary.
    """
    print('Uploading binaries and hash sums')
    for filename, filedata in files.items():
        print('Uploading {}...'.format(filename))
        gh_release.upload_asset(filedata[0], content_type='application/octet-stream')
        gh_release.upload_asset('{}.sha256'.format(filedata[0]), content_type='text/plain')
    print('Uploading run.sh...')
    gh_release.upload_asset(
        os.path.join(REPO_ROOT, 'script', 'run', 'run.sh'), content_type='text/plain'
    )
+
+
def delete_assets(gh_release):
    """Delete every asset currently attached to *gh_release*.

    Used by `resume` to clear uploads from a previous attempt.
    """
    print('Removing previously uploaded assets')
    existing = gh_release.get_assets()
    for asset in existing:
        print('Deleting asset {}'.format(asset.name))
        asset.delete_asset()
diff --git a/script/release/release/utils.py b/script/release/release/utils.py
new file mode 100644
index 00000000..977a0a71
--- /dev/null
+++ b/script/release/release/utils.py
@@ -0,0 +1,85 @@
+from __future__ import absolute_import
+from __future__ import unicode_literals
+
+import os
+import re
+
+from .const import REPO_ROOT
+from compose import const as compose_const
+
+section_header_re = re.compile(r'^[0-9]+\.[0-9]+\.[0-9]+ \([0-9]{4}-[01][0-9]-[0-3][0-9]\)$')
+
+
class ScriptError(Exception):
    """Raised when a release-script step fails and the run should abort."""
+
+
def branch_name(version):
    """Return the name of the release bump branch for *version*."""
    return 'bump-{}'.format(version)
+
+
def read_release_notes_from_changelog():
    """Return the body of the topmost section of CHANGELOG.md.

    A section starts at a line matching ``section_header_re`` (a version
    header like ``1.25.0 (2019-11-18)``); the body is the text between the
    first header (skipping the header itself and its underline) and the
    line before the next header.
    """
    with open(os.path.join(REPO_ROOT, 'CHANGELOG.md'), 'r') as f:
        lines = f.readlines()

    # Lazily yield the indices of version-header lines, in order.
    header_indices = (
        idx for idx, line in enumerate(lines) if section_header_re.match(line)
    )
    start = next(header_indices, len(lines))
    end = next(header_indices, len(lines))

    # +2 skips the header line and its underline; -1 drops the blank line
    # preceding the next header.
    return ''.join(lines[start + 2:end - 1])
+
+
def update_init_py_version(version):
    """Rewrite ``compose/__init__.py`` so ``__version__`` reports *version*."""
    init_path = os.path.join(REPO_ROOT, 'compose', '__init__.py')
    with open(init_path, 'r') as f:
        contents = f.read()
    updated = re.sub(
        r"__version__ = '[0-9a-z.-]+'",
        "__version__ = '{}'".format(version),
        contents,
    )
    with open(init_path, 'w') as f:
        f.write(updated)
+
+
def update_run_sh_version(version):
    """Rewrite ``script/run/run.sh`` so its VERSION variable equals *version*."""
    run_sh_path = os.path.join(REPO_ROOT, 'script', 'run', 'run.sh')
    with open(run_sh_path, 'r') as f:
        contents = f.read()
    updated = re.sub(
        r'VERSION="[0-9a-z.-]+"',
        'VERSION="{}"'.format(version),
        contents,
    )
    with open(run_sh_path, 'w') as f:
        f.write(updated)
+
+
def compatibility_matrix():
    """Map each supported engine version to its compose file format versions.

    Engine versions with no associated format still appear, mapped to an
    empty list.
    """
    matrix = {
        engine: [] for engine in compose_const.API_VERSION_TO_ENGINE_VERSION.values()
    }
    for file_format, api_version in compose_const.API_VERSIONS.items():
        engine = compose_const.API_VERSION_TO_ENGINE_VERSION[api_version]
        matrix[engine].append(file_format.vstring)
    return matrix
+
+
def yesno(prompt, default=None):
    """
    Prompt the user for a yes or no.

    Can optionally specify a default value, which will only be
    used if they enter a blank line.

    Unrecognised input (anything other than "y", "n", "yes",
    "no" or "") will return None.
    """
    answer = input(prompt).strip().lower()

    if answer in ('y', 'yes'):
        return True
    if answer in ('n', 'no'):
        return False
    if not answer:
        return default
    return None
diff --git a/script/release/setup-venv.sh b/script/release/setup-venv.sh
new file mode 100755
index 00000000..ab419be0
--- /dev/null
+++ b/script/release/setup-venv.sh
@@ -0,0 +1,47 @@
#!/bin/bash
#
# Create a virtualenv containing the dependencies needed to run the
# release scripts, then install this repo into it in develop mode.
# Override VENV_DIR to control where the venv is created, and PYTHONBIN
# to pick a specific Python interpreter.

debian_based() { test -f /etc/debian_version; }

if test -z "$VENV_DIR"; then
    VENV_DIR=./.release-venv
fi

if test -z "$PYTHONBIN"; then
    PYTHONBIN=$(which python3)
    if test -z "$PYTHONBIN"; then
        PYTHONBIN=$(which python)
    fi
fi

if test -z "$PYTHONBIN"; then
    echo "No Python interpreter found on PATH" >&2
    exit 1
fi

VERSION=$("$PYTHONBIN" -c "import sys; print('{}.{}'.format(*sys.version_info[0:2]))")
MAJOR=$(echo "$VERSION" | cut -d. -f1)
MINOR=$(echo "$VERSION" | cut -d. -f2)
# Fail fast on unsupported interpreters. The original check printed the
# message but carried on regardless, and rejected minor < 3 even for
# major versions above 3.
if test "$MAJOR" -lt 3 || { test "$MAJOR" -eq 3 && test "$MINOR" -lt 3; }; then
    echo "Python 3.3 or above is required (found $VERSION)" >&2
    exit 1
fi

# Debian / Ubuntu workaround:
# https://askubuntu.com/questions/879437/ensurepip-is-disabled-in-debian-ubuntu-for-the-system-python
if debian_based; then
    VENV_FLAGS="$VENV_FLAGS --without-pip"
fi

# VENV_FLAGS is deliberately unquoted: it may contain several flags that
# must undergo word splitting.
"$PYTHONBIN" -m venv "$VENV_DIR" $VENV_FLAGS

VENV_PYTHONBIN="$VENV_DIR/bin/python"

# Debian venvs are created --without-pip above, so bootstrap pip manually.
if debian_based; then
    curl https://bootstrap.pypa.io/get-pip.py -o "$VENV_DIR/get-pip.py"
    "$VENV_PYTHONBIN" "$VENV_DIR/get-pip.py"
fi

"$VENV_PYTHONBIN" -m pip install -U Jinja2==2.10 \
    PyGithub==1.39 \
    GitPython==2.1.9 \
    requests==2.18.4 \
    setuptools==40.6.2 \
    twine==1.11.0

"$VENV_PYTHONBIN" setup.py develop
diff --git a/script/run/run.sh b/script/run/run.sh
index 1e4bd985..ffeec59a 100755
--- a/script/run/run.sh
+++ b/script/run/run.sh
@@ -15,7 +15,7 @@
set -e
-VERSION="1.21.0"
+VERSION="1.25.0"
IMAGE="docker/compose:$VERSION"
@@ -47,11 +47,17 @@ if [ -n "$HOME" ]; then
fi
# Only allocate tty if we detect one
-if [ -t 1 ]; then
- DOCKER_RUN_OPTIONS="-t"
+if [ -t 0 -a -t 1 ]; then
+ DOCKER_RUN_OPTIONS="$DOCKER_RUN_OPTIONS -t"
fi
-if [ -t 0 ]; then
- DOCKER_RUN_OPTIONS="$DOCKER_RUN_OPTIONS -i"
+
+# Always set -i to support piped and terminal input in run/exec
+DOCKER_RUN_OPTIONS="$DOCKER_RUN_OPTIONS -i"
+
+
+# Handle userns security
+if [ ! -z "$(docker info 2>/dev/null | grep userns)" ]; then
+ DOCKER_RUN_OPTIONS="$DOCKER_RUN_OPTIONS --userns=host"
fi
exec docker run --rm $DOCKER_RUN_OPTIONS $DOCKER_ADDR $COMPOSE_OPTIONS $VOLUMES -w "$(pwd)" $IMAGE "$@"
diff --git a/script/setup/osx b/script/setup/osx
index 972e79ef..69280f8a 100755
--- a/script/setup/osx
+++ b/script/setup/osx
@@ -1,43 +1,110 @@
-#!/bin/bash
+#!/usr/bin/env bash
set -ex
-python_version() {
- python -V 2>&1
-}
+. $(dirname $0)/osx_helpers.sh
-python3_version() {
- python3 -V 2>&1
-}
-
-openssl_version() {
- python -c "import ssl; print ssl.OPENSSL_VERSION"
-}
+DEPLOYMENT_TARGET=${DEPLOYMENT_TARGET:-"$(macos_version)"}
+SDK_FETCH=
+if ! [ ${DEPLOYMENT_TARGET} == "$(macos_version)" ]; then
+ SDK_FETCH=1
+ # SDK URL from https://github.com/docker/golang-cross/blob/master/osx-cross.sh
+ SDK_URL=https://s3.dockerproject.org/darwin/v2/MacOSX${DEPLOYMENT_TARGET}.sdk.tar.xz
+ SDK_SHA1=dd228a335194e3392f1904ce49aff1b1da26ca62
+fi
-desired_python3_version="3.6.4"
-desired_python3_brew_version="3.6.4_2"
-python3_formula="https://raw.githubusercontent.com/Homebrew/homebrew-core/b4e69a9a592232fa5a82741f6acecffc2f1d198d/Formula/python3.rb"
+OPENSSL_VERSION=1.1.1c
+OPENSSL_URL=https://www.openssl.org/source/openssl-${OPENSSL_VERSION}.tar.gz
+OPENSSL_SHA1=71b830a077276cbeccc994369538617a21bee808
-PATH="/usr/local/bin:$PATH"
+PYTHON_VERSION=3.7.4
+PYTHON_URL=https://www.python.org/ftp/python/${PYTHON_VERSION}/Python-${PYTHON_VERSION}.tgz
+PYTHON_SHA1=fb1d764be8a9dcd40f2f152a610a0ab04e0d0ed3
-if !(which brew); then
+#
+# Install prerequisites.
+#
+if ! [ -x "$(command -v brew)" ]; then
ruby -e "$(curl -fsSL https://raw.githubusercontent.com/Homebrew/install/master/install)"
fi
+if ! [ -x "$(command -v grealpath)" ]; then
+ brew update > /dev/null
+ brew install coreutils
+fi
+if ! [ -x "$(command -v python3)" ]; then
+ brew update > /dev/null
+ brew install python3
+fi
+if ! [ -x "$(command -v virtualenv)" ]; then
+ pip install virtualenv==16.2.0
+fi
-brew update > /dev/null
-
-if !(python3_version | grep "$desired_python3_version"); then
- if brew list | grep python3; then
- brew unlink python3
- fi
+#
+# Create toolchain directory.
+#
+BUILD_PATH="$(grealpath $(dirname $0)/../../build)"
+mkdir -p ${BUILD_PATH}
+TOOLCHAIN_PATH="${BUILD_PATH}/toolchain"
+mkdir -p ${TOOLCHAIN_PATH}
- brew install "$python3_formula"
- brew switch python3 "$desired_python3_brew_version"
+#
+# Set macOS SDK.
+#
+if [[ ${SDK_FETCH} && ! -f ${TOOLCHAIN_PATH}/MacOSX${DEPLOYMENT_TARGET}.sdk/SDKSettings.plist ]]; then
+ SDK_PATH=${TOOLCHAIN_PATH}/MacOSX${DEPLOYMENT_TARGET}.sdk
+ fetch_tarball ${SDK_URL} ${SDK_PATH} ${SDK_SHA1}
+else
+ SDK_PATH="$(xcode-select --print-path)/Platforms/MacOSX.platform/Developer/SDKs/MacOSX${DEPLOYMENT_TARGET}.sdk"
fi
-echo "*** Using $(python3_version) ; $(python_version)"
-echo "*** Using $(openssl_version)"
+#
+# Build OpenSSL.
+#
+OPENSSL_SRC_PATH=${TOOLCHAIN_PATH}/openssl-${OPENSSL_VERSION}
+if ! [[ $(${TOOLCHAIN_PATH}/bin/openssl version) == *"${OPENSSL_VERSION}"* ]]; then
+ rm -rf ${OPENSSL_SRC_PATH}
+ fetch_tarball ${OPENSSL_URL} ${OPENSSL_SRC_PATH} ${OPENSSL_SHA1}
+ (
+ cd ${OPENSSL_SRC_PATH}
+ export MACOSX_DEPLOYMENT_TARGET=${DEPLOYMENT_TARGET}
+ export SDKROOT=${SDK_PATH}
+ ./Configure darwin64-x86_64-cc --prefix=${TOOLCHAIN_PATH}
+ make install_sw install_dev
+ )
+fi
-if !(which virtualenv); then
- pip install virtualenv
+#
+# Build Python.
+#
+PYTHON_SRC_PATH=${TOOLCHAIN_PATH}/Python-${PYTHON_VERSION}
+if ! [[ $(${TOOLCHAIN_PATH}/bin/python3 --version) == *"${PYTHON_VERSION}"* ]]; then
+ rm -rf ${PYTHON_SRC_PATH}
+ fetch_tarball ${PYTHON_URL} ${PYTHON_SRC_PATH} ${PYTHON_SHA1}
+ (
+ cd ${PYTHON_SRC_PATH}
+ ./configure --prefix=${TOOLCHAIN_PATH} \
+ --enable-ipv6 --without-ensurepip --with-dtrace --without-gcc \
+ --datarootdir=${TOOLCHAIN_PATH}/share \
+ --datadir=${TOOLCHAIN_PATH}/share \
+ --enable-framework=${TOOLCHAIN_PATH}/Frameworks \
+ --with-openssl=${TOOLCHAIN_PATH} \
+ MACOSX_DEPLOYMENT_TARGET=${DEPLOYMENT_TARGET} \
+ CFLAGS="-isysroot ${SDK_PATH} -I${TOOLCHAIN_PATH}/include" \
+ CPPFLAGS="-I${SDK_PATH}/usr/include -I${TOOLCHAIN_PATH}/include" \
+ LDFLAGS="-isysroot ${SDK_PATH} -L ${TOOLCHAIN_PATH}/lib"
+ make -j 4
+ make install PYTHONAPPSDIR=${TOOLCHAIN_PATH}
+ make frameworkinstallextras PYTHONAPPSDIR=${TOOLCHAIN_PATH}/share
+ )
fi
+
+#
+# Smoke test built Python.
+#
+openssl_version ${TOOLCHAIN_PATH}
+
+echo ""
+echo "*** Targeting macOS: ${DEPLOYMENT_TARGET}"
+echo "*** Using SDK ${SDK_PATH}"
+echo "*** Using $(python3_version ${TOOLCHAIN_PATH})"
+echo "*** Using $(openssl_version ${TOOLCHAIN_PATH})"
diff --git a/script/setup/osx_helpers.sh b/script/setup/osx_helpers.sh
new file mode 100644
index 00000000..d60a30b6
--- /dev/null
+++ b/script/setup/osx_helpers.sh
@@ -0,0 +1,41 @@
#!/usr/bin/env bash
# Helper functions for the macOS toolchain build (sourced by script/setup/osx).

# Check file's ($1) SHA1 against the expected digest ($2).
# Exits non-zero when the digest does not match.
check_sha1() {
  echo -n "$2 *$1" | shasum -c -
}

# Download URL ($1) to path ($2).
download() {
  curl -L "$1" -o "$2"
}

# Extract tarball ($1) in folder ($2).
extract() {
  tar xf "$1" -C "$2"
}

# Download URL ($1) beside target path ($2), verify its SHA1 ($3), then
# extract it in place. Stops before extracting when the download or the
# checksum verification fails, so an incomplete or tampered tarball is
# never unpacked (the original ignored both results).
fetch_tarball() {
  url="$1"
  tarball="$2.tarball"
  sha1="$3"
  download "$url" "$tarball" || return 1
  check_sha1 "$tarball" "$sha1" || return 1
  extract "$tarball" "$(dirname "$tarball")"
}

# Version of Python at toolchain path ($1).
python3_version() {
  "$1/bin/python3" -V 2>&1
}

# Version of OpenSSL used by toolchain ($1) Python.
openssl_version() {
  "$1/bin/python3" -c "import ssl; print(ssl.OPENSSL_VERSION)"
}

# System macOS version (major.minor).
macos_version() {
  sw_vers -productVersion | cut -f1,2 -d'.'
}
diff --git a/script/test/all b/script/test/all
index e48f73bb..f929a57e 100755
--- a/script/test/all
+++ b/script/test/all
@@ -8,8 +8,7 @@ set -e
docker run --rm \
--tty \
${GIT_VOLUME} \
- --entrypoint="tox" \
- "$TAG" -e pre-commit
+ "$TAG" tox -e pre-commit
get_versions="docker run --rm
--entrypoint=/code/.tox/py27/bin/python
@@ -24,7 +23,7 @@ fi
BUILD_NUMBER=${BUILD_NUMBER-$USER}
-PY_TEST_VERSIONS=${PY_TEST_VERSIONS:-py27,py36}
+PY_TEST_VERSIONS=${PY_TEST_VERSIONS:-py27,py37}
for version in $DOCKER_VERSIONS; do
>&2 echo "Running tests against Docker $version"
diff --git a/script/test/ci b/script/test/ci
index 8d3aa56c..bbcedac4 100755
--- a/script/test/ci
+++ b/script/test/ci
@@ -20,6 +20,3 @@ export DOCKER_DAEMON_ARGS="--storage-driver=$STORAGE_DRIVER"
GIT_VOLUME="--volumes-from=$(hostname)"
. script/test/all
-
->&2 echo "Building Linux binary"
-. script/build/linux-entrypoint
diff --git a/script/test/default b/script/test/default
index aabb4e42..4f307f2e 100755
--- a/script/test/default
+++ b/script/test/default
@@ -3,17 +3,18 @@
set -ex
-TAG="docker-compose:$(git rev-parse --short HEAD)"
+TAG="docker-compose:alpine-$(git rev-parse --short HEAD)"
-# By default use the Dockerfile, but can be overriden to use an alternative file
-# e.g DOCKERFILE=Dockerfile.armhf script/test/default
+# By default use the Dockerfile, but can be overridden to use an alternative file
+# e.g DOCKERFILE=Dockerfile.s390x script/test/default
DOCKERFILE="${DOCKERFILE:-Dockerfile}"
+DOCKER_BUILD_TARGET="${DOCKER_BUILD_TARGET:-build}"
rm -rf coverage-html
# Create the host directory so it's owned by $USER
mkdir -p coverage-html
-docker build -f ${DOCKERFILE} -t "$TAG" .
+docker build -f "${DOCKERFILE}" -t "${TAG}" --target "${DOCKER_BUILD_TARGET}" .
GIT_VOLUME="--volume=$(pwd)/.git:/code/.git"
. script/test/all
diff --git a/script/test/versions.py b/script/test/versions.py
index f699f268..a06c49f2 100755
--- a/script/test/versions.py
+++ b/script/test/versions.py
@@ -36,23 +36,24 @@ import requests
GITHUB_API = 'https://api.github.com/repos'
+STAGES = ['tp', 'beta', 'rc']
-class Version(namedtuple('_Version', 'major minor patch rc edition')):
+
+class Version(namedtuple('_Version', 'major minor patch stage edition')):
@classmethod
def parse(cls, version):
edition = None
version = version.lstrip('v')
- version, _, rc = version.partition('-')
- if rc:
- if 'rc' not in rc:
- edition = rc
- rc = None
- elif '-' in rc:
- edition, rc = rc.split('-')
-
+ version, _, stage = version.partition('-')
+ if stage:
+ if not any(marker in stage for marker in STAGES):
+ edition = stage
+ stage = None
+ elif '-' in stage:
+ edition, stage = stage.split('-')
major, minor, patch = version.split('.', 3)
- return cls(major, minor, patch, rc, edition)
+ return cls(major, minor, patch, stage, edition)
@property
def major_minor(self):
@@ -63,14 +64,22 @@ class Version(namedtuple('_Version', 'major minor patch rc edition')):
"""Return a representation that allows this object to be sorted
correctly with the default comparator.
"""
- # rc releases should appear before official releases
- rc = (0, self.rc) if self.rc else (1, )
- return (int(self.major), int(self.minor), int(self.patch)) + rc
+ # non-GA releases should appear before GA releases
+ # Order: tp -> beta -> rc -> GA
+ if self.stage:
+ for st in STAGES:
+ if st in self.stage:
+ stage = (STAGES.index(st), self.stage)
+ break
+ else:
+ stage = (len(STAGES),)
+
+ return (int(self.major), int(self.minor), int(self.patch)) + stage
def __str__(self):
- rc = '-{}'.format(self.rc) if self.rc else ''
+ stage = '-{}'.format(self.stage) if self.stage else ''
edition = '-{}'.format(self.edition) if self.edition else ''
- return '.'.join(map(str, self[:3])) + edition + rc
+ return '.'.join(map(str, self[:3])) + edition + stage
BLACKLIST = [ # List of versions known to be broken and should not be used
@@ -113,9 +122,9 @@ def get_latest_versions(versions, num=1):
def get_default(versions):
- """Return a :class:`Version` for the latest non-rc version."""
+ """Return a :class:`Version` for the latest GA version."""
for version in versions:
- if not version.rc:
+ if not version.stage:
return version
@@ -123,8 +132,9 @@ def get_versions(tags):
for tag in tags:
try:
v = Version.parse(tag['name'])
- if v not in BLACKLIST:
- yield v
+ if v in BLACKLIST:
+ continue
+ yield v
except ValueError:
print("Skipping invalid tag: {name}".format(**tag), file=sys.stderr)